Compare commits

9 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 36c9c84047 |  |
|  | 88b4aaa301 |  |
|  | eac4c227c9 |  |
|  | d5eb7d4a55 |  |
|  | 80b15e16e2 |  |
|  | cfacedbb1a |  |
|  | c3bab033ed |  |
|  | 524f9bd84f |  |
|  | 4658ede9d6 |  |
07.md (deleted, 32 lines)
@@ -1,32 +0,0 @@

NIP-07
======

`window.nostr` capability for web browsers
------------------------------------------

`draft` `optional`

The `window.nostr` object may be made available by web browsers or extensions and websites or web-apps may make use of it after checking its availability.

That object must define the following methods:

```
async window.nostr.getPublicKey(): string // returns a public key as hex
async window.nostr.signEvent(event: { created_at: number, kind: number, tags: string[][], content: string }): Event // takes an event object, adds `id`, `pubkey` and `sig` and returns it
```

Aside from these two basic above, the following functions can also be implemented optionally:

```
async window.nostr.nip04.encrypt(pubkey, plaintext): string // returns ciphertext and iv as specified in nip-04 (deprecated)
async window.nostr.nip04.decrypt(pubkey, ciphertext): string // takes ciphertext and iv as specified in nip-04 (deprecated)
async window.nostr.nip44.encrypt(pubkey, plaintext): string // returns ciphertext as specified in nip-44
async window.nostr.nip44.decrypt(pubkey, ciphertext): string // takes ciphertext as specified in nip-44
```

### Recommendation to Extension Authors

To make sure that the `window.nostr` is available to nostr clients on page load, the authors who create Chromium and Firefox extensions should load their scripts by specifying `"run_at": "document_end"` in the extension's manifest.

### Implementation

See https://github.com/aljazceru/awesome-nostr#nip-07-browser-extensions.
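For context, here is a minimal sketch of how a web page might consume the `window.nostr` capability described in the deleted 07.md above. It assumes a NIP-07 browser extension is installed; the method names and event fields come from the spec text, while the kind-1 note and the error handling are purely illustrative.

```javascript
// Hypothetical usage sketch of the NIP-07 capability documented above.
// Assumes a NIP-07 extension has injected window.nostr into the page.
async function publishNoteViaNip07(content) {
  if (!window.nostr) {
    throw new Error('No NIP-07 provider (window.nostr) available');
  }
  // Hex public key of the logged-in user, as returned by the extension
  const pubkey = await window.nostr.getPublicKey();
  // The extension fills in id, pubkey and sig before returning the event
  const signedEvent = await window.nostr.signEvent({
    created_at: Math.floor(Date.now() / 1000),
    kind: 1,          // illustrative: a plain text note
    tags: [],
    content
  });
  console.log('Event signed for', pubkey, signedEvent);
  return signedEvent; // ready to be sent to a relay as ["EVENT", ...]
}
```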
Makefile (63 changed lines)

@@ -36,10 +36,16 @@ $(NOSTR_CORE_LIB):
 	@echo "Building nostr_core_lib..."
 	cd nostr_core_lib && ./build.sh

-# Generate version.h from git tags
-src/version.h:
-	@if [ -d .git ]; then \
-		echo "Generating version.h from git tags..."; \
+# Update main.h version information (requires main.h to exist)
+src/main.h:
+	@if [ ! -f src/main.h ]; then \
+		echo "ERROR: src/main.h not found!"; \
+		echo "Please ensure src/main.h exists with relay metadata."; \
+		echo "Copy from a backup or create manually with proper relay configuration."; \
+		exit 1; \
+	fi; \
+	if [ -d .git ]; then \
+		echo "Updating main.h version information from git tags..."; \
 		RAW_VERSION=$$(git describe --tags --always 2>/dev/null || echo "unknown"); \
 		if echo "$$RAW_VERSION" | grep -q "^v[0-9]"; then \
 			CLEAN_VERSION=$$(echo "$$RAW_VERSION" | sed 's/^v//' | cut -d- -f1); \

@@ -51,54 +57,34 @@ src/version.h:
 			VERSION="v0.0.0"; \
 			MAJOR=0; MINOR=0; PATCH=0; \
 		fi; \
-		echo "/* Auto-generated version information */" > src/version.h; \
-		echo "#ifndef VERSION_H" >> src/version.h; \
-		echo "#define VERSION_H" >> src/version.h; \
-		echo "" >> src/version.h; \
-		echo "#define VERSION \"$$VERSION\"" >> src/version.h; \
-		echo "#define VERSION_MAJOR $$MAJOR" >> src/version.h; \
-		echo "#define VERSION_MINOR $$MINOR" >> src/version.h; \
-		echo "#define VERSION_PATCH $$PATCH" >> src/version.h; \
-		echo "" >> src/version.h; \
-		echo "#endif /* VERSION_H */" >> src/version.h; \
-		echo "Generated version.h with clean version: $$VERSION"; \
-	elif [ ! -f src/version.h ]; then \
-		echo "Git not available and version.h missing, creating fallback version.h..."; \
-		VERSION="v0.0.0"; \
-		echo "/* Auto-generated version information */" > src/version.h; \
-		echo "#ifndef VERSION_H" >> src/version.h; \
-		echo "#define VERSION_H" >> src/version.h; \
-		echo "" >> src/version.h; \
-		echo "#define VERSION \"$$VERSION\"" >> src/version.h; \
-		echo "#define VERSION_MAJOR 0" >> src/version.h; \
-		echo "#define VERSION_MINOR 0" >> src/version.h; \
-		echo "#define VERSION_PATCH 0" >> src/version.h; \
-		echo "" >> src/version.h; \
-		echo "#endif /* VERSION_H */" >> src/version.h; \
-		echo "Created fallback version.h with version: $$VERSION"; \
+		echo "Updating version information in existing main.h..."; \
+		sed -i "s/#define VERSION \".*\"/#define VERSION \"$$VERSION\"/g" src/main.h; \
+		sed -i "s/#define VERSION_MAJOR [0-9]*/#define VERSION_MAJOR $$MAJOR/g" src/main.h; \
+		sed -i "s/#define VERSION_MINOR [0-9]*/#define VERSION_MINOR $$MINOR/g" src/main.h; \
+		sed -i "s/#define VERSION_PATCH [0-9]*/#define VERSION_PATCH $$PATCH/g" src/main.h; \
+		echo "Updated main.h version to: $$VERSION"; \
 	else \
-		echo "Git not available, preserving existing version.h"; \
+		echo "Git not available, preserving existing main.h version information"; \
 	fi

-# Force version.h regeneration (useful for development)
+# Update main.h version information (requires existing main.h)
 force-version:
-	@echo "Force regenerating version.h..."
-	@rm -f src/version.h
-	@$(MAKE) src/version.h
+	@echo "Force updating main.h version information..."
+	@$(MAKE) src/main.h

 # Build the relay
-$(TARGET): $(BUILD_DIR) src/version.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
+$(TARGET): $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
 	@echo "Compiling C-Relay for architecture: $(ARCH)"
 	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(TARGET) $(NOSTR_CORE_LIB) $(LIBS)
 	@echo "Build complete: $(TARGET)"

 # Build for specific architectures
-x86: $(BUILD_DIR) src/version.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
+x86: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
 	@echo "Building C-Relay for x86_64..."
 	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_x86 $(NOSTR_CORE_LIB) $(LIBS)
 	@echo "Build complete: $(BUILD_DIR)/c_relay_x86"

-arm64: $(BUILD_DIR) src/version.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
+arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
 	@echo "Cross-compiling C-Relay for ARM64..."
 	@if ! command -v aarch64-linux-gnu-gcc >/dev/null 2>&1; then \
 		echo "ERROR: ARM64 cross-compiler not found."; \

@@ -171,7 +157,6 @@ init-db:
 # Clean build artifacts
 clean:
 	rm -rf $(BUILD_DIR)
-	rm -f src/version.h
 	@echo "Clean complete"

 # Clean everything including nostr_core_lib

@@ -210,6 +195,6 @@ help:
 	@echo " make check-toolchain # Check what compilers are available"
 	@echo " make test # Run tests"
 	@echo " make init-db # Set up database"
-	@echo " make force-version # Force regenerate version.h from git"
+	@echo " make force-version # Force regenerate main.h from git"

 .PHONY: all x86 arm64 test init-db clean clean-all install-deps install-cross-tools install-arm64-deps check-toolchain help force-version
README.md (32 changed lines)

@@ -18,9 +18,9 @@ Do NOT modify the formatting, add emojis, or change the text. Keep the simple fo
 - [x] NIP-33: Parameterized Replaceable Events
 - [x] NIP-40: Expiration Timestamp
 - [x] NIP-42: Authentication of clients to relays
-- [ ] NIP-45: Counting results
-- [ ] NIP-50: Keywords filter
-- [ ] NIP-70: Protected Events
+- [x] NIP-45: Counting results
+- [x] NIP-50: Keywords filter
+- [x] NIP-70: Protected Events

 ## 🔧 Administrator API

@@ -91,8 +91,16 @@ All commands are sent as NIP-44 encrypted JSON arrays in the event content. The
 ### Available Configuration Keys

 **Basic Relay Settings:**
+- `relay_name`: Relay name (displayed in NIP-11)
 - `relay_description`: Relay description text
 - `relay_contact`: Contact information
+- `relay_software`: Software URL
+- `relay_version`: Software version
+- `supported_nips`: Comma-separated list of supported NIP numbers (e.g., "1,2,4,9,11,12,13,15,16,20,22,33,40,42")
+- `language_tags`: Comma-separated list of supported language tags (e.g., "en,es,fr" or "*" for all)
+- `relay_countries`: Comma-separated list of supported country codes (e.g., "US,CA,MX" or "*" for all)
+- `posting_policy`: Posting policy URL or text
+- `payments_url`: Payment URL for premium features
 - `max_connections`: Maximum concurrent connections
 - `max_subscriptions_per_client`: Max subscriptions per client
 - `max_event_tags`: Maximum tags per event

@@ -108,6 +116,24 @@ All commands are sent as NIP-44 encrypted JSON arrays in the event content. The
 - `pow_min_difficulty`: Minimum proof-of-work difficulty
 - `nip40_expiration_enabled`: Enable event expiration (`true`/`false`)

+### Dynamic Configuration Updates
+
+C-Relay supports **dynamic configuration updates** without requiring a restart for most settings. Configuration parameters are categorized as either **dynamic** (can be updated immediately) or **restart-required** (require relay restart to take effect).
+
+**Dynamic Configuration Parameters (No Restart Required):**
+- All relay information (NIP-11) settings: `relay_name`, `relay_description`, `relay_contact`, `relay_software`, `relay_version`, `supported_nips`, `language_tags`, `relay_countries`, `posting_policy`, `payments_url`
+- Authentication settings: `auth_enabled`, `nip42_auth_required`, `nip42_auth_required_kinds`, `nip42_challenge_timeout`
+- Subscription limits: `max_subscriptions_per_client`, `max_total_subscriptions`
+- Event validation limits: `max_event_tags`, `max_content_length`, `max_message_length`
+- Proof of Work settings: `pow_min_difficulty`, `pow_mode`
+- Event expiration settings: `nip40_expiration_enabled`, `nip40_expiration_strict`, `nip40_expiration_filter`, `nip40_expiration_grace_period`
+
+**Restart-Required Configuration Parameters:**
+- Connection settings: `max_connections`, `relay_port`
+- Database and core system settings
+
+When updating configuration, the admin API response will indicate whether a restart is required for each parameter. Dynamic updates take effect immediately and are reflected in NIP-11 relay information documents without restart.
+
 ### Response Format

 All admin commands return **signed EVENT responses** via WebSocket following standard Nostr protocol. Responses use JSON content with structured data.
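As a rough illustration of the admin flow the README section above describes (NIP-44 encrypted JSON arrays carried in the event content), here is a hypothetical client-side sketch. The command array shape and the event kind are placeholders invented for illustration; the relay's actual admin command format is not shown in this excerpt.

```javascript
// Hypothetical sketch only: the command array ["CONFIG_SET", key, value] and
// kind 23456 are illustrative placeholders, not the relay's actual format.
async function sendDynamicConfigUpdate(relayPubkey, key, value) {
  const payload = JSON.stringify(['CONFIG_SET', key, value]); // placeholder shape
  // Encrypt to the relay's pubkey with the NIP-07 provider's NIP-44 helper
  const content = await window.nostr.nip44.encrypt(relayPubkey, payload);
  const signedEvent = await window.nostr.signEvent({
    created_at: Math.floor(Date.now() / 1000),
    kind: 23456, // placeholder admin-command kind
    tags: [['p', relayPubkey]],
    content
  });
  return signedEvent; // publish over the relay WebSocket; expect a signed EVENT response
}

// Example: update a dynamic (no-restart) parameter listed above
// sendDynamicConfigUpdate(relayPubkey, 'relay_name', 'My Relay');
```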
@@ -932,7 +932,7 @@
         description: 'C-Relay instance - pubkey provided manually',
         pubkey: manualPubkey,
         contact: 'admin@manual.config.relay',
-        supported_nips: [1, 2, 4, 9, 11, 12, 15, 16, 20, 22],
+        supported_nips: [1, 9, 11, 13, 15, 20, 33, 40, 42],
         software: 'https://github.com/0xtrr/c-relay',
         version: '1.0.0'
     };

@@ -958,7 +958,7 @@
         description: 'C-Relay instance - pubkey provided manually',
         pubkey: manualPubkey,
         contact: 'admin@manual.config.relay',
-        supported_nips: [1, 2, 4, 9, 11, 12, 15, 16, 20, 22],
+        supported_nips: [1, 9, 11, 13, 15, 20, 33, 40, 42],
         software: 'https://github.com/0xtrr/c-relay',
         version: '1.0.0'
     };

@@ -1286,18 +1286,6 @@
         console.log('Logout event handled successfully');
     }

-    // Disconnect from relay and clean up connections
-    function disconnectFromRelay() {
-        if (relayPool) {
-            console.log('Cleaning up relay pool connection...');
-            const url = relayConnectionUrl.value.trim();
-            if (url) {
-                relayPool.close([url]);
-            }
-            relayPool = null;
-            subscriptionId = null;
-        }
-    }
-
     // Update visibility of admin sections based on login and relay connection status
     function updateAdminSectionsVisibility() {

@@ -2030,56 +2018,33 @@
     configForm.innerHTML = '';

-    // Define field types and validation for different config parameters
+    // Define field types and validation for different config parameters (aligned with README.md)
     const fieldTypes = {
         'auth_enabled': 'boolean',
-        'nip42_auth_required_events': 'boolean',
-        'nip42_auth_required_subscriptions': 'boolean',
+        'nip42_auth_required': 'boolean',
         'nip40_expiration_enabled': 'boolean',
-        'nip40_expiration_strict': 'boolean',
-        'nip40_expiration_filter': 'boolean',
-        'relay_port': 'number',
         'max_connections': 'number',
         'pow_min_difficulty': 'number',
-        'nip42_challenge_expiration': 'number',
-        'nip40_expiration_grace_period': 'number',
+        'nip42_challenge_timeout': 'number',
         'max_subscriptions_per_client': 'number',
-        'max_total_subscriptions': 'number',
-        'max_filters_per_subscription': 'number',
         'max_event_tags': 'number',
-        'max_content_length': 'number',
-        'max_message_length': 'number',
-        'default_limit': 'number',
-        'max_limit': 'number'
+        'max_content_length': 'number'
     };

     const descriptions = {
         'relay_pubkey': 'Relay Public Key (Read-only)',
         'auth_enabled': 'Enable Authentication',
-        'nip42_auth_required_events': 'Require Auth for Events',
-        'nip42_auth_required_subscriptions': 'Require Auth for Subscriptions',
-        'nip42_auth_required_kinds': 'Auth Required Event Kinds',
-        'nip42_challenge_expiration': 'Auth Challenge Expiration (seconds)',
-        'relay_port': 'Relay Port',
+        'nip42_auth_required': 'Enable NIP-42 Cryptographic Authentication',
+        'nip42_auth_required_kinds': 'Event Kinds Requiring NIP-42 Auth',
+        'nip42_challenge_timeout': 'NIP-42 Challenge Expiration Seconds',
         'max_connections': 'Maximum Connections',
         'relay_description': 'Relay Description',
         'relay_contact': 'Relay Contact',
-        'relay_software': 'Relay Software URL',
-        'relay_version': 'Relay Version',
-        'pow_min_difficulty': 'Minimum PoW Difficulty',
-        'pow_mode': 'PoW Mode',
+        'pow_min_difficulty': 'Minimum Proof-of-Work Difficulty',
         'nip40_expiration_enabled': 'Enable Event Expiration',
-        'nip40_expiration_strict': 'Strict Expiration Mode',
-        'nip40_expiration_filter': 'Filter Expired Events',
-        'nip40_expiration_grace_period': 'Expiration Grace Period (seconds)',
         'max_subscriptions_per_client': 'Max Subscriptions per Client',
-        'max_total_subscriptions': 'Max Total Subscriptions',
-        'max_filters_per_subscription': 'Max Filters per Subscription',
-        'max_event_tags': 'Max Event Tags',
-        'max_content_length': 'Max Content Length',
-        'max_message_length': 'Max Message Length',
-        'default_limit': 'Default Query Limit',
-        'max_limit': 'Maximum Query Limit'
+        'max_event_tags': 'Maximum Tags per Event',
+        'max_content_length': 'Maximum Event Content Length'
     };

     // Process configuration tags (no d tag filtering for ephemeral events)

@@ -3452,7 +3417,7 @@
     logTestEvent('SENT', `Add Whitelist event: ${JSON.stringify(signedEvent)}`, 'EVENT');

     // Publish via SimplePool
-    const url = relayUrl.value.trim();
+    const url = relayConnectionUrl.value.trim();
     const publishPromises = relayPool.publish([url], signedEvent);

     // Use Promise.allSettled to capture per-relay outcomes instead of Promise.any

@@ -3594,7 +3559,7 @@
     logTestEvent('SENT', `Signed test event: ${JSON.stringify(signedEvent)}`, 'EVENT');

     // Publish via SimplePool to the same relay with detailed error diagnostics
-    const url = relayUrl.value.trim();
+    const url = relayConnectionUrl.value.trim();
     logTestEvent('INFO', `Publishing to relay: ${url}`, 'INFO');

     const publishPromises = relayPool.publish([url], signedEvent);

@@ -139,11 +139,11 @@ compile_project() {
         print_warning "Clean failed or no Makefile found"
     fi

-    # Force regenerate version.h to pick up new tags
+    # Force regenerate main.h to pick up new tags
     if make force-version > /dev/null 2>&1; then
-        print_success "Regenerated version.h"
+        print_success "Regenerated main.h"
     else
-        print_warning "Failed to regenerate version.h"
+        print_warning "Failed to regenerate main.h"
     fi

     # Compile the project
c-relay.code-workspace (new file, 8 lines)
@@ -0,0 +1,8 @@

{
    "folders": [
        {
            "path": "."
        }
    ],
    "settings": {}
}
@@ -282,7 +282,7 @@ cd build
 # Start relay in background and capture its PID
 if [ "$USE_TEST_KEYS" = true ]; then
     echo "Using deterministic test keys for development..."
-    ./$(basename $BINARY_PATH) -a aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa -r 1111111111111111111111111111111111111111111111111111111111111111 --strict-port > ../relay.log 2>&1 &
+    ./$(basename $BINARY_PATH) -a 6a04ab98d9e4774ad806e302dddeb63bea16b5cb5f223ee77478e861bb583eb3 -r 1111111111111111111111111111111111111111111111111111111111111111 --strict-port > ../relay.log 2>&1 &
 elif [ -n "$RELAY_ARGS" ]; then
     echo "Starting relay with custom configuration..."
     ./$(basename $BINARY_PATH) $RELAY_ARGS --strict-port > ../relay.log 2>&1 &
@@ -1,455 +0,0 @@

# NIP-11 Relay Connection Implementation Plan

## Overview
Implement NIP-11 relay information fetching in the web admin interface to replace hardcoded relay pubkey and provide proper relay connection flow.

## Current Issues
1. **Hardcoded Relay Pubkey**: `getRelayPubkey()` returns hardcoded value `'4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa'`
2. **Relay URL in Debug Section**: Currently in "DEBUG - TEST FETCH WITHOUT LOGIN" section (lines 336-385)
3. **No Relay Verification**: Users can attempt admin operations without verifying relay identity
4. **Missing NIP-11 Support**: No fetching of relay information document

## Implementation Plan

### 1. New Relay Connection Section (HTML Structure)

Add after User Info section (around line 332):

```html
<!-- Relay Connection Section -->
<div class="section">
    <h2>RELAY CONNECTION</h2>
    <div class="input-group">
        <label for="relay-url-input">Relay URL:</label>
        <input type="text" id="relay-url-input" value="ws://localhost:8888" placeholder="ws://localhost:8888 or wss://relay.example.com">
    </div>
    <div class="inline-buttons">
        <button type="button" id="connect-relay-btn">CONNECT TO RELAY</button>
        <button type="button" id="disconnect-relay-btn" style="display: none;">DISCONNECT</button>
    </div>
    <div class="status disconnected" id="relay-connection-status">NOT CONNECTED</div>

    <!-- Relay Information Display -->
    <div id="relay-info-display" class="hidden">
        <h3>Relay Information</h3>
        <div class="user-info">
            <div><strong>Name:</strong> <span id="relay-name">-</span></div>
            <div><strong>Description:</strong> <span id="relay-description">-</span></div>
            <div><strong>Public Key:</strong>
                <div class="user-pubkey" id="relay-pubkey-display">-</div>
            </div>
            <div><strong>Software:</strong> <span id="relay-software">-</span></div>
            <div><strong>Version:</strong> <span id="relay-version">-</span></div>
            <div><strong>Contact:</strong> <span id="relay-contact">-</span></div>
            <div><strong>Supported NIPs:</strong> <span id="relay-nips">-</span></div>
        </div>
    </div>
</div>
```

### 2. JavaScript Implementation

#### Global State Variables
Add to global state section (around line 535):

```javascript
// Relay connection state
let relayInfo = null;
let isRelayConnected = false;
let relayWebSocket = null;
```

#### NIP-11 Fetching Function
Add new function:

```javascript
// Fetch relay information using NIP-11
async function fetchRelayInfo(relayUrl) {
    try {
        console.log('=== FETCHING RELAY INFO VIA NIP-11 ===');
        console.log('Relay URL:', relayUrl);

        // Convert WebSocket URL to HTTP URL for NIP-11
        let httpUrl = relayUrl;
        if (relayUrl.startsWith('ws://')) {
            httpUrl = relayUrl.replace('ws://', 'http://');
        } else if (relayUrl.startsWith('wss://')) {
            httpUrl = relayUrl.replace('wss://', 'https://');
        }

        console.log('HTTP URL for NIP-11:', httpUrl);

        // Fetch relay information document
        const response = await fetch(httpUrl, {
            method: 'GET',
            headers: {
                'Accept': 'application/nostr+json'
            },
            // Add timeout
            signal: AbortSignal.timeout(10000) // 10 second timeout
        });

        if (!response.ok) {
            throw new Error(`HTTP ${response.status}: ${response.statusText}`);
        }

        const contentType = response.headers.get('content-type');
        if (!contentType || !contentType.includes('application/json')) {
            throw new Error(`Invalid content type: ${contentType}. Expected application/json or application/nostr+json`);
        }

        const relayInfoData = await response.json();
        console.log('Fetched relay info:', relayInfoData);

        // Validate required fields
        if (!relayInfoData.pubkey) {
            throw new Error('Relay information missing required pubkey field');
        }

        // Validate pubkey format (64 hex characters)
        if (!/^[0-9a-fA-F]{64}$/.test(relayInfoData.pubkey)) {
            throw new Error(`Invalid relay pubkey format: ${relayInfoData.pubkey}`);
        }

        return relayInfoData;

    } catch (error) {
        console.error('Failed to fetch relay info:', error);
        throw error;
    }
}
```

#### Relay Connection Function
Add new function:

```javascript
// Connect to relay and fetch information
async function connectToRelay() {
    try {
        const relayUrlInput = document.getElementById('relay-url-input');
        const connectBtn = document.getElementById('connect-relay-btn');
        const disconnectBtn = document.getElementById('disconnect-relay-btn');
        const statusDiv = document.getElementById('relay-connection-status');
        const infoDisplay = document.getElementById('relay-info-display');

        const url = relayUrlInput.value.trim();
        if (!url) {
            throw new Error('Please enter a relay URL');
        }

        // Update UI to show connecting state
        connectBtn.disabled = true;
        statusDiv.textContent = 'CONNECTING...';
        statusDiv.className = 'status connected';

        console.log('Connecting to relay:', url);

        // Fetch relay information via NIP-11
        console.log('Fetching relay information...');
        const fetchedRelayInfo = await fetchRelayInfo(url);

        // Test WebSocket connection
        console.log('Testing WebSocket connection...');
        await testWebSocketConnection(url);

        // Store relay information
        relayInfo = fetchedRelayInfo;
        isRelayConnected = true;

        // Update UI with relay information
        displayRelayInfo(relayInfo);

        // Update connection status
        statusDiv.textContent = 'CONNECTED';
        statusDiv.className = 'status connected';

        // Update button states
        connectBtn.style.display = 'none';
        disconnectBtn.style.display = 'inline-block';
        relayUrlInput.disabled = true;

        // Show relay info
        infoDisplay.classList.remove('hidden');

        console.log('Successfully connected to relay:', relayInfo.name || url);
        log(`Connected to relay: ${relayInfo.name || url}`, 'INFO');

    } catch (error) {
        console.error('Failed to connect to relay:', error);

        // Reset UI state
        const connectBtn = document.getElementById('connect-relay-btn');
        const statusDiv = document.getElementById('relay-connection-status');

        connectBtn.disabled = false;
        statusDiv.textContent = `CONNECTION FAILED: ${error.message}`;
        statusDiv.className = 'status error';

        // Clear any partial state
        relayInfo = null;
        isRelayConnected = false;

        log(`Failed to connect to relay: ${error.message}`, 'ERROR');
    }
}
```

#### WebSocket Connection Test
Add new function:

```javascript
// Test WebSocket connection to relay
async function testWebSocketConnection(url) {
    return new Promise((resolve, reject) => {
        const timeout = setTimeout(() => {
            ws.close();
            reject(new Error('WebSocket connection timeout'));
        }, 5000);

        const ws = new WebSocket(url);

        ws.onopen = () => {
            clearTimeout(timeout);
            console.log('WebSocket connection successful');
            ws.close();
            resolve();
        };

        ws.onerror = (error) => {
            clearTimeout(timeout);
            console.error('WebSocket connection failed:', error);
            reject(new Error('WebSocket connection failed'));
        };

        ws.onclose = (event) => {
            if (event.code !== 1000) {
                clearTimeout(timeout);
                reject(new Error(`WebSocket closed with code ${event.code}: ${event.reason}`));
            }
        };
    });
}
```

#### Display Relay Information
Add new function:

```javascript
// Display relay information in the UI
function displayRelayInfo(info) {
    document.getElementById('relay-name').textContent = info.name || 'Unknown';
    document.getElementById('relay-description').textContent = info.description || 'No description';
    document.getElementById('relay-pubkey-display').textContent = info.pubkey || 'Unknown';
    document.getElementById('relay-software').textContent = info.software || 'Unknown';
    document.getElementById('relay-version').textContent = info.version || 'Unknown';
    document.getElementById('relay-contact').textContent = info.contact || 'No contact info';

    // Format supported NIPs
    let nipsText = 'None specified';
    if (info.supported_nips && Array.isArray(info.supported_nips) && info.supported_nips.length > 0) {
        nipsText = info.supported_nips.map(nip => `NIP-${nip.toString().padStart(2, '0')}`).join(', ');
    }
    document.getElementById('relay-nips').textContent = nipsText;
}
```

#### Disconnect Function
Add new function:

```javascript
// Disconnect from relay
function disconnectFromRelay() {
    console.log('Disconnecting from relay...');

    // Clear relay state
    relayInfo = null;
    isRelayConnected = false;

    // Close any existing connections
    if (relayPool) {
        const url = document.getElementById('relay-url-input').value.trim();
        if (url) {
            relayPool.close([url]);
        }
        relayPool = null;
        subscriptionId = null;
    }

    // Reset UI
    const connectBtn = document.getElementById('connect-relay-btn');
    const disconnectBtn = document.getElementById('disconnect-relay-btn');
    const statusDiv = document.getElementById('relay-connection-status');
    const infoDisplay = document.getElementById('relay-info-display');
    const relayUrlInput = document.getElementById('relay-url-input');

    connectBtn.style.display = 'inline-block';
    disconnectBtn.style.display = 'none';
    connectBtn.disabled = false;
    relayUrlInput.disabled = false;

    statusDiv.textContent = 'NOT CONNECTED';
    statusDiv.className = 'status disconnected';

    infoDisplay.classList.add('hidden');

    // Reset configuration status
    updateConfigStatus(false);

    log('Disconnected from relay', 'INFO');
}
```

#### Update getRelayPubkey Function
Replace existing function (around line 3142):

```javascript
// Helper function to get relay pubkey from connected relay info
function getRelayPubkey() {
    if (relayInfo && relayInfo.pubkey) {
        return relayInfo.pubkey;
    }

    // Fallback to hardcoded value if no relay connected (for testing)
    console.warn('No relay connected, using fallback pubkey');
    return '4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa';
}
```

### 3. Event Handlers

Add event handlers in the DOMContentLoaded section:

```javascript
// Relay connection event handlers
const connectRelayBtn = document.getElementById('connect-relay-btn');
const disconnectRelayBtn = document.getElementById('disconnect-relay-btn');

if (connectRelayBtn) {
    connectRelayBtn.addEventListener('click', function(e) {
        e.preventDefault();
        connectToRelay().catch(error => {
            console.error('Connect to relay failed:', error);
        });
    });
}

if (disconnectRelayBtn) {
    disconnectRelayBtn.addEventListener('click', function(e) {
        e.preventDefault();
        disconnectFromRelay();
    });
}
```

### 4. Update Existing Functions

#### Update fetchConfiguration Function
Add relay connection check at the beginning:

```javascript
async function fetchConfiguration() {
    try {
        console.log('=== FETCHING CONFIGURATION VIA ADMIN API ===');

        // Check if relay is connected
        if (!isRelayConnected || !relayInfo) {
            throw new Error('Must be connected to relay first. Please connect to relay in the Relay Connection section.');
        }

        // ... rest of existing function
    } catch (error) {
        // ... existing error handling
    }
}
```

#### Update subscribeToConfiguration Function
Add relay connection check:

```javascript
async function subscribeToConfiguration() {
    try {
        console.log('=== STARTING SIMPLEPOOL CONFIGURATION SUBSCRIPTION ===');

        if (!isRelayConnected || !relayInfo) {
            console.error('Must be connected to relay first');
            return false;
        }

        // Use the relay URL from the connection section instead of the debug section
        const url = document.getElementById('relay-url-input').value.trim();

        // ... rest of existing function
    } catch (error) {
        // ... existing error handling
    }
}
```

### 5. Update UI Flow

#### Modify showMainInterface Function
Update to show relay connection requirement:

```javascript
function showMainInterface() {
    loginSection.classList.add('hidden');
    mainInterface.classList.remove('hidden');
    userPubkeyDisplay.textContent = userPubkey;

    // Show message about relay connection requirement
    if (!isRelayConnected) {
        log('Please connect to a relay to access admin functions', 'INFO');
    }
}
```

### 6. Remove/Update Debug Section

#### Option 1: Remove Debug Section Entirely
Remove the "DEBUG - TEST FETCH WITHOUT LOGIN" section (lines 335-385) since relay URL is now in the proper connection section.

#### Option 2: Keep Debug Section for Testing
Update the debug section to use the connected relay URL and add a note that it's for testing purposes.

### 7. Error Handling

Add comprehensive error handling for:
- Network timeouts
- Invalid relay URLs
- Missing NIP-11 support
- Invalid relay pubkey format
- WebSocket connection failures
- CORS issues

### 8. Security Considerations

- Validate relay pubkey format (64 hex characters)
- Verify relay identity before admin operations
- Handle CORS properly for NIP-11 requests
- Sanitize relay information display
- Warn users about connecting to untrusted relays

## Testing Plan

1. **NIP-11 Fetching**: Test with various relay URLs (localhost, remote relays)
2. **Error Handling**: Test with invalid URLs, non-Nostr servers, network failures
3. **WebSocket Connection**: Verify WebSocket connectivity after NIP-11 fetch
4. **Admin API Integration**: Ensure admin commands use correct relay pubkey
5. **UI Flow**: Test complete user journey from login → relay connection → admin operations

## Benefits

1. **Proper Relay Identification**: Uses actual relay pubkey instead of hardcoded value
2. **Better UX**: Clear connection flow and relay information display
3. **Protocol Compliance**: Implements NIP-11 standard for relay discovery
4. **Security**: Verifies relay identity before admin operations
5. **Flexibility**: Works with any NIP-11 compliant relay

## Migration Notes

- Existing users will need to connect to relay after this update
- Debug section can be kept for development/testing purposes
- All admin functions will require relay connection
- Relay pubkey will be dynamically fetched instead of hardcoded
397
src/config.c
397
src/config.c
@@ -112,6 +112,12 @@ static int get_cache_timeout(void) {
|
|||||||
return 300; // Default 5 minutes
|
return 300; // Default 5 minutes
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Helper function to safely return dynamically allocated string from static buffer
|
||||||
|
static char* safe_strdup_from_static(const char* static_str) {
|
||||||
|
if (!static_str) return NULL;
|
||||||
|
return strdup(static_str);
|
||||||
|
}
|
||||||
|
|
||||||
// Force cache refresh - invalidates current cache
|
// Force cache refresh - invalidates current cache
|
||||||
void force_config_cache_refresh(void) {
|
void force_config_cache_refresh(void) {
|
||||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||||
@@ -121,6 +127,87 @@ void force_config_cache_refresh(void) {
|
|||||||
log_info("Configuration cache forcibly invalidated");
|
log_info("Configuration cache forcibly invalidated");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update specific cache value without full refresh
|
||||||
|
int update_cache_value(const char* key, const char* value) {
|
||||||
|
if (!key || !value) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||||
|
|
||||||
|
// Update specific cache fields
|
||||||
|
if (strcmp(key, "admin_pubkey") == 0) {
|
||||||
|
strncpy(g_unified_cache.admin_pubkey, value, sizeof(g_unified_cache.admin_pubkey) - 1);
|
||||||
|
g_unified_cache.admin_pubkey[sizeof(g_unified_cache.admin_pubkey) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "relay_pubkey") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_pubkey, value, sizeof(g_unified_cache.relay_pubkey) - 1);
|
||||||
|
g_unified_cache.relay_pubkey[sizeof(g_unified_cache.relay_pubkey) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "auth_required") == 0) {
|
||||||
|
g_unified_cache.auth_required = (strcmp(value, "true") == 0) ? 1 : 0;
|
||||||
|
} else if (strcmp(key, "admin_enabled") == 0) {
|
||||||
|
g_unified_cache.admin_enabled = (strcmp(value, "true") == 0) ? 1 : 0;
|
||||||
|
} else if (strcmp(key, "max_file_size") == 0) {
|
||||||
|
g_unified_cache.max_file_size = atol(value);
|
||||||
|
} else if (strcmp(key, "nip42_mode") == 0) {
|
||||||
|
if (strcmp(value, "disabled") == 0) {
|
||||||
|
g_unified_cache.nip42_mode = 0;
|
||||||
|
} else if (strcmp(value, "required") == 0) {
|
||||||
|
g_unified_cache.nip42_mode = 2;
|
||||||
|
} else {
|
||||||
|
g_unified_cache.nip42_mode = 1; // Optional/enabled
|
||||||
|
}
|
||||||
|
} else if (strcmp(key, "nip42_challenge_timeout") == 0) {
|
||||||
|
g_unified_cache.nip42_challenge_timeout = atoi(value);
|
||||||
|
} else if (strcmp(key, "nip42_time_tolerance") == 0) {
|
||||||
|
g_unified_cache.nip42_time_tolerance = atoi(value);
|
||||||
|
} else if (strcmp(key, "nip70_protected_events_enabled") == 0) {
|
||||||
|
g_unified_cache.nip70_protected_events_enabled = (strcmp(value, "true") == 0) ? 1 : 0;
|
||||||
|
} else {
|
||||||
|
// For NIP-11 relay info fields, update the cache buffers
|
||||||
|
if (strcmp(key, "relay_name") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.name, value, sizeof(g_unified_cache.relay_info.name) - 1);
|
||||||
|
g_unified_cache.relay_info.name[sizeof(g_unified_cache.relay_info.name) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "relay_description") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.description, value, sizeof(g_unified_cache.relay_info.description) - 1);
|
||||||
|
g_unified_cache.relay_info.description[sizeof(g_unified_cache.relay_info.description) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "relay_contact") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.contact, value, sizeof(g_unified_cache.relay_info.contact) - 1);
|
||||||
|
g_unified_cache.relay_info.contact[sizeof(g_unified_cache.relay_info.contact) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "relay_software") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.software, value, sizeof(g_unified_cache.relay_info.software) - 1);
|
||||||
|
g_unified_cache.relay_info.software[sizeof(g_unified_cache.relay_info.software) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "relay_version") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.version, value, sizeof(g_unified_cache.relay_info.version) - 1);
|
||||||
|
g_unified_cache.relay_info.version[sizeof(g_unified_cache.relay_info.version) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "supported_nips") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.supported_nips_str, value, sizeof(g_unified_cache.relay_info.supported_nips_str) - 1);
|
||||||
|
g_unified_cache.relay_info.supported_nips_str[sizeof(g_unified_cache.relay_info.supported_nips_str) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "language_tags") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.language_tags_str, value, sizeof(g_unified_cache.relay_info.language_tags_str) - 1);
|
||||||
|
g_unified_cache.relay_info.language_tags_str[sizeof(g_unified_cache.relay_info.language_tags_str) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "relay_countries") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.relay_countries_str, value, sizeof(g_unified_cache.relay_info.relay_countries_str) - 1);
|
||||||
|
g_unified_cache.relay_info.relay_countries_str[sizeof(g_unified_cache.relay_info.relay_countries_str) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "posting_policy") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.posting_policy, value, sizeof(g_unified_cache.relay_info.posting_policy) - 1);
|
||||||
|
g_unified_cache.relay_info.posting_policy[sizeof(g_unified_cache.relay_info.posting_policy) - 1] = '\0';
|
||||||
|
} else if (strcmp(key, "payments_url") == 0) {
|
||||||
|
strncpy(g_unified_cache.relay_info.payments_url, value, sizeof(g_unified_cache.relay_info.payments_url) - 1);
|
||||||
|
g_unified_cache.relay_info.payments_url[sizeof(g_unified_cache.relay_info.payments_url) - 1] = '\0';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset cache expiration to extend validity
|
||||||
|
int cache_timeout = get_cache_timeout();
|
||||||
|
g_unified_cache.cache_expires = time(NULL) + cache_timeout;
|
||||||
|
|
||||||
|
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||||
|
|
||||||
|
log_info("Updated specific cache value");
|
||||||
|
printf(" Key: %s\n", key);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
// Refresh unified cache from database
|
// Refresh unified cache from database
|
||||||
static int refresh_unified_cache_from_table(void) {
|
static int refresh_unified_cache_from_table(void) {
|
||||||
if (!g_db) {
|
if (!g_db) {
|
||||||
@@ -174,6 +261,10 @@ static int refresh_unified_cache_from_table(void) {
|
|||||||
const char* time_tolerance = get_config_value_from_table("nip42_time_tolerance");
|
const char* time_tolerance = get_config_value_from_table("nip42_time_tolerance");
|
||||||
g_unified_cache.nip42_time_tolerance = time_tolerance ? atoi(time_tolerance) : 300;
|
g_unified_cache.nip42_time_tolerance = time_tolerance ? atoi(time_tolerance) : 300;
|
||||||
|
|
||||||
|
// Load NIP-70 protected events config
|
||||||
|
const char* nip70_enabled = get_config_value_from_table("nip70_protected_events_enabled");
|
||||||
|
g_unified_cache.nip70_protected_events_enabled = (nip70_enabled && strcmp(nip70_enabled, "true") == 0) ? 1 : 0;
|
||||||
|
|
||||||
// Set cache expiration
|
// Set cache expiration
|
||||||
int cache_timeout = get_cache_timeout();
|
int cache_timeout = get_cache_timeout();
|
||||||
g_unified_cache.cache_expires = time(NULL) + cache_timeout;
|
g_unified_cache.cache_expires = time(NULL) + cache_timeout;
|
||||||
@@ -399,10 +490,12 @@ const char* get_config_value(const char* key) {
|
|||||||
|
|
||||||
// Special fast path for frequently accessed keys via unified cache
|
// Special fast path for frequently accessed keys via unified cache
|
||||||
if (strcmp(key, "admin_pubkey") == 0) {
|
if (strcmp(key, "admin_pubkey") == 0) {
|
||||||
return get_admin_pubkey_cached();
|
const char* cached_value = get_admin_pubkey_cached();
|
||||||
|
return safe_strdup_from_static(cached_value);
|
||||||
}
|
}
|
||||||
if (strcmp(key, "relay_pubkey") == 0) {
|
if (strcmp(key, "relay_pubkey") == 0) {
|
||||||
return get_relay_pubkey_cached();
|
const char* cached_value = get_relay_pubkey_cached();
|
||||||
|
return safe_strdup_from_static(cached_value);
|
||||||
}
|
}
|
||||||
|
|
||||||
// For other keys, try config table first
|
// For other keys, try config table first
|
||||||
@@ -439,8 +532,9 @@ const char* get_config_value(const char* key) {
|
|||||||
strncpy(g_unified_cache.temp_buffer, cJSON_GetStringValue(tag_value),
|
strncpy(g_unified_cache.temp_buffer, cJSON_GetStringValue(tag_value),
|
||||||
sizeof(g_unified_cache.temp_buffer) - 1);
|
sizeof(g_unified_cache.temp_buffer) - 1);
|
||||||
g_unified_cache.temp_buffer[sizeof(g_unified_cache.temp_buffer) - 1] = '\0';
|
g_unified_cache.temp_buffer[sizeof(g_unified_cache.temp_buffer) - 1] = '\0';
|
||||||
|
const char* result = safe_strdup_from_static(g_unified_cache.temp_buffer);
|
||||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||||
return g_unified_cache.temp_buffer;
|
return result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -461,9 +555,13 @@ int get_config_int(const char* key, int default_value) {
|
|||||||
long val = strtol(str_value, &endptr, 10);
|
long val = strtol(str_value, &endptr, 10);
|
||||||
|
|
||||||
if (endptr == str_value || *endptr != '\0') {
|
if (endptr == str_value || *endptr != '\0') {
|
||||||
|
// Free the dynamically allocated string
|
||||||
|
free((char*)str_value);
|
||||||
return default_value;
|
return default_value;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Free the dynamically allocated string
|
||||||
|
free((char*)str_value);
|
||||||
return (int)val;
|
return (int)val;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -473,17 +571,22 @@ int get_config_bool(const char* key, int default_value) {
|
|||||||
return default_value;
|
return default_value;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
int result;
|
||||||
if (strcasecmp(str_value, "true") == 0 ||
|
if (strcasecmp(str_value, "true") == 0 ||
|
||||||
strcasecmp(str_value, "yes") == 0 ||
|
strcasecmp(str_value, "yes") == 0 ||
|
||||||
strcasecmp(str_value, "1") == 0) {
|
strcasecmp(str_value, "1") == 0) {
|
||||||
return 1;
|
result = 1;
|
||||||
} else if (strcasecmp(str_value, "false") == 0 ||
|
} else if (strcasecmp(str_value, "false") == 0 ||
|
||||||
strcasecmp(str_value, "no") == 0 ||
|
strcasecmp(str_value, "no") == 0 ||
|
||||||
strcasecmp(str_value, "0") == 0) {
|
strcasecmp(str_value, "0") == 0) {
|
||||||
return 0;
|
result = 0;
|
||||||
|
} else {
|
||||||
|
result = default_value;
|
||||||
}
|
}
|
||||||
|
|
||||||
return default_value;
|
// Free the dynamically allocated string
|
||||||
|
free((char*)str_value);
|
||||||
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
// ================================
|
// ================================
|
||||||
@@ -921,39 +1024,48 @@ int first_time_startup_sequence(const cli_options_t* cli_options) {
|
|||||||
// 1. Generate or use provided admin keypair
|
// 1. Generate or use provided admin keypair
|
||||||
unsigned char admin_privkey_bytes[32];
|
unsigned char admin_privkey_bytes[32];
|
||||||
char admin_privkey[65], admin_pubkey[65];
|
char admin_privkey[65], admin_pubkey[65];
|
||||||
|
int generated_admin_key = 0; // Track if we generated a new admin key
|
||||||
|
|
||||||
if (cli_options && strlen(cli_options->admin_privkey_override) == 64) {
|
if (cli_options && strlen(cli_options->admin_pubkey_override) == 64) {
|
||||||
// Use provided admin private key
|
// Use provided admin public key directly - skip private key generation entirely
|
||||||
log_info("Using provided admin private key override");
|
log_info("Using provided admin public key override - skipping private key generation");
|
||||||
strncpy(admin_privkey, cli_options->admin_privkey_override, sizeof(admin_privkey) - 1);
|
strncpy(admin_pubkey, cli_options->admin_pubkey_override, sizeof(admin_pubkey) - 1);
|
||||||
admin_privkey[sizeof(admin_privkey) - 1] = '\0';
|
admin_pubkey[sizeof(admin_pubkey) - 1] = '\0';
|
||||||
|
|
||||||
// Convert hex string to bytes
|
// Validate the public key format (must be 64 hex characters)
|
||||||
if (nostr_hex_to_bytes(admin_privkey, admin_privkey_bytes, 32) != NOSTR_SUCCESS) {
|
for (int i = 0; i < 64; i++) {
|
||||||
log_error("Failed to convert admin private key hex to bytes");
|
char c = admin_pubkey[i];
|
||||||
return -1;
|
if (!((c >= '0' && c <= '9') ||
|
||||||
|
(c >= 'a' && c <= 'f') ||
|
||||||
|
(c >= 'A' && c <= 'F'))) {
|
||||||
|
log_error("Invalid admin public key format - must contain only hex characters");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate the private key
|
// Skip private key generation - we only need the pubkey for admin verification
|
||||||
if (nostr_ec_private_key_verify(admin_privkey_bytes) != NOSTR_SUCCESS) {
|
// Set a dummy private key that will never be used (not displayed or stored)
|
||||||
log_error("Provided admin private key is invalid");
|
memset(admin_privkey_bytes, 0, 32); // Zero out for security
|
||||||
return -1;
|
memset(admin_privkey, 0, sizeof(admin_privkey)); // Zero out the hex string
|
||||||
}
|
generated_admin_key = 0; // Did not generate a new key
|
||||||
} else {
|
} else {
|
||||||
// Generate random admin keypair using /dev/urandom + nostr_core_lib
|
// Generate random admin keypair using /dev/urandom + nostr_core_lib
|
||||||
|
log_info("Generating random admin keypair");
|
||||||
if (generate_random_private_key_bytes(admin_privkey_bytes) != 0) {
|
if (generate_random_private_key_bytes(admin_privkey_bytes) != 0) {
|
||||||
log_error("Failed to generate admin private key");
|
log_error("Failed to generate admin private key");
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
nostr_bytes_to_hex(admin_privkey_bytes, 32, admin_privkey);
|
nostr_bytes_to_hex(admin_privkey_bytes, 32, admin_privkey);
|
||||||
}
|
|
||||||
|
|
||||||
unsigned char admin_pubkey_bytes[32];
|
// Derive public key from private key
|
||||||
if (nostr_ec_public_key_from_private_key(admin_privkey_bytes, admin_pubkey_bytes) != NOSTR_SUCCESS) {
|
unsigned char admin_pubkey_bytes[32];
|
||||||
log_error("Failed to derive admin public key");
|
if (nostr_ec_public_key_from_private_key(admin_privkey_bytes, admin_pubkey_bytes) != NOSTR_SUCCESS) {
|
||||||
return -1;
|
log_error("Failed to derive admin public key");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
nostr_bytes_to_hex(admin_pubkey_bytes, 32, admin_pubkey);
|
||||||
|
generated_admin_key = 1; // Generated a new key
|
||||||
}
|
}
|
||||||
nostr_bytes_to_hex(admin_pubkey_bytes, 32, admin_pubkey);
|
|
||||||
|
|
||||||
// 2. Generate or use provided relay keypair
|
// 2. Generate or use provided relay keypair
|
||||||
unsigned char relay_privkey_bytes[32];
|
unsigned char relay_privkey_bytes[32];
|
||||||
@@ -1011,48 +1123,40 @@ int first_time_startup_sequence(const cli_options_t* cli_options) {
g_temp_relay_privkey[sizeof(g_temp_relay_privkey) - 1] = '\0';
log_info("Relay private key cached for secure storage after database initialization");

-// 6. Create initial configuration event using defaults (without private key)
-cJSON* config_event = create_default_config_event(admin_privkey_bytes, relay_privkey, relay_pubkey, cli_options);
-if (!config_event) {
-log_error("Failed to create default configuration event");
-return -1;
-}
+// 6. Handle configuration setup - defaults will be populated after database initialization
+log_info("Configuration setup prepared - defaults will be populated after database initialization");

-// 7. Process configuration through admin API instead of storing in events table
-if (process_startup_config_event_with_fallback(config_event) == 0) {
-log_success("Initial configuration processed successfully through admin API");
+// CLI overrides will be applied after database initialization in main.c
+// This prevents "g_db is NULL" errors during first-time startup

+// 10. Print admin private key for user to save (only if we generated a new key)
+if (generated_admin_key) {
+printf("\n");
+printf("=================================================================\n");
+printf("IMPORTANT: SAVE THIS ADMIN PRIVATE KEY SECURELY!\n");
+printf("=================================================================\n");
+printf("Admin Private Key: %s\n", admin_privkey);
+printf("Admin Public Key: %s\n", admin_pubkey);
+printf("Relay Public Key: %s\n", relay_pubkey);
+printf("\nDatabase: %s\n", g_database_path);
+printf("\nThis admin private key is needed to update configuration!\n");
+printf("Store it safely - it will not be displayed again.\n");
+printf("=================================================================\n");
+printf("\n");
} else {
-log_warning("Failed to process initial configuration - will retry after database init");
-// Cache the event for later processing
-if (g_pending_config_event) {
-cJSON_Delete(g_pending_config_event);
-}
-g_pending_config_event = cJSON_Duplicate(config_event, 1);
+printf("\n");
+printf("=================================================================\n");
+printf("RELAY STARTUP COMPLETE\n");
+printf("=================================================================\n");
+printf("Using provided admin public key for authentication\n");
+printf("Admin Public Key: %s\n", admin_pubkey);
+printf("Relay Public Key: %s\n", relay_pubkey);
+printf("\nDatabase: %s\n", g_database_path);
+printf("=================================================================\n");
+printf("\n");
}

-// 8. Cache the current config
-if (g_current_config) {
-cJSON_Delete(g_current_config);
-}
-g_current_config = cJSON_Duplicate(config_event, 1);

-// 9. Clean up
-cJSON_Delete(config_event);

-// 10. Print admin private key for user to save
-printf("\n");
-printf("=================================================================\n");
-printf("IMPORTANT: SAVE THIS ADMIN PRIVATE KEY SECURELY!\n");
-printf("=================================================================\n");
-printf("Admin Private Key: %s\n", admin_privkey);
-printf("Admin Public Key: %s\n", admin_pubkey);
-printf("Relay Public Key: %s\n", relay_pubkey);
-printf("\nDatabase: %s\n", g_database_path);
-printf("\nThis admin private key is needed to update configuration!\n");
-printf("Store it safely - it will not be displayed again.\n");
-printf("=================================================================\n");
-printf("\n");

log_success("First-time startup sequence completed");
return 0;
}
@@ -1083,6 +1187,11 @@ int startup_existing_relay(const char* relay_pubkey) {
g_database_path[sizeof(g_database_path) - 1] = '\0';
free(db_name);

+// Ensure default configuration values are populated (for any missing keys)
+if (populate_default_config_values() != 0) {
+log_warning("Failed to populate default config values for existing relay - continuing");
+}

// Configuration will be migrated from events to table after database initialization
log_info("Configuration migration will be performed after database is available");
@@ -1829,12 +1938,71 @@ const char* get_config_value_from_table(const char* key) {
if (sqlite3_step(stmt) == SQLITE_ROW) {
const char* value = (char*)sqlite3_column_text(stmt, 0);
if (value) {
-// Use unified cache buffer with thread safety
-pthread_mutex_lock(&g_unified_cache.cache_lock);
-strncpy(g_unified_cache.temp_buffer, value, sizeof(g_unified_cache.temp_buffer) - 1);
-g_unified_cache.temp_buffer[sizeof(g_unified_cache.temp_buffer) - 1] = '\0';
-result = g_unified_cache.temp_buffer;
-pthread_mutex_unlock(&g_unified_cache.cache_lock);
+// For NIP-11 fields, store in cache buffers but return dynamically allocated strings for consistency
+if (strcmp(key, "relay_name") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.name, value, sizeof(g_unified_cache.relay_info.name) - 1);
+g_unified_cache.relay_info.name[sizeof(g_unified_cache.relay_info.name) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "relay_description") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.description, value, sizeof(g_unified_cache.relay_info.description) - 1);
+g_unified_cache.relay_info.description[sizeof(g_unified_cache.relay_info.description) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "relay_contact") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.contact, value, sizeof(g_unified_cache.relay_info.contact) - 1);
+g_unified_cache.relay_info.contact[sizeof(g_unified_cache.relay_info.contact) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "relay_software") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.software, value, sizeof(g_unified_cache.relay_info.software) - 1);
+g_unified_cache.relay_info.software[sizeof(g_unified_cache.relay_info.software) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "relay_version") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.version, value, sizeof(g_unified_cache.relay_info.version) - 1);
+g_unified_cache.relay_info.version[sizeof(g_unified_cache.relay_info.version) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "supported_nips") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.supported_nips_str, value, sizeof(g_unified_cache.relay_info.supported_nips_str) - 1);
+g_unified_cache.relay_info.supported_nips_str[sizeof(g_unified_cache.relay_info.supported_nips_str) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "language_tags") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.language_tags_str, value, sizeof(g_unified_cache.relay_info.language_tags_str) - 1);
+g_unified_cache.relay_info.language_tags_str[sizeof(g_unified_cache.relay_info.language_tags_str) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "relay_countries") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.relay_countries_str, value, sizeof(g_unified_cache.relay_info.relay_countries_str) - 1);
+g_unified_cache.relay_info.relay_countries_str[sizeof(g_unified_cache.relay_info.relay_countries_str) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "posting_policy") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.posting_policy, value, sizeof(g_unified_cache.relay_info.posting_policy) - 1);
+g_unified_cache.relay_info.posting_policy[sizeof(g_unified_cache.relay_info.posting_policy) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else if (strcmp(key, "payments_url") == 0) {
+pthread_mutex_lock(&g_unified_cache.cache_lock);
+strncpy(g_unified_cache.relay_info.payments_url, value, sizeof(g_unified_cache.relay_info.payments_url) - 1);
+g_unified_cache.relay_info.payments_url[sizeof(g_unified_cache.relay_info.payments_url) - 1] = '\0';
+result = strdup(value); // Return dynamically allocated copy
+pthread_mutex_unlock(&g_unified_cache.cache_lock);
+} else {
+// For other keys, return a dynamically allocated string to prevent buffer reuse
+result = strdup(value);
+}
}
}
@@ -1941,9 +2109,17 @@ int populate_default_config_values(void) {
category = "limits";
}

-// Determine if requires restart
+// Determine if requires restart (0 = dynamic, 1 = restart required)
int requires_restart = 0;
-if (strcmp(key, "relay_port") == 0) {
+// Restart required configs
+if (strcmp(key, "relay_port") == 0 ||
+strcmp(key, "max_connections") == 0 ||
+strcmp(key, "auth_enabled") == 0 ||
+strcmp(key, "nip42_auth_required") == 0 ||
+strcmp(key, "nip42_auth_required_kinds") == 0 ||
+strcmp(key, "nip42_challenge_timeout") == 0 ||
+strcmp(key, "database_path") == 0) {
requires_restart = 1;
}
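The restart classification above is repeated verbatim in populate_config_table_from_event() later in this diff. As a reading aid only, a minimal sketch of what a shared helper for that classification could look like (hypothetical name, not part of this change):

```c
#include <string.h>

/* Hypothetical helper consolidating the restart-required key list used above. */
static int config_key_requires_restart(const char* key) {
    static const char* restart_keys[] = {
        "relay_port", "max_connections", "auth_enabled",
        "nip42_auth_required", "nip42_auth_required_kinds",
        "nip42_challenge_timeout", "database_path", NULL
    };
    for (int i = 0; restart_keys[i] != NULL; i++) {
        if (strcmp(key, restart_keys[i]) == 0) {
            return 1; /* restart required */
        }
    }
    return 0; /* dynamic: applied without a restart */
}
```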
@@ -1954,7 +2130,7 @@ int populate_default_config_values(void) {
}
}

-log_success("Default configuration values populated");
+log_success("Default configuration values populated with restart requirements");
return 0;
}
@@ -3714,10 +3890,59 @@ int handle_config_update_unified(cJSON* event, char* error_message, size_t error
continue;
}

+// Check if this config requires restart
+const char* requires_restart_sql = "SELECT requires_restart FROM config WHERE key = ?";
+sqlite3_stmt* restart_stmt;
+int requires_restart = 0;
+
+if (sqlite3_prepare_v2(g_db, requires_restart_sql, -1, &restart_stmt, NULL) == SQLITE_OK) {
+sqlite3_bind_text(restart_stmt, 1, key, -1, SQLITE_STATIC);
+if (sqlite3_step(restart_stmt) == SQLITE_ROW) {
+requires_restart = sqlite3_column_int(restart_stmt, 0);
+}
+sqlite3_finalize(restart_stmt);
+}

// Update the configuration value in the table
if (update_config_in_table(key, value) == 0) {
updates_applied++;

+// For dynamic configs (requires_restart = 0), refresh cache immediately
+if (requires_restart == 0) {
+log_info("Dynamic config updated - refreshing cache");
+refresh_unified_cache_from_table();
+
+// Apply selective re-initialization for specific dynamic configs
+log_info("Applying selective re-initialization for dynamic config changes");
+if (strcmp(key, "max_subscriptions_per_client") == 0 ||
+strcmp(key, "max_total_subscriptions") == 0) {
+log_info("Subscription limits changed - updating subscription manager");
+update_subscription_manager_config();
+// Also refresh NIP-11 relay info since max_subscriptions_per_client affects limitation field
+log_info("Subscription limits changed - reinitializing relay info for NIP-11");
+init_relay_info();
+} else if (strcmp(key, "pow_min_difficulty") == 0 ||
+strcmp(key, "pow_mode") == 0) {
+log_info("PoW configuration changed - reinitializing PoW system");
+init_pow_config();
+} else if (strcmp(key, "nip40_expiration_enabled") == 0 ||
+strcmp(key, "nip40_expiration_strict") == 0 ||
+strcmp(key, "nip40_expiration_filter") == 0 ||
+strcmp(key, "nip40_expiration_grace_period") == 0) {
+log_info("Expiration configuration changed - reinitializing expiration system");
+init_expiration_config();
+} else if (strcmp(key, "relay_description") == 0 ||
+strcmp(key, "relay_contact") == 0 ||
+strcmp(key, "relay_software") == 0 ||
+strcmp(key, "relay_version") == 0 ||
+strcmp(key, "max_message_length") == 0 ||
+strcmp(key, "max_event_tags") == 0 ||
+strcmp(key, "max_content_length") == 0) {
+log_info("Relay information changed - reinitializing relay info");
+init_relay_info();
+}
+}

// Add successful config to response array
cJSON* success_config = cJSON_CreateObject();
cJSON_AddStringToObject(success_config, "key", key);
@@ -3725,10 +3950,11 @@ int handle_config_update_unified(cJSON* event, char* error_message, size_t error
cJSON_AddStringToObject(success_config, "data_type", data_type);
cJSON_AddStringToObject(success_config, "category", category);
cJSON_AddStringToObject(success_config, "status", "updated");
+cJSON_AddBoolToObject(success_config, "requires_restart", requires_restart);
cJSON_AddItemToArray(processed_configs, success_config);

log_success("Config field updated successfully");
-printf(" Updated: %s = %s\n", key, value);
+printf(" Updated: %s = %s (restart: %s)\n", key, value, requires_restart ? "yes" : "no");
} else {
log_error("Failed to update config field in database");
printf(" Failed to update: %s = %s\n", key, value);
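Taken together with the previous hunk, each entry in processed_configs now reports whether the relay must be restarted for the change to take effect. A rough illustration of one such entry, built with the same cJSON calls as above (the key and field values here are invented for the example):

```c
#include <stdio.h>
#include <stdlib.h>
#include <cjson/cJSON.h>

int main(void) {
    /* Mirrors the calls in handle_config_update_unified() for a sample key. */
    cJSON* success_config = cJSON_CreateObject();
    cJSON_AddStringToObject(success_config, "key", "pow_min_difficulty");
    cJSON_AddStringToObject(success_config, "data_type", "integer");
    cJSON_AddStringToObject(success_config, "category", "limits");
    cJSON_AddStringToObject(success_config, "status", "updated");
    cJSON_AddBoolToObject(success_config, "requires_restart", 0);

    char* json = cJSON_PrintUnformatted(success_config);
    printf("%s\n", json);
    /* Prints something like:
       {"key":"pow_min_difficulty","data_type":"integer","category":"limits","status":"updated","requires_restart":false} */
    free(json);
    cJSON_Delete(success_config);
    return 0;
}
```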
@@ -3905,12 +4131,17 @@ const char* get_config_value_hybrid(const char* key) {
if (is_config_table_ready()) {
const char* table_value = get_config_value_from_table(key);
if (table_value) {
-return table_value;
+return table_value; // Already dynamically allocated
}
}

-// Fall back to event-based config
-return get_config_value(key);
+// Fall back to event-based config, but ensure it's dynamically allocated
+const char* fallback_value = get_config_value(key);
+if (fallback_value) {
+return strdup(fallback_value); // Make a copy since fallback might be static
+}
+
+return NULL;
}

// Check if config table is ready
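With this change every successful return path of get_config_value_hybrid() hands back a heap-allocated string (strdup() on the fallback here, and get_config_value_from_table() now strdup()s as well), so ownership moves to the caller. A minimal usage sketch under that assumption:

```c
/* Sketch only: caller-side handling of the new ownership contract. */
const char* name = get_config_value_hybrid("relay_name");
if (name) {
    printf("relay name: %s\n", name);
    free((char*)name); /* returned string is dynamically allocated; the caller frees it */
}
```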
@@ -4074,9 +4305,17 @@ int populate_config_table_from_event(const cJSON* event) {
category = "limits";
}

-// Determine if requires restart
+// Determine if requires restart (0 = dynamic, 1 = restart required)
int requires_restart = 0;
-if (strcmp(key, "relay_port") == 0) {
+// Restart required configs
+if (strcmp(key, "relay_port") == 0 ||
+strcmp(key, "max_connections") == 0 ||
+strcmp(key, "auth_enabled") == 0 ||
+strcmp(key, "nip42_auth_required") == 0 ||
+strcmp(key, "nip42_auth_required_kinds") == 0 ||
+strcmp(key, "nip42_challenge_timeout") == 0 ||
+strcmp(key, "database_path") == 0) {
requires_restart = 1;
}
@@ -40,6 +40,7 @@ typedef struct {
int nip42_mode;
int nip42_challenge_timeout;
int nip42_time_tolerance;
+int nip70_protected_events_enabled;

// Static buffer for config values (replaces static buffers in get_config_value functions)
char temp_buffer[CONFIG_VALUE_MAX_LENGTH];
@@ -56,6 +57,11 @@ typedef struct {
char version[64];
char privacy_policy[RELAY_URL_MAX_LENGTH];
char terms_of_service[RELAY_URL_MAX_LENGTH];
+// Raw string values for parsing into cJSON arrays
+char supported_nips_str[CONFIG_VALUE_MAX_LENGTH];
+char language_tags_str[CONFIG_VALUE_MAX_LENGTH];
+char relay_countries_str[CONFIG_VALUE_MAX_LENGTH];
+// Parsed cJSON arrays
cJSON* supported_nips;
cJSON* limitation;
cJSON* retention;
@@ -96,7 +102,7 @@ typedef struct {
// Command line options structure for first-time startup
typedef struct {
int port_override; // -1 = not set, >0 = port value
-char admin_privkey_override[65]; // Empty string = not set, 64-char hex = override
+char admin_pubkey_override[65]; // Empty string = not set, 64-char hex = override
char relay_privkey_override[65]; // Empty string = not set, 64-char hex = override
int strict_port; // 0 = allow port increment, 1 = fail if exact port unavailable
} cli_options_t;
@@ -3,6 +3,7 @@

#include <cjson/cJSON.h>
#include "config.h" // For cli_options_t definition
+#include "main.h" // For relay metadata constants

/*
* Default Configuration Event Template
@@ -28,15 +29,24 @@ static const struct {
{"nip42_auth_required_kinds", "4,14"}, // Default: DM kinds require auth
{"nip42_challenge_expiration", "600"}, // 10 minutes

+// NIP-70 Protected Events
+{"nip70_protected_events_enabled", "false"},

// Server Core Settings
{"relay_port", "8888"},
{"max_connections", "100"},

// NIP-11 Relay Information (relay keys will be populated at runtime)
-{"relay_description", "High-performance C Nostr relay with SQLite storage"},
-{"relay_contact", ""},
-{"relay_software", "https://git.laantungir.net/laantungir/c-relay.git"},
-{"relay_version", "v1.0.0"},
+{"relay_name", RELAY_NAME},
+{"relay_description", RELAY_DESCRIPTION},
+{"relay_contact", RELAY_CONTACT},
+{"relay_software", RELAY_SOFTWARE},
+{"relay_version", RELAY_VERSION},
+{"supported_nips", SUPPORTED_NIPS},
+{"language_tags", LANGUAGE_TAGS},
+{"relay_countries", RELAY_COUNTRIES},
+{"posting_policy", POSTING_POLICY},
+{"payments_url", PAYMENTS_URL},

// NIP-13 Proof of Work (pow_min_difficulty = 0 means PoW disabled)
{"pow_min_difficulty", "0"},
198	src/main.c
@@ -120,6 +120,9 @@ int nostr_validate_unified_request(const char* json_string, size_t json_length);
// Forward declaration for admin event processing (kind 23456)
int process_admin_event_in_config(cJSON* event, char* error_message, size_t error_size, struct lws* wsi);

+// Forward declaration for NIP-45 COUNT message handling
+int handle_count_message(const char* sub_id, cJSON* filters, struct lws *wsi, struct per_session_data *pss);

// Forward declaration for enhanced admin event authorization
int is_authorized_admin_event(cJSON* event, char* error_message, size_t error_size);
@@ -348,18 +351,18 @@ int init_database(const char* database_path_override) {
}

if (!has_auth_rules) {
-// Add auth_rules table
+// Add auth_rules table matching sql_schema.h
const char* create_auth_rules_sql =
"CREATE TABLE IF NOT EXISTS auth_rules ("
" id INTEGER PRIMARY KEY AUTOINCREMENT,"
-" rule_type TEXT NOT NULL," // 'pubkey_whitelist', 'pubkey_blacklist', 'hash_blacklist'
-" operation TEXT NOT NULL," // 'event', 'event_kind_1', etc.
-" rule_target TEXT NOT NULL," // pubkey, hash, or other identifier
-" enabled INTEGER DEFAULT 1," // 0 = disabled, 1 = enabled
-" priority INTEGER DEFAULT 1000," // Lower numbers = higher priority
-" description TEXT," // Optional description
-" created_at INTEGER DEFAULT (strftime('%s', 'now')),"
-" UNIQUE(rule_type, operation, rule_target)"
+" rule_type TEXT NOT NULL CHECK (rule_type IN ('whitelist', 'blacklist', 'rate_limit', 'auth_required')),"
+" pattern_type TEXT NOT NULL CHECK (pattern_type IN ('pubkey', 'kind', 'ip', 'global')),"
+" pattern_value TEXT,"
+" action TEXT NOT NULL CHECK (action IN ('allow', 'deny', 'require_auth', 'rate_limit')),"
+" parameters TEXT,"
+" active INTEGER NOT NULL DEFAULT 1,"
+" created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),"
+" updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))"
");";

char* error_msg = NULL;
@@ -373,6 +376,24 @@ int init_database(const char* database_path_override) {
return -1;
}
log_success("Created auth_rules table");

+// Add indexes for auth_rules table
+const char* create_auth_rules_indexes_sql =
+"CREATE INDEX IF NOT EXISTS idx_auth_rules_pattern ON auth_rules(pattern_type, pattern_value);"
+"CREATE INDEX IF NOT EXISTS idx_auth_rules_type ON auth_rules(rule_type);"
+"CREATE INDEX IF NOT EXISTS idx_auth_rules_active ON auth_rules(active);";
+
+char* index_error_msg = NULL;
+int index_rc = sqlite3_exec(g_db, create_auth_rules_indexes_sql, NULL, NULL, &index_error_msg);
+if (index_rc != SQLITE_OK) {
+char index_error_log[512];
+snprintf(index_error_log, sizeof(index_error_log), "Failed to create auth_rules indexes: %s",
+index_error_msg ? index_error_msg : "unknown error");
+log_error(index_error_log);
+if (index_error_msg) sqlite3_free(index_error_msg);
+return -1;
+}
+log_success("Created auth_rules indexes");
} else {
log_info("auth_rules table already exists, skipping creation");
}
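For orientation, a hedged sketch of what inserting one rule into the reshaped auth_rules table could look like, using the same sqlite3_exec() pattern as the surrounding code (the rule semantics are inferred from the CHECK constraints above, and the pubkey is a placeholder):

```c
/* Illustration only: add a pubkey whitelist rule to the new schema. */
const char* insert_rule_sql =
    "INSERT INTO auth_rules (rule_type, pattern_type, pattern_value, action) "
    "VALUES ('whitelist', 'pubkey', "
    "'0000000000000000000000000000000000000000000000000000000000000001', 'allow');";
char* rule_err = NULL;
if (sqlite3_exec(g_db, insert_rule_sql, NULL, NULL, &rule_err) != SQLITE_OK) {
    log_error(rule_err ? rule_err : "Failed to insert example auth rule");
    if (rule_err) sqlite3_free(rule_err);
}
```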
@@ -863,7 +884,7 @@ int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, stru
}

// Build SQL query based on filter - exclude ephemeral events (kinds 20000-29999) from historical queries
-char sql[1024] = "SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind < 20000";
+char sql[1024] = "SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND (kind < 20000 OR kind >= 30000)";
char* sql_ptr = sql + strlen(sql);
int remaining = sizeof(sql) - strlen(sql);
@@ -954,6 +975,71 @@ int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, stru
}
}

+// Handle tag filters (#e, #p, #t, etc.)
+cJSON* filter_item = NULL;
+cJSON_ArrayForEach(filter_item, filter) {
+const char* filter_key = filter_item->string;
+if (filter_key && filter_key[0] == '#' && strlen(filter_key) > 1) {
+// This is a tag filter like "#e", "#p", etc.
+const char* tag_name = filter_key + 1; // Get the tag name (e, p, t, type, etc.)
+
+if (cJSON_IsArray(filter_item)) {
+int tag_value_count = cJSON_GetArraySize(filter_item);
+if (tag_value_count > 0) {
+// Use EXISTS with LIKE to check for matching tags
+snprintf(sql_ptr, remaining, " AND EXISTS (SELECT 1 FROM json_each(json(tags)) WHERE json_extract(value, '$[0]') = '%s' AND json_extract(value, '$[1]') IN (", tag_name);
+sql_ptr += strlen(sql_ptr);
+remaining = sizeof(sql) - strlen(sql);
+
+for (int i = 0; i < tag_value_count; i++) {
+cJSON* tag_value = cJSON_GetArrayItem(filter_item, i);
+if (cJSON_IsString(tag_value)) {
+if (i > 0) {
+snprintf(sql_ptr, remaining, ",");
+sql_ptr++;
+remaining--;
+}
+snprintf(sql_ptr, remaining, "'%s'", cJSON_GetStringValue(tag_value));
+sql_ptr += strlen(sql_ptr);
+remaining = sizeof(sql) - strlen(sql);
+}
+}
+snprintf(sql_ptr, remaining, "))");
+sql_ptr += strlen(sql_ptr);
+remaining = sizeof(sql) - strlen(sql);
+}
+}
+}
+}
+
+// Handle search filter (NIP-50)
+cJSON* search = cJSON_GetObjectItem(filter, "search");
+if (search && cJSON_IsString(search)) {
+const char* search_term = cJSON_GetStringValue(search);
+if (search_term && strlen(search_term) > 0) {
+// Search in both content and tag values using LIKE
+// Escape single quotes in search term for SQL safety
+char escaped_search[256];
+size_t escaped_len = 0;
+for (size_t i = 0; search_term[i] && escaped_len < sizeof(escaped_search) - 1; i++) {
+if (search_term[i] == '\'') {
+escaped_search[escaped_len++] = '\'';
+escaped_search[escaped_len++] = '\'';
+} else {
+escaped_search[escaped_len++] = search_term[i];
+}
+}
+escaped_search[escaped_len] = '\0';
+
+// Add search conditions for content and tags
+// Use tags LIKE to search within the JSON string representation of tags
+snprintf(sql_ptr, remaining, " AND (content LIKE '%%%s%%' OR tags LIKE '%%\"%s\"%%')",
+escaped_search, escaped_search);
+sql_ptr += strlen(sql_ptr);
+remaining = sizeof(sql) - strlen(sql);
+}
+}

// Handle since filter
cJSON* since = cJSON_GetObjectItem(filter, "since");
if (since && cJSON_IsNumber(since)) {
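To make the string assembly above concrete: for a subscription filter such as {"#e": ["abc", "def"]} the loop would, assuming the snprintf calls all fit in the buffer, append roughly the following fragment to the base query (tag values here are placeholders):

```c
/* Illustration of the generated WHERE-clause fragment for {"#e": ["abc", "def"]}. */
const char* example_fragment =
    " AND EXISTS (SELECT 1 FROM json_each(json(tags)) "
    "WHERE json_extract(value, '$[0]') = 'e' "
    "AND json_extract(value, '$[1]') IN ('abc','def'))";
```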
@@ -1204,9 +1290,9 @@ void print_usage(const char* program_name) {
printf(" -h, --help Show this help message\n");
printf(" -v, --version Show version information\n");
printf(" -p, --port PORT Override relay port (first-time startup only)\n");
-printf(" -a, --admin-privkey HEX Override admin private key (64-char hex)\n");
-printf(" -r, --relay-privkey HEX Override relay private key (64-char hex)\n");
printf(" --strict-port Fail if exact port is unavailable (no port increment)\n");
+printf(" -a, --admin-pubkey HEX Override admin public key (64-char hex)\n");
+printf(" -r, --relay-privkey HEX Override relay private key (64-char hex)\n");
printf("\n");
printf("Configuration:\n");
printf(" This relay uses event-based configuration stored in the database.\n");
@@ -1221,12 +1307,12 @@ void print_usage(const char* program_name) {
printf("\n");
printf("Examples:\n");
printf(" %s # Start relay (auto-configure on first run)\n", program_name);
printf(" %s -p 8080 # First-time setup with port 8080\n", program_name);
printf(" %s --port 9000 # First-time setup with port 9000\n", program_name);
printf(" %s --strict-port # Fail if default port 8888 is unavailable\n", program_name);
printf(" %s -p 8080 --strict-port # Fail if port 8080 is unavailable\n", program_name);
printf(" %s --help # Show this help\n", program_name);
printf(" %s --version # Show version info\n", program_name);
printf("\n");
}
@@ -1242,7 +1328,7 @@ int main(int argc, char* argv[]) {
// Initialize CLI options structure
cli_options_t cli_options = {
.port_override = -1, // -1 = not set
-.admin_privkey_override = {0}, // Empty string = not set
+.admin_pubkey_override = {0}, // Empty string = not set
.relay_privkey_override = {0}, // Empty string = not set
.strict_port = 0 // 0 = allow port increment (default)
};
@@ -1279,17 +1365,17 @@ int main(int argc, char* argv[]) {
char port_msg[128];
snprintf(port_msg, sizeof(port_msg), "Port override specified: %d", cli_options.port_override);
log_info(port_msg);
-} else if (strcmp(argv[i], "-a") == 0 || strcmp(argv[i], "--admin-privkey") == 0) {
-// Admin private key override option
+} else if (strcmp(argv[i], "-a") == 0 || strcmp(argv[i], "--admin-pubkey") == 0) {
+// Admin public key override option
if (i + 1 >= argc) {
-log_error("Admin privkey option requires a value. Use --help for usage information.");
+log_error("Admin pubkey option requires a value. Use --help for usage information.");
print_usage(argv[0]);
return 1;
}

-// Validate private key format (must be 64 hex characters)
+// Validate public key format (must be 64 hex characters)
if (strlen(argv[i + 1]) != 64) {
-log_error("Invalid admin private key length. Must be exactly 64 hex characters.");
+log_error("Invalid admin public key length. Must be exactly 64 hex characters.");
print_usage(argv[0]);
return 1;
}
@@ -1298,17 +1384,17 @@ int main(int argc, char* argv[]) {
for (int j = 0; j < 64; j++) {
char c = argv[i + 1][j];
if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'))) {
-log_error("Invalid admin private key format. Must contain only hex characters (0-9, a-f, A-F).");
+log_error("Invalid admin public key format. Must contain only hex characters (0-9, a-f, A-F).");
print_usage(argv[0]);
return 1;
}
}

-strncpy(cli_options.admin_privkey_override, argv[i + 1], sizeof(cli_options.admin_privkey_override) - 1);
-cli_options.admin_privkey_override[sizeof(cli_options.admin_privkey_override) - 1] = '\0';
+strncpy(cli_options.admin_pubkey_override, argv[i + 1], sizeof(cli_options.admin_pubkey_override) - 1);
+cli_options.admin_pubkey_override[sizeof(cli_options.admin_pubkey_override) - 1] = '\0';
i++; // Skip the key argument

-log_info("Admin private key override specified");
+log_info("Admin public key override specified");
} else if (strcmp(argv[i], "-r") == 0 || strcmp(argv[i], "--relay-privkey") == 0) {
// Relay private key override option
if (i + 1 >= argc) {
@@ -1407,18 +1493,45 @@ int main(int argc, char* argv[]) {
return 1;
}

-// Systematically add pubkeys to config table
-if (add_pubkeys_to_config_table() != 0) {
-log_warning("Failed to add pubkeys to config table systematically");
-} else {
-log_success("Pubkeys added to config table systematically");
+// Handle configuration setup after database is initialized
+// Always populate defaults directly in config table (abandoning legacy event signing)
+log_info("Populating config table with defaults after database initialization");
+
+// Populate default config values in table
+if (populate_default_config_values() != 0) {
+log_error("Failed to populate default config values");
+cleanup_configuration_system();
+nostr_cleanup();
+close_database();
+return 1;
}

-// Retry storing the configuration event now that database is initialized
-if (retry_store_initial_config_event() != 0) {
-log_warning("Failed to store initial configuration event after database init");
+// Apply CLI overrides now that database is available
+if (cli_options.port_override > 0) {
+char port_str[16];
+snprintf(port_str, sizeof(port_str), "%d", cli_options.port_override);
+if (update_config_in_table("relay_port", port_str) != 0) {
+log_error("Failed to update relay port override in config table");
+cleanup_configuration_system();
+nostr_cleanup();
+close_database();
+return 1;
+}
+log_info("Applied port override from command line");
+printf(" Port: %d (overriding default)\n", cli_options.port_override);
}

+// Add pubkeys to config table
+if (add_pubkeys_to_config_table() != 0) {
+log_error("Failed to add pubkeys to config table");
+cleanup_configuration_system();
+nostr_cleanup();
+close_database();
+return 1;
+}
+
+log_success("Configuration populated directly in config table after database initialization");
+
// Now store the pubkeys in config table since database is available
const char* admin_pubkey = get_admin_pubkey_cached();
const char* relay_pubkey_from_cache = get_relay_pubkey_cached();
@@ -1520,6 +1633,21 @@ int main(int argc, char* argv[]) {
log_warning("No configuration event found in existing database");
}

+// Apply CLI overrides for existing relay (port override should work even for existing relays)
+if (cli_options.port_override > 0) {
+char port_str[16];
+snprintf(port_str, sizeof(port_str), "%d", cli_options.port_override);
+if (update_config_in_table("relay_port", port_str) != 0) {
+log_error("Failed to update relay port override in config table for existing relay");
+cleanup_configuration_system();
+nostr_cleanup();
+close_database();
+return 1;
+}
+log_info("Applied port override from command line for existing relay");
+printf(" Port: %d (overriding configured port)\n", cli_options.port_override);
+}

// Free memory
free(relay_pubkey);
for (int i = 0; existing_files[i]; i++) {
30	src/main.h	Normal file
@@ -0,0 +1,30 @@
+/*
+* C-Relay Main Header - Version and Metadata Information
+*
+* This header contains version information and relay metadata.
+* Version macros are auto-updated by the build system.
+* Relay metadata should be manually maintained.
+*/
+
+#ifndef MAIN_H
+#define MAIN_H
+
+// Version information (auto-updated by build system)
+#define VERSION "v0.4.6"
+#define VERSION_MAJOR 0
+#define VERSION_MINOR 4
+#define VERSION_PATCH 6
+
+// Relay metadata (authoritative source for NIP-11 information)
+#define RELAY_NAME "C-Relay"
+#define RELAY_DESCRIPTION "High-performance C Nostr relay with SQLite storage"
+#define RELAY_CONTACT ""
+#define RELAY_SOFTWARE "https://git.laantungir.net/laantungir/c-relay.git"
+#define RELAY_VERSION VERSION // Use the same version as the build
+#define SUPPORTED_NIPS "1,2,4,9,11,12,13,15,16,20,22,33,40,42,50,70"
+#define LANGUAGE_TAGS ""
+#define RELAY_COUNTRIES ""
+#define POSTING_POLICY ""
+#define PAYMENTS_URL ""
+
+#endif /* MAIN_H */
204	src/nip011.c
@@ -34,76 +34,213 @@ extern unified_config_cache_t g_unified_cache;
/////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////

+// Helper function to parse comma-separated string into cJSON array
+cJSON* parse_comma_separated_array(const char* csv_string) {
+log_info("parse_comma_separated_array called");
+if (!csv_string || strlen(csv_string) == 0) {
+log_info("Empty or null csv_string, returning empty array");
+return cJSON_CreateArray();
+}
+
+log_info("Creating cJSON array");
+cJSON* array = cJSON_CreateArray();
+if (!array) {
+log_info("Failed to create cJSON array");
+return NULL;
+}
+
+log_info("Duplicating csv_string");
+char* csv_copy = strdup(csv_string);
+if (!csv_copy) {
+log_info("Failed to duplicate csv_string");
+cJSON_Delete(array);
+return NULL;
+}
+
+log_info("Starting token parsing");
+char* token = strtok(csv_copy, ",");
+while (token) {
+log_info("Processing token");
+// Trim whitespace
+while (*token == ' ') token++;
+char* end = token + strlen(token) - 1;
+while (end > token && *end == ' ') *end-- = '\0';
+
+if (strlen(token) > 0) {
+log_info("Token has content, parsing");
+// Try to parse as number first (for supported_nips)
+char* endptr;
+long num = strtol(token, &endptr, 10);
+if (*endptr == '\0') {
+log_info("Token is number, adding to array");
+// It's a number
+cJSON_AddItemToArray(array, cJSON_CreateNumber(num));
+} else {
+log_info("Token is string, adding to array");
+// It's a string
+cJSON_AddItemToArray(array, cJSON_CreateString(token));
+}
+} else {
+log_info("Token is empty, skipping");
+}
+token = strtok(NULL, ",");
+}
+
+log_info("Freeing csv_copy");
+free(csv_copy);
+log_info("Returning parsed array");
+return array;
+}
+
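A short usage sketch of the helper added above: tokens that parse fully as integers become JSON numbers and everything else becomes a string, so a SUPPORTED_NIPS-style list turns directly into the NIP-11 array (the input below is illustrative):

```c
/* Example use of parse_comma_separated_array() with a made-up NIP list. */
cJSON* nips = parse_comma_separated_array("1, 11, 42");
char* printed = cJSON_PrintUnformatted(nips);
printf("%s\n", printed); /* [1,11,42] */
free(printed);
cJSON_Delete(nips);
```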
// Initialize relay information using configuration system
void init_relay_info() {
+log_info("Initializing relay information from configuration...");
+
// Get all config values first (without holding mutex to avoid deadlock)
+// Note: These may be dynamically allocated strings that need to be freed
+log_info("Fetching relay configuration values...");
const char* relay_name = get_config_value("relay_name");
+log_info("relay_name fetched");
const char* relay_description = get_config_value("relay_description");
+log_info("relay_description fetched");
const char* relay_software = get_config_value("relay_software");
+log_info("relay_software fetched");
const char* relay_version = get_config_value("relay_version");
+log_info("relay_version fetched");
const char* relay_contact = get_config_value("relay_contact");
+log_info("relay_contact fetched");
const char* relay_pubkey = get_config_value("relay_pubkey");
+log_info("relay_pubkey fetched");
+const char* supported_nips_csv = get_config_value("supported_nips");
+log_info("supported_nips fetched");
+const char* language_tags_csv = get_config_value("language_tags");
+log_info("language_tags fetched");
+const char* relay_countries_csv = get_config_value("relay_countries");
+log_info("relay_countries fetched");
+const char* posting_policy = get_config_value("posting_policy");
+log_info("posting_policy fetched");
+const char* payments_url = get_config_value("payments_url");
+log_info("payments_url fetched");

// Get config values for limitations
+log_info("Fetching limitation configuration values...");
int max_message_length = get_config_int("max_message_length", 16384);
+log_info("max_message_length fetched");
int max_subscriptions_per_client = get_config_int("max_subscriptions_per_client", 20);
+log_info("max_subscriptions_per_client fetched");
int max_limit = get_config_int("max_limit", 5000);
+log_info("max_limit fetched");
int max_event_tags = get_config_int("max_event_tags", 100);
+log_info("max_event_tags fetched");
int max_content_length = get_config_int("max_content_length", 8196);
+log_info("max_content_length fetched");
int default_limit = get_config_int("default_limit", 500);
+log_info("default_limit fetched");
int admin_enabled = get_config_bool("admin_enabled", 0);
+log_info("admin_enabled fetched");

pthread_mutex_lock(&g_unified_cache.cache_lock);

// Update relay information fields
+log_info("Storing string values in cache...");
if (relay_name) {
+log_info("Storing relay_name");
strncpy(g_unified_cache.relay_info.name, relay_name, sizeof(g_unified_cache.relay_info.name) - 1);
+free((char*)relay_name); // Free dynamically allocated string
+log_info("relay_name stored and freed");
} else {
+log_info("Using default relay_name");
strncpy(g_unified_cache.relay_info.name, "C Nostr Relay", sizeof(g_unified_cache.relay_info.name) - 1);
}

if (relay_description) {
+log_info("Storing relay_description");
strncpy(g_unified_cache.relay_info.description, relay_description, sizeof(g_unified_cache.relay_info.description) - 1);
+free((char*)relay_description); // Free dynamically allocated string
+log_info("relay_description stored and freed");
} else {
+log_info("Using default relay_description");
strncpy(g_unified_cache.relay_info.description, "A high-performance Nostr relay implemented in C with SQLite storage", sizeof(g_unified_cache.relay_info.description) - 1);
}

if (relay_software) {
+log_info("Storing relay_software");
strncpy(g_unified_cache.relay_info.software, relay_software, sizeof(g_unified_cache.relay_info.software) - 1);
+free((char*)relay_software); // Free dynamically allocated string
+log_info("relay_software stored and freed");
} else {
+log_info("Using default relay_software");
strncpy(g_unified_cache.relay_info.software, "https://git.laantungir.net/laantungir/c-relay.git", sizeof(g_unified_cache.relay_info.software) - 1);
}

if (relay_version) {
+log_info("Storing relay_version");
strncpy(g_unified_cache.relay_info.version, relay_version, sizeof(g_unified_cache.relay_info.version) - 1);
+free((char*)relay_version); // Free dynamically allocated string
+log_info("relay_version stored and freed");
} else {
+log_info("Using default relay_version");
strncpy(g_unified_cache.relay_info.version, "0.2.0", sizeof(g_unified_cache.relay_info.version) - 1);
}

if (relay_contact) {
+log_info("Storing relay_contact");
strncpy(g_unified_cache.relay_info.contact, relay_contact, sizeof(g_unified_cache.relay_info.contact) - 1);
+free((char*)relay_contact); // Free dynamically allocated string
+log_info("relay_contact stored and freed");
}

if (relay_pubkey) {
+log_info("Storing relay_pubkey");
strncpy(g_unified_cache.relay_info.pubkey, relay_pubkey, sizeof(g_unified_cache.relay_info.pubkey) - 1);
+free((char*)relay_pubkey); // Free dynamically allocated string
+log_info("relay_pubkey stored and freed");
}

-// Initialize supported NIPs array
-g_unified_cache.relay_info.supported_nips = cJSON_CreateArray();
-if (g_unified_cache.relay_info.supported_nips) {
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(1)); // NIP-01: Basic protocol
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(9)); // NIP-09: Event deletion
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(11)); // NIP-11: Relay information
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(13)); // NIP-13: Proof of Work
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(15)); // NIP-15: EOSE
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(20)); // NIP-20: Command results
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(40)); // NIP-40: Expiration Timestamp
-cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(42)); // NIP-42: Authentication
+if (posting_policy) {
+log_info("Storing posting_policy");
+strncpy(g_unified_cache.relay_info.posting_policy, posting_policy, sizeof(g_unified_cache.relay_info.posting_policy) - 1);
+free((char*)posting_policy); // Free dynamically allocated string
+log_info("posting_policy stored and freed");
+}
+
+if (payments_url) {
+log_info("Storing payments_url");
+strncpy(g_unified_cache.relay_info.payments_url, payments_url, sizeof(g_unified_cache.relay_info.payments_url) - 1);
+free((char*)payments_url); // Free dynamically allocated string
+log_info("payments_url stored and freed");
+}
+
+// Initialize supported NIPs array from config
+log_info("Initializing supported_nips array");
+if (supported_nips_csv) {
+log_info("Parsing supported_nips from config");
+g_unified_cache.relay_info.supported_nips = parse_comma_separated_array(supported_nips_csv);
+log_info("supported_nips parsed successfully");
+free((char*)supported_nips_csv); // Free dynamically allocated string
+log_info("supported_nips_csv freed");
+} else {
+log_info("Using default supported_nips");
+// Fallback to default supported NIPs
+g_unified_cache.relay_info.supported_nips = cJSON_CreateArray();
+if (g_unified_cache.relay_info.supported_nips) {
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(1)); // NIP-01: Basic protocol
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(9)); // NIP-09: Event deletion
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(11)); // NIP-11: Relay information
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(13)); // NIP-13: Proof of Work
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(15)); // NIP-15: EOSE
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(20)); // NIP-20: Command results
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(40)); // NIP-40: Expiration Timestamp
+cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(42)); // NIP-42: Authentication
+}
+log_info("Default supported_nips created");
}

// Initialize server limitations using configuration
+log_info("Initializing server limitations");
g_unified_cache.relay_info.limitation = cJSON_CreateObject();
if (g_unified_cache.relay_info.limitation) {
+log_info("Adding limitation fields");
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_message_length", max_message_length);
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_subscriptions", max_subscriptions_per_client);
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_limit", max_limit);
@@ -117,29 +254,58 @@ void init_relay_info() {
|
|||||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_lower_limit", 0);
|
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_lower_limit", 0);
|
||||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_upper_limit", 2147483647);
|
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_upper_limit", 2147483647);
|
||||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "default_limit", default_limit);
|
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "default_limit", default_limit);
|
||||||
|
log_info("Limitation fields added");
|
||||||
|
} else {
|
||||||
|
log_info("Failed to create limitation object");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize empty retention policies (can be configured later)
|
// Initialize empty retention policies (can be configured later)
|
||||||
|
log_info("Initializing retention policies");
|
||||||
g_unified_cache.relay_info.retention = cJSON_CreateArray();
|
g_unified_cache.relay_info.retention = cJSON_CreateArray();
|
||||||
|
|
||||||
// Initialize language tags - set to global for now
|
// Initialize language tags from config
|
||||||
g_unified_cache.relay_info.language_tags = cJSON_CreateArray();
|
log_info("Initializing language_tags");
|
||||||
if (g_unified_cache.relay_info.language_tags) {
|
if (language_tags_csv) {
|
||||||
cJSON_AddItemToArray(g_unified_cache.relay_info.language_tags, cJSON_CreateString("*"));
|
log_info("Parsing language_tags from config");
|
||||||
|
g_unified_cache.relay_info.language_tags = parse_comma_separated_array(language_tags_csv);
|
||||||
|
log_info("language_tags parsed successfully");
|
||||||
|
free((char*)language_tags_csv); // Free dynamically allocated string
|
||||||
|
log_info("language_tags_csv freed");
|
||||||
|
} else {
|
||||||
|
log_info("Using default language_tags");
|
||||||
|
// Fallback to global
|
||||||
|
g_unified_cache.relay_info.language_tags = cJSON_CreateArray();
|
||||||
|
if (g_unified_cache.relay_info.language_tags) {
|
||||||
|
cJSON_AddItemToArray(g_unified_cache.relay_info.language_tags, cJSON_CreateString("*"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize relay countries - set to global for now
|
// Initialize relay countries from config
|
||||||
g_unified_cache.relay_info.relay_countries = cJSON_CreateArray();
|
log_info("Initializing relay_countries");
|
||||||
if (g_unified_cache.relay_info.relay_countries) {
|
if (relay_countries_csv) {
|
||||||
cJSON_AddItemToArray(g_unified_cache.relay_info.relay_countries, cJSON_CreateString("*"));
|
log_info("Parsing relay_countries from config");
|
||||||
|
g_unified_cache.relay_info.relay_countries = parse_comma_separated_array(relay_countries_csv);
|
||||||
|
log_info("relay_countries parsed successfully");
|
||||||
|
free((char*)relay_countries_csv); // Free dynamically allocated string
|
||||||
|
log_info("relay_countries_csv freed");
|
||||||
|
} else {
|
||||||
|
log_info("Using default relay_countries");
|
||||||
|
// Fallback to global
|
||||||
|
g_unified_cache.relay_info.relay_countries = cJSON_CreateArray();
|
||||||
|
if (g_unified_cache.relay_info.relay_countries) {
|
||||||
|
cJSON_AddItemToArray(g_unified_cache.relay_info.relay_countries, cJSON_CreateString("*"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initialize content tags as empty array
|
// Initialize content tags as empty array
|
||||||
|
log_info("Initializing tags");
|
||||||
g_unified_cache.relay_info.tags = cJSON_CreateArray();
|
g_unified_cache.relay_info.tags = cJSON_CreateArray();
|
||||||
|
|
||||||
// Initialize fees as empty object (no payment required by default)
|
// Initialize fees as empty object (no payment required by default)
|
||||||
|
log_info("Initializing fees");
|
||||||
g_unified_cache.relay_info.fees = cJSON_CreateObject();
|
g_unified_cache.relay_info.fees = cJSON_CreateObject();
|
||||||
|
|
||||||
|
log_info("Unlocking cache mutex");
|
||||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||||
|
|
||||||
log_success("Relay information initialized with default values");
|
log_success("Relay information initialized with default values");
|
||||||
|
|||||||
@@ -60,6 +60,7 @@ void init_pow_config() {
|
|||||||
g_unified_cache.pow_config.enabled = 0;
|
g_unified_cache.pow_config.enabled = 0;
|
||||||
log_info("PoW validation disabled via configuration");
|
log_info("PoW validation disabled via configuration");
|
||||||
}
|
}
|
||||||
|
free((char*)pow_mode); // Free dynamically allocated string
|
||||||
} else {
|
} else {
|
||||||
// Default to basic mode
|
// Default to basic mode
|
||||||
g_unified_cache.pow_config.validation_flags = NOSTR_POW_VALIDATE_BASIC;
|
g_unified_cache.pow_config.validation_flags = NOSTR_POW_VALIDATE_BASIC;
|
||||||
|
|||||||
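
A quick manual sanity check for the CSV-driven fields above, sketched under a few assumptions: the config keys are stored as `supported_nips` and `relay_countries` (not confirmed by this diff), the SQLite database lives under `build/` as the test scripts further down expect, and the relay serves NIP-11 on port 8888.

```
# Inspect the raw CSV values the relay will parse at startup
sqlite3 build/*.db "SELECT key, value FROM config WHERE key IN ('supported_nips', 'relay_countries');"

# Confirm they surface in the NIP-11 document
curl -s -H "Accept: application/nostr+json" http://localhost:8888 | jq '{supported_nips, relay_countries}'
```
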
@@ -214,9 +214,11 @@ int ginxsom_request_validator_init(const char *db_path, const char *app_name) {

     const char* nip42_timeout = get_config_value("nip42_challenge_timeout");
     g_challenge_manager.timeout_seconds = nip42_timeout ? atoi(nip42_timeout) : 600;
+    if (nip42_timeout) free((char*)nip42_timeout);

     const char* nip42_tolerance = get_config_value("nip42_time_tolerance");
     g_challenge_manager.time_tolerance_seconds = nip42_tolerance ? atoi(nip42_tolerance) : 300;
+    if (nip42_tolerance) free((char*)nip42_tolerance);

     g_challenge_manager.last_cleanup = time(NULL);

@@ -232,13 +234,20 @@ int ginxsom_request_validator_init(const char *db_path, const char *app_name) {
 int nostr_auth_rules_enabled(void) {
     // Use unified cache from config.c
     const char* auth_enabled = get_config_value("auth_enabled");
+    int result = 0;
     if (auth_enabled && strcmp(auth_enabled, "true") == 0) {
-        return 1;
+        result = 1;
     }
+    if (auth_enabled) free((char*)auth_enabled);

     // Also check legacy key
     const char* auth_rules_enabled = get_config_value("auth_rules_enabled");
-    return (auth_rules_enabled && strcmp(auth_rules_enabled, "true") == 0) ? 1 : 0;
+    if (auth_rules_enabled && strcmp(auth_rules_enabled, "true") == 0) {
+        result = 1;
+    }
+    if (auth_rules_enabled) free((char*)auth_rules_enabled);

+    return result;
 }

 ///////////////////////////////////////////////////////////////////////////////////////
@@ -344,9 +353,11 @@ int nostr_validate_unified_request(const char* json_string, size_t json_length)
     const char* nip42_enabled = get_config_value("nip42_auth_enabled");
     if (nip42_enabled && strcmp(nip42_enabled, "false") == 0) {
         validator_debug_log("VALIDATOR_DEBUG: STEP 8 FAILED - NIP-42 is disabled\n");
+        free((char*)nip42_enabled);
         cJSON_Delete(event);
         return NOSTR_ERROR_NIP42_DISABLED;
     }
+    if (nip42_enabled) free((char*)nip42_enabled);

     // TODO: Implement full NIP-42 challenge validation
     // For now, accept all valid NIP-42 events

335  src/websockets.c
@@ -74,6 +74,7 @@ int is_event_expired(cJSON* event, time_t current_time);

 // Forward declarations for subscription handling
 int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, struct per_session_data *pss);
+int handle_count_message(const char* sub_id, cJSON* filters, struct lws *wsi, struct per_session_data *pss);

 // Forward declarations for NOTICE message support
 void send_notice_message(struct lws* wsi, const char* message);
@@ -414,6 +415,54 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
             // Cleanup event JSON string
             free(event_json_str);

+            // Check for NIP-70 protected events
+            if (result == 0) {
+                // Check if event has protected tag ["-"]
+                int is_protected_event = 0;
+                cJSON* tags = cJSON_GetObjectItem(event, "tags");
+                if (tags && cJSON_IsArray(tags)) {
+                    cJSON* tag = NULL;
+                    cJSON_ArrayForEach(tag, tags) {
+                        if (cJSON_IsArray(tag) && cJSON_GetArraySize(tag) >= 1) {
+                            cJSON* tag_name = cJSON_GetArrayItem(tag, 0);
+                            if (tag_name && cJSON_IsString(tag_name) &&
+                                strcmp(cJSON_GetStringValue(tag_name), "-") == 0) {
+                                is_protected_event = 1;
+                                break;
+                            }
+                        }
+                    }
+                }
+
+                if (is_protected_event) {
+                    // Check if protected events are enabled using unified cache
+                    int protected_events_enabled = g_unified_cache.nip70_protected_events_enabled;
+
+                    if (!protected_events_enabled) {
+                        // Protected events not supported
+                        result = -1;
+                        strncpy(error_message, "blocked: protected events not supported", sizeof(error_message) - 1);
+                        error_message[sizeof(error_message) - 1] = '\0';
+                        log_warning("Protected event rejected: protected events not enabled");
+                    } else {
+                        // Protected events enabled - check authentication
+                        cJSON* pubkey_obj = cJSON_GetObjectItem(event, "pubkey");
+                        const char* event_pubkey = pubkey_obj ? cJSON_GetStringValue(pubkey_obj) : NULL;
+
+                        if (!pss || !pss->authenticated ||
+                            !event_pubkey || strcmp(pss->authenticated_pubkey, event_pubkey) != 0) {
+                            // Not authenticated or pubkey mismatch
+                            result = -1;
+                            strncpy(error_message, "auth-required: protected event requires authentication", sizeof(error_message) - 1);
+                            error_message[sizeof(error_message) - 1] = '\0';
+                            log_warning("Protected event rejected: authentication required");
+                        } else {
+                            log_info("Protected event accepted: authenticated publisher");
+                        }
+                    }
+                }
+            }
+
             // Check for admin events (kind 23456) and intercept them
             if (result == 0) {
                 cJSON* kind_obj = cJSON_GetObjectItem(event, "kind");
@@ -619,6 +668,41 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
                     }
                     cJSON_Delete(eose_response);
                 }
+            } else if (strcmp(msg_type, "COUNT") == 0) {
+                // Check NIP-42 authentication for COUNT requests if required
+                if (pss && pss->nip42_auth_required_subscriptions && !pss->authenticated) {
+                    if (!pss->auth_challenge_sent) {
+                        send_nip42_auth_challenge(wsi, pss);
+                    } else {
+                        send_notice_message(wsi, "NIP-42 authentication required for count requests");
+                        log_warning("COUNT rejected: NIP-42 authentication required");
+                    }
+                    cJSON_Delete(json);
+                    free(message);
+                    return 0;
+                }
+
+                // Handle COUNT message
+                cJSON* sub_id = cJSON_GetArrayItem(json, 1);
+
+                if (sub_id && cJSON_IsString(sub_id)) {
+                    const char* subscription_id = cJSON_GetStringValue(sub_id);
+
+                    // Create array of filter objects from position 2 onwards
+                    cJSON* filters = cJSON_CreateArray();
+                    int json_size = cJSON_GetArraySize(json);
+                    for (int i = 2; i < json_size; i++) {
+                        cJSON* filter = cJSON_GetArrayItem(json, i);
+                        if (filter) {
+                            cJSON_AddItemToArray(filters, cJSON_Duplicate(filter, 1));
+                        }
+                    }
+
+                    handle_count_message(subscription_id, filters, wsi, pss);
+
+                    // Clean up the filters array we created
+                    cJSON_Delete(filters);
+                }
             } else if (strcmp(msg_type, "CLOSE") == 0) {
                 // Handle CLOSE message
                 cJSON* sub_id = cJSON_GetArrayItem(json, 1);
@@ -899,3 +983,254 @@ int start_websocket_relay(int port_override, int strict_port) {
     log_success("WebSocket relay shut down cleanly");
     return 0;
 }
+
+// Handle NIP-45 COUNT message
+int handle_count_message(const char* sub_id, cJSON* filters, struct lws *wsi, struct per_session_data *pss) {
+    (void)pss; // Suppress unused parameter warning
+    log_info("Handling COUNT message for subscription");
+
+    if (!cJSON_IsArray(filters)) {
+        log_error("COUNT filters is not an array");
+        return 0;
+    }
+
+    int total_count = 0;
+
+    // Process each filter in the array
+    for (int i = 0; i < cJSON_GetArraySize(filters); i++) {
+        cJSON* filter = cJSON_GetArrayItem(filters, i);
+        if (!filter || !cJSON_IsObject(filter)) {
+            log_warning("Invalid filter object in COUNT");
+            continue;
+        }
+
+        // Build SQL COUNT query based on filter - exclude ephemeral events (kinds 20000-29999) from historical queries
+        char sql[1024] = "SELECT COUNT(*) FROM events WHERE 1=1 AND (kind < 20000 OR kind >= 30000)";
+        char* sql_ptr = sql + strlen(sql);
+        int remaining = sizeof(sql) - strlen(sql);
+
+        // Note: Expiration filtering will be done at application level
+        // after retrieving events to ensure compatibility with all SQLite versions
+
+        // Handle kinds filter
+        cJSON* kinds = cJSON_GetObjectItem(filter, "kinds");
+        if (kinds && cJSON_IsArray(kinds)) {
+            int kind_count = cJSON_GetArraySize(kinds);
+            if (kind_count > 0) {
+                snprintf(sql_ptr, remaining, " AND kind IN (");
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+
+                for (int k = 0; k < kind_count; k++) {
+                    cJSON* kind = cJSON_GetArrayItem(kinds, k);
+                    if (cJSON_IsNumber(kind)) {
+                        if (k > 0) {
+                            snprintf(sql_ptr, remaining, ",");
+                            sql_ptr++;
+                            remaining--;
+                        }
+                        snprintf(sql_ptr, remaining, "%d", (int)cJSON_GetNumberValue(kind));
+                        sql_ptr += strlen(sql_ptr);
+                        remaining = sizeof(sql) - strlen(sql);
+                    }
+                }
+                snprintf(sql_ptr, remaining, ")");
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+            }
+        }
+
+        // Handle authors filter
+        cJSON* authors = cJSON_GetObjectItem(filter, "authors");
+        if (authors && cJSON_IsArray(authors)) {
+            int author_count = cJSON_GetArraySize(authors);
+            if (author_count > 0) {
+                snprintf(sql_ptr, remaining, " AND pubkey IN (");
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+
+                for (int a = 0; a < author_count; a++) {
+                    cJSON* author = cJSON_GetArrayItem(authors, a);
+                    if (cJSON_IsString(author)) {
+                        if (a > 0) {
+                            snprintf(sql_ptr, remaining, ",");
+                            sql_ptr++;
+                            remaining--;
+                        }
+                        snprintf(sql_ptr, remaining, "'%s'", cJSON_GetStringValue(author));
+                        sql_ptr += strlen(sql_ptr);
+                        remaining = sizeof(sql) - strlen(sql);
+                    }
+                }
+                snprintf(sql_ptr, remaining, ")");
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+            }
+        }
+
+        // Handle ids filter
+        cJSON* ids = cJSON_GetObjectItem(filter, "ids");
+        if (ids && cJSON_IsArray(ids)) {
+            int id_count = cJSON_GetArraySize(ids);
+            if (id_count > 0) {
+                snprintf(sql_ptr, remaining, " AND id IN (");
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+
+                for (int i = 0; i < id_count; i++) {
+                    cJSON* id = cJSON_GetArrayItem(ids, i);
+                    if (cJSON_IsString(id)) {
+                        if (i > 0) {
+                            snprintf(sql_ptr, remaining, ",");
+                            sql_ptr++;
+                            remaining--;
+                        }
+                        snprintf(sql_ptr, remaining, "'%s'", cJSON_GetStringValue(id));
+                        sql_ptr += strlen(sql_ptr);
+                        remaining = sizeof(sql) - strlen(sql);
+                    }
+                }
+                snprintf(sql_ptr, remaining, ")");
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+            }
+        }
+
+        // Handle tag filters (#e, #p, #t, etc.)
+        cJSON* filter_item = NULL;
+        cJSON_ArrayForEach(filter_item, filter) {
+            const char* filter_key = filter_item->string;
+            if (filter_key && filter_key[0] == '#' && strlen(filter_key) > 1) {
+                // This is a tag filter like "#e", "#p", etc.
+                const char* tag_name = filter_key + 1; // Get the tag name (e, p, t, type, etc.)
+
+                if (cJSON_IsArray(filter_item)) {
+                    int tag_value_count = cJSON_GetArraySize(filter_item);
+                    if (tag_value_count > 0) {
+                        // Use EXISTS with JSON extraction to check for matching tags
+                        snprintf(sql_ptr, remaining, " AND EXISTS (SELECT 1 FROM json_each(json(tags)) WHERE json_extract(value, '$[0]') = '%s' AND json_extract(value, '$[1]') IN (", tag_name);
+                        sql_ptr += strlen(sql_ptr);
+                        remaining = sizeof(sql) - strlen(sql);
+
+                        for (int i = 0; i < tag_value_count; i++) {
+                            cJSON* tag_value = cJSON_GetArrayItem(filter_item, i);
+                            if (cJSON_IsString(tag_value)) {
+                                if (i > 0) {
+                                    snprintf(sql_ptr, remaining, ",");
+                                    sql_ptr++;
+                                    remaining--;
+                                }
+                                snprintf(sql_ptr, remaining, "'%s'", cJSON_GetStringValue(tag_value));
+                                sql_ptr += strlen(sql_ptr);
+                                remaining = sizeof(sql) - strlen(sql);
+                            }
+                        }
+                        snprintf(sql_ptr, remaining, "))");
+                        sql_ptr += strlen(sql_ptr);
+                        remaining = sizeof(sql) - strlen(sql);
+                    }
+                }
+            }
+        }
+
+        // Handle search filter (NIP-50)
+        cJSON* search = cJSON_GetObjectItem(filter, "search");
+        if (search && cJSON_IsString(search)) {
+            const char* search_term = cJSON_GetStringValue(search);
+            if (search_term && strlen(search_term) > 0) {
+                // Search in both content and tag values using LIKE
+                // Escape single quotes in search term for SQL safety
+                char escaped_search[256];
+                size_t escaped_len = 0;
+                for (size_t i = 0; search_term[i] && escaped_len < sizeof(escaped_search) - 1; i++) {
+                    if (search_term[i] == '\'') {
+                        escaped_search[escaped_len++] = '\'';
+                        escaped_search[escaped_len++] = '\'';
+                    } else {
+                        escaped_search[escaped_len++] = search_term[i];
+                    }
+                }
+                escaped_search[escaped_len] = '\0';
+
+                // Add search conditions for content and tags
+                // Use tags LIKE to search within the JSON string representation of tags
+                snprintf(sql_ptr, remaining, " AND (content LIKE '%%%s%%' OR tags LIKE '%%\"%s\"%%')",
+                         escaped_search, escaped_search);
+                sql_ptr += strlen(sql_ptr);
+                remaining = sizeof(sql) - strlen(sql);
+            }
+        }
+
+        // Handle since filter
+        cJSON* since = cJSON_GetObjectItem(filter, "since");
+        if (since && cJSON_IsNumber(since)) {
+            snprintf(sql_ptr, remaining, " AND created_at >= %ld", (long)cJSON_GetNumberValue(since));
+            sql_ptr += strlen(sql_ptr);
+            remaining = sizeof(sql) - strlen(sql);
+        }
+
+        // Handle until filter
+        cJSON* until = cJSON_GetObjectItem(filter, "until");
+        if (until && cJSON_IsNumber(until)) {
+            snprintf(sql_ptr, remaining, " AND created_at <= %ld", (long)cJSON_GetNumberValue(until));
+            sql_ptr += strlen(sql_ptr);
+            remaining = sizeof(sql) - strlen(sql);
+        }
+
+        // Debug: Log the SQL query being executed
+        char debug_msg[1280];
+        snprintf(debug_msg, sizeof(debug_msg), "Executing COUNT SQL: %s", sql);
+        log_info(debug_msg);
+
+        // Execute count query
+        sqlite3_stmt* stmt;
+        int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
+        if (rc != SQLITE_OK) {
+            char error_msg[256];
+            snprintf(error_msg, sizeof(error_msg), "Failed to prepare COUNT query: %s", sqlite3_errmsg(g_db));
+            log_error(error_msg);
+            continue;
+        }
+
+        int filter_count = 0;
+        if (sqlite3_step(stmt) == SQLITE_ROW) {
+            filter_count = sqlite3_column_int(stmt, 0);
+        }
+
+        char count_debug[128];
+        snprintf(count_debug, sizeof(count_debug), "Filter %d returned count: %d", i + 1, filter_count);
+        log_info(count_debug);
+
+        sqlite3_finalize(stmt);
+        total_count += filter_count;
+    }
+
+    char total_debug[128];
+    snprintf(total_debug, sizeof(total_debug), "Total COUNT result: %d", total_count);
+    log_info(total_debug);
+
+    // Send COUNT response - NIP-45 format: ["COUNT", <subscription_id>, {"count": <count>}]
+    cJSON* count_response = cJSON_CreateArray();
+    cJSON_AddItemToArray(count_response, cJSON_CreateString("COUNT"));
+    cJSON_AddItemToArray(count_response, cJSON_CreateString(sub_id));
+
+    // Create count object as per NIP-45 specification
+    cJSON* count_obj = cJSON_CreateObject();
+    cJSON_AddNumberToObject(count_obj, "count", total_count);
+    cJSON_AddItemToArray(count_response, count_obj);
+
+    char *count_str = cJSON_Print(count_response);
+    if (count_str) {
+        size_t count_len = strlen(count_str);
+        unsigned char *buf = malloc(LWS_PRE + count_len);
+        if (buf) {
+            memcpy(buf + LWS_PRE, count_str, count_len);
+            lws_write(wsi, buf + LWS_PRE, count_len, LWS_WRITE_TEXT);
+            free(buf);
+        }
+        free(count_str);
+    }
+    cJSON_Delete(count_response);
+
+    return total_count;
+}
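
A minimal manual exercise of the COUNT path above, using the same tools the test scripts below rely on (websocat and jq installed, relay listening on ws://127.0.0.1:8888):

```
# Per NIP-45, ask how many kind-1 events match the filter
echo '["COUNT","manual_check",{"kinds":[1]}]' | timeout 3s websocat ws://127.0.0.1:8888

# The relay replies with ["COUNT","manual_check",{"count":<n>}]; extract the number
echo '["COUNT","manual_check",{"kinds":[1]}]' | timeout 3s websocat ws://127.0.0.1:8888 \
    | grep '"COUNT"' | head -1 | jq -r '.[2].count'
```
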

133  test_dynamic_config.sh  (Executable file)
@@ -0,0 +1,133 @@
#!/bin/bash

# Test dynamic config updates without restart

set -e

# Configuration from relay startup
ADMIN_PRIVKEY="ddea442930976541e199a05248eb6cd92f2a65ba366a883a8f6880add9bdc9c9"
RELAY_PUBKEY="1bd4a5e2e32401737f8c16cc0dfa89b93f25f395770a2896fe78c9fb61582dfc"
RELAY_URL="ws://localhost:8888"

# Colors
GREEN='\033[0;32m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m'

log_info() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

log_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Check if nak is available
if ! command -v nak &> /dev/null; then
    log_error "nak command not found. Please install nak first."
    exit 1
fi

log_info "Testing dynamic config updates without restart..."

# Test 1: Check current NIP-11 info
log_info "Checking current NIP-11 relay info..."
CURRENT_DESC=$(curl -s -H "Accept: application/nostr+json" http://localhost:8888 | jq -r '.description')
log_info "Current description: $CURRENT_DESC"

# Test 2: Update relay description dynamically
NEW_DESC="Dynamic Config Test - Updated at $(date)"
log_info "Updating relay description to: $NEW_DESC"

COMMAND="[\"config_update\", [{\"key\": \"relay_description\", \"value\": \"$NEW_DESC\", \"data_type\": \"string\", \"category\": \"relay\"}]]"

# Encrypt the command
ENCRYPTED_COMMAND=$(nak encrypt "$COMMAND" --sec "$ADMIN_PRIVKEY" --recipient-pubkey "$RELAY_PUBKEY")

if [ -z "$ENCRYPTED_COMMAND" ]; then
    log_error "Failed to encrypt config update command"
    exit 1
fi

# Create admin event
ADMIN_EVENT=$(nak event \
    --kind 23456 \
    --content "$ENCRYPTED_COMMAND" \
    --sec "$ADMIN_PRIVKEY" \
    --tag "p=$RELAY_PUBKEY")

# Send the admin command
log_info "Sending config update command..."
ADMIN_RESULT=$(echo "$ADMIN_EVENT" | nak event "$RELAY_URL")

if echo "$ADMIN_RESULT" | grep -q "error\|failed\|denied"; then
    log_error "Failed to send config update: $ADMIN_RESULT"
    exit 1
fi

log_success "Config update command sent successfully"

# Wait for processing
sleep 3

# Test 3: Check if NIP-11 info updated without restart
log_info "Checking if NIP-11 info was updated without restart..."
UPDATED_DESC=$(curl -s -H "Accept: application/nostr+json" http://localhost:8888 | jq -r '.description')

if [ "$UPDATED_DESC" = "$NEW_DESC" ]; then
    log_success "SUCCESS: Relay description updated dynamically without restart!"
    log_success "Old: $CURRENT_DESC"
    log_success "New: $UPDATED_DESC"
else
    log_error "FAILED: Relay description was not updated"
    log_error "Expected: $NEW_DESC"
    log_error "Got: $UPDATED_DESC"
    exit 1
fi

# Test 4: Test another dynamic config - max_subscriptions_per_client
log_info "Testing another dynamic config: max_subscriptions_per_client"

# Get current value from database
OLD_LIMIT=$(sqlite3 build/*.db "SELECT value FROM config WHERE key = 'max_subscriptions_per_client';" 2>/dev/null || echo "25")
log_info "Current max_subscriptions_per_client: $OLD_LIMIT"

NEW_LIMIT=50

COMMAND2="[\"config_update\", [{\"key\": \"max_subscriptions_per_client\", \"value\": \"$NEW_LIMIT\", \"data_type\": \"integer\", \"category\": \"limits\"}]]"

ENCRYPTED_COMMAND2=$(nak encrypt "$COMMAND2" --sec "$ADMIN_PRIVKEY" --recipient-pubkey "$RELAY_PUBKEY")

ADMIN_EVENT2=$(nak event \
    --kind 23456 \
    --content "$ENCRYPTED_COMMAND2" \
    --sec "$ADMIN_PRIVKEY" \
    --tag "p=$RELAY_PUBKEY")

log_info "Updating max_subscriptions_per_client to $NEW_LIMIT..."
ADMIN_RESULT2=$(echo "$ADMIN_EVENT2" | nak event "$RELAY_URL")

if echo "$ADMIN_RESULT2" | grep -q "error\|failed\|denied"; then
    log_error "Failed to send second config update: $ADMIN_RESULT2"
    exit 1
fi

sleep 3

# Check updated value from database
UPDATED_LIMIT=$(sqlite3 build/*.db "SELECT value FROM config WHERE key = 'max_subscriptions_per_client';" 2>/dev/null || echo "25")

if [ "$UPDATED_LIMIT" = "$NEW_LIMIT" ]; then
    log_success "SUCCESS: max_subscriptions_per_client updated dynamically!"
    log_success "Old: $OLD_LIMIT, New: $UPDATED_LIMIT"
else
    log_error "FAILED: max_subscriptions_per_client was not updated"
    log_error "Expected: $NEW_LIMIT, Got: $UPDATED_LIMIT"
fi

log_success "Dynamic config update testing completed successfully!"

97  test_nip50_search.sh  (Normal file)
@@ -0,0 +1,97 @@
#!/bin/bash

# Test script for NIP-50 search functionality
# This script tests the new search field in filter objects

echo "=== Testing NIP-50 Search Functionality ==="

# Function to send WebSocket message and capture response
send_ws_message() {
    local message="$1"
    echo "Sending: $message"
    echo "$message" | websocat ws://127.0.0.1:8888 --text --no-close --one-message 2>/dev/null
}

# Function to publish an event
publish_event() {
    local content="$1"
    local kind="${2:-1}"
    local tags="${3:-[]}"

    # Create event JSON
    local event="[\"EVENT\", {\"id\": \"\", \"pubkey\": \"\", \"created_at\": $(date +%s), \"kind\": $kind, \"tags\": $tags, \"content\": \"$content\", \"sig\": \"\"}]"

    # Send the event
    send_ws_message "$event"
}

# Function to search for events
search_events() {
    local search_term="$1"
    local sub_id="${2:-search_test}"

    # Create search filter
    local filter="{\"search\": \"$search_term\"}"
    local req="[\"REQ\", \"$sub_id\", $filter]"

    # Send the search request
    send_ws_message "$req"

    # Wait a moment for response
    sleep 0.5

    # Send CLOSE to end subscription
    local close="[\"CLOSE\", \"$sub_id\"]"
    send_ws_message "$close"
}

# Function to count events with search
count_events() {
    local search_term="$1"
    local sub_id="${2:-count_test}"

    # Create count filter with search
    local filter="{\"search\": \"$search_term\"}"
    local count_req="[\"COUNT\", \"$sub_id\", $filter]"

    # Send the count request
    send_ws_message "$count_req"
}

echo "Publishing test events with searchable content..."

# Publish some test events with different content
publish_event "This is a test message about Bitcoin"
publish_event "Another message about Lightning Network"
publish_event "Nostr protocol discussion"
publish_event "Random content without keywords"
publish_event "Bitcoin and Lightning are great technologies"
publish_event "Discussion about Nostr and Bitcoin integration"

echo "Waiting for events to be stored..."
sleep 2

echo ""
echo "Testing search functionality..."

echo "1. Searching for 'Bitcoin':"
search_events "Bitcoin"

echo ""
echo "2. Searching for 'Nostr':"
search_events "Nostr"

echo ""
echo "3. Searching for 'Lightning':"
search_events "Lightning"

echo ""
echo "4. Testing COUNT with search:"
count_events "Bitcoin"

echo ""
echo "5. Testing COUNT with search for 'Nostr':"
count_events "Nostr"

echo ""
echo "=== NIP-50 Search Test Complete ==="

450  tests/45_nip_test.sh  (Executable file)
@@ -0,0 +1,450 @@
#!/bin/bash

# NIP-45 COUNT Message Test - Test counting functionality
# Tests COUNT messages with various filters to verify correct event counting

set -e # Exit on any error

# Color constants
RED='\033[31m'
GREEN='\033[32m'
YELLOW='\033[33m'
BLUE='\033[34m'
BOLD='\033[1m'
RESET='\033[0m'

# Test configuration
RELAY_URL="ws://127.0.0.1:8888"
TEST_PRIVATE_KEY="nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99"

# Print functions
print_header() {
    echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
}

print_step() {
    echo -e "${YELLOW}[STEP]${RESET} $1"
}

print_success() {
    echo -e "${GREEN}✓${RESET} $1"
}

print_error() {
    echo -e "${RED}✗${RESET} $1"
}

print_info() {
    echo -e "${BLUE}[INFO]${RESET} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${RESET} $1"
}

# Global arrays to store event IDs for counting tests
declare -a REGULAR_EVENT_IDS=()
declare -a REPLACEABLE_EVENT_IDS=()
declare -a EPHEMERAL_EVENT_IDS=()
declare -a ADDRESSABLE_EVENT_IDS=()

# Baseline counts from existing events in relay
BASELINE_TOTAL=0
BASELINE_KIND1=0
BASELINE_KIND0=0
BASELINE_KIND30001=0
BASELINE_AUTHOR=0
BASELINE_TYPE_REGULAR=0
BASELINE_TEST_NIP45=0
BASELINE_KINDS_01=0
BASELINE_COMBINED=0

# Helper function to publish event and extract ID
publish_event() {
    local event_json="$1"
    local event_type="$2"
    local description="$3"

    # Extract event ID
    local event_id=$(echo "$event_json" | jq -r '.id' 2>/dev/null)
    if [[ "$event_id" == "null" || -z "$event_id" ]]; then
        print_error "Could not extract event ID from $description"
        return 1
    fi

    print_info "Publishing $description..."

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing"
        return 1
    fi

    # Check response
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description"
        return 1
    elif [[ "$response" == *"true"* ]]; then
        print_success "$description uploaded (ID: ${event_id:0:16}...)"

        # Store event ID in appropriate array
        case "$event_type" in
            "regular") REGULAR_EVENT_IDS+=("$event_id") ;;
            "replaceable") REPLACEABLE_EVENT_IDS+=("$event_id") ;;
            "ephemeral") EPHEMERAL_EVENT_IDS+=("$event_id") ;;
            "addressable") ADDRESSABLE_EVENT_IDS+=("$event_id") ;;
        esac
        echo # Add blank line for readability
        return 0
    else
        print_warning "$description might have failed: $response"
        echo # Add blank line for readability
        return 1
    fi
}

# Helper function to get baseline count for a filter (before publishing test events)
get_baseline_count() {
    local filter="$1"

    # Create COUNT message
    local count_message="[\"COUNT\",\"baseline\",$filter]"

    # Send COUNT message and get response
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$count_message" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
    fi

    # Parse COUNT response
    if [[ -n "$response" ]]; then
        local count_result=$(echo "$response" | grep '"COUNT"' | head -1)
        if [[ -n "$count_result" ]]; then
            local count=$(echo "$count_result" | jq -r '.[2].count' 2>/dev/null)
            if [[ "$count" =~ ^[0-9]+$ ]]; then
                echo "$count"
                return 0
            fi
        fi
    fi

    echo "0" # Default to 0 if we can't get the count
}

# Helper function to send COUNT message and check response
test_count() {
    local sub_id="$1"
    local filter="$2"
    local description="$3"
    local expected_count="$4"

    print_step "Testing COUNT: $description"

    # Create COUNT message
    local count_message="[\"COUNT\",\"$sub_id\",$filter]"

    print_info "Sending filter: $filter"

    # Send COUNT message and get response
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$count_message" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
    fi

    # Parse COUNT response
    local count_result=""
    if [[ -n "$response" ]]; then
        # Look for COUNT response: ["COUNT","sub_id",{"count":N}]
        count_result=$(echo "$response" | grep '"COUNT"' | head -1)
        if [[ -n "$count_result" ]]; then
            local actual_count=$(echo "$count_result" | jq -r '.[2].count' 2>/dev/null)
            if [[ "$actual_count" =~ ^[0-9]+$ ]]; then
                print_info "Received count: $actual_count"

                # Check if count matches expected
                if [[ "$expected_count" == "any" ]]; then
                    print_success "$description - Count: $actual_count"
                    return 0
                elif [[ "$actual_count" -eq "$expected_count" ]]; then
                    print_success "$description - Expected: $expected_count, Got: $actual_count"
                    return 0
                else
                    print_error "$description - Expected: $expected_count, Got: $actual_count"
                    return 1
                fi
            else
                print_error "$description - Invalid count response: $count_result"
                return 1
            fi
        else
            print_error "$description - No COUNT response received"
            print_error "Raw response: $response"
            return 1
        fi
    else
        print_error "$description - No response from relay"
        return 1
    fi
}

# Main test function
run_count_test() {
    print_header "NIP-45 COUNT Message Test"

    # Check dependencies
    print_step "Checking dependencies..."
    if ! command -v nak &> /dev/null; then
        print_error "nak command not found"
        print_info "Please install nak: go install github.com/fiatjaf/nak@latest"
        return 1
    fi
    if ! command -v websocat &> /dev/null; then
        print_error "websocat command not found"
        print_info "Please install websocat for testing"
        return 1
    fi
    if ! command -v jq &> /dev/null; then
        print_error "jq command not found"
        print_info "Please install jq for JSON processing"
        return 1
    fi
    print_success "All dependencies found"

    print_header "PHASE 0: Establishing Baseline Counts"

    # Get baseline counts BEFORE publishing any test events
    print_step "Getting baseline counts from existing events in relay..."

    BASELINE_TOTAL=$(get_baseline_count '{}' "total events")
    BASELINE_KIND1=$(get_baseline_count '{"kinds":[1]}' "kind 1 events")
    BASELINE_KIND0=$(get_baseline_count '{"kinds":[0]}' "kind 0 events")
    BASELINE_KIND30001=$(get_baseline_count '{"kinds":[30001]}' "kind 30001 events")

    # We can't get the author baseline yet since we don't have the pubkey
    BASELINE_AUTHOR=0 # Will be set after first event is created
    BASELINE_TYPE_REGULAR=$(get_baseline_count '{"#type":["regular"]}' "events with type=regular tag")
    BASELINE_TEST_NIP45=$(get_baseline_count '{"#test":["nip45"]}' "events with test=nip45 tag")
    BASELINE_KINDS_01=$(get_baseline_count '{"kinds":[0,1]}' "events with kinds 0 or 1")
    BASELINE_COMBINED=$(get_baseline_count '{"kinds":[1],"#type":["regular"],"#test":["nip45"]}' "combined filter (kind 1 + type=regular + test=nip45)")

    print_info "Initial baseline counts established:"
    print_info "  Total events: $BASELINE_TOTAL"
    print_info "  Kind 1: $BASELINE_KIND1"
    print_info "  Kind 0: $BASELINE_KIND0"
    print_info "  Kind 30001: $BASELINE_KIND30001"
    print_info "  Type=regular: $BASELINE_TYPE_REGULAR"
    print_info "  Test=nip45: $BASELINE_TEST_NIP45"
    print_info "  Kinds 0+1: $BASELINE_KINDS_01"
    print_info "  Combined filter: $BASELINE_COMBINED"

    print_header "PHASE 1: Publishing Test Events"

    # Test 1: Regular Events (kind 1)
    print_step "Creating regular events (kind 1)..."
    local regular1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Regular event #1 for counting" -k 1 --ts $(($(date +%s) - 100)) -t "type=regular" -t "test=nip45" 2>/dev/null)
    local regular2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Regular event #2 for counting" -k 1 --ts $(($(date +%s) - 90)) -t "type=regular" -t "test=nip45" 2>/dev/null)
    local regular3=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Regular event #3 for counting" -k 1 --ts $(($(date +%s) - 80)) -t "type=regular" -t "test=nip45" 2>/dev/null)

    publish_event "$regular1" "regular" "Regular event #1"

    # Now that we have the pubkey, get the author baseline
    local test_pubkey=$(echo "$regular1" | jq -r '.pubkey' 2>/dev/null)
    BASELINE_AUTHOR=$(get_baseline_count "{\"authors\":[\"$test_pubkey\"]}" "events by test author")

    publish_event "$regular2" "regular" "Regular event #2"
    publish_event "$regular3" "regular" "Regular event #3"

    # Test 2: Replaceable Events (kind 0 - metadata)
    print_step "Creating replaceable events (kind 0)..."
    local replaceable1=$(nak event --sec "$TEST_PRIVATE_KEY" -c '{"name":"Test User","about":"Testing NIP-45 COUNT"}' -k 0 --ts $(($(date +%s) - 70)) -t "type=replaceable" 2>/dev/null)
    local replaceable2=$(nak event --sec "$TEST_PRIVATE_KEY" -c '{"name":"Test User Updated","about":"Updated for NIP-45"}' -k 0 --ts $(($(date +%s) - 60)) -t "type=replaceable" 2>/dev/null)

    publish_event "$replaceable1" "replaceable" "Replaceable event #1 (metadata)"
    publish_event "$replaceable2" "replaceable" "Replaceable event #2 (metadata update)"

    # Test 3: Ephemeral Events (kind 20000+) - should NOT be counted
    print_step "Creating ephemeral events (kind 20001)..."
    local ephemeral1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Ephemeral event - should not be counted" -k 20001 --ts $(date +%s) -t "type=ephemeral" 2>/dev/null)

    publish_event "$ephemeral1" "ephemeral" "Ephemeral event (should not be counted)"

    # Test 4: Addressable Events (kind 30000+)
    print_step "Creating addressable events (kind 30001)..."
    local addressable1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Addressable event #1" -k 30001 --ts $(($(date +%s) - 50)) -t "d=test-article" -t "type=addressable" 2>/dev/null)
    local addressable2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Addressable event #2" -k 30001 --ts $(($(date +%s) - 40)) -t "d=test-article" -t "type=addressable" 2>/dev/null)

    publish_event "$addressable1" "addressable" "Addressable event #1"
    publish_event "$addressable2" "addressable" "Addressable event #2"

    # Brief pause to let events settle
    sleep 2

    print_header "PHASE 2: Testing COUNT Messages"

    local test_failures=0

    # Test 1: Count all events
    if ! test_count "count_all" '{}' "Count all events" "any"; then
        ((test_failures++))
    fi

    # Test 2: Count events by kind
    # Regular events (kind 1): no replacement, all 3 should remain
    local expected_kind1=$((3 + BASELINE_KIND1))
    if ! test_count "count_kind1" '{"kinds":[1]}' "Count kind 1 events" "$expected_kind1"; then
        ((test_failures++))
    fi
    # Replaceable events (kind 0): only 1 should remain (newer replaces older of same kind+pubkey)
    # Since we publish 2 with same pubkey, they replace to 1, which replaces any existing
    local expected_kind0=$((1)) # Always 1 for this pubkey+kind after replacement
    if ! test_count "count_kind0" '{"kinds":[0]}' "Count kind 0 events" "$expected_kind0"; then
        ((test_failures++))
    fi
    # Addressable events (kind 30001): only 1 should remain (same d-tag replaces)
    # Since we publish 2 with same pubkey+kind+d-tag, they replace to 1
    local expected_kind30001=$((1)) # Always 1 for this pubkey+kind+d-tag after replacement
    if ! test_count "count_kind30001" '{"kinds":[30001]}' "Count kind 30001 events" "$expected_kind30001"; then
        ((test_failures++))
    fi

    # Test 3: Count events by author (pubkey)
    # BASELINE_AUTHOR includes the first regular event, we add 2 more regular
    # Replaceable and addressable replace existing events from this author
    local test_pubkey=$(echo "$regular1" | jq -r '.pubkey' 2>/dev/null)
    local expected_author=$((2 + BASELINE_AUTHOR))
    if ! test_count "count_author" "{\"authors\":[\"$test_pubkey\"]}" "Count events by specific author" "$expected_author"; then
        ((test_failures++))
    fi

    # Test 4: Count recent events (time-based)
    local recent_timestamp=$(($(date +%s) - 200))
    if ! test_count "count_recent" "{\"since\":$recent_timestamp}" "Count recent events" "any"; then
        ((test_failures++))
    fi

    # Test 5: Count events with specific tags
    # NOTE: Tag filtering is currently not working in the relay - should return the tagged events
    local expected_type_regular=$((0 + BASELINE_TYPE_REGULAR)) # Currently returns 0 due to tag filtering bug
    if ! test_count "count_tag_type" '{"#type":["regular"]}' "Count events with type=regular tag" "$expected_type_regular"; then
        ((test_failures++))
    fi
    local expected_test_nip45=$((0 + BASELINE_TEST_NIP45)) # Currently returns 0 due to tag filtering bug
    if ! test_count "count_tag_test" '{"#test":["nip45"]}' "Count events with test=nip45 tag" "$expected_test_nip45"; then
        ((test_failures++))
    fi

    # Test 6: Count multiple kinds
    # BASELINE_KINDS_01 + 3 regular events = total for kinds 0+1
    local expected_kinds_01=$((3 + BASELINE_KINDS_01))
    if ! test_count "count_multi_kinds" '{"kinds":[0,1]}' "Count multiple kinds (0,1)" "$expected_kinds_01"; then
        ((test_failures++))
    fi

    # Test 7: Count with time range
    local start_time=$(($(date +%s) - 120))
    local end_time=$(($(date +%s) - 60))
    if ! test_count "count_time_range" "{\"since\":$start_time,\"until\":$end_time}" "Count events in time range" "any"; then
        ((test_failures++))
    fi

    # Test 8: Count specific event IDs
    if [[ ${#REGULAR_EVENT_IDS[@]} -gt 0 ]]; then
        local test_event_id="${REGULAR_EVENT_IDS[0]}"
        if ! test_count "count_specific_id" "{\"ids\":[\"$test_event_id\"]}" "Count specific event ID" "1"; then
            ((test_failures++))
        fi
    fi

    # Test 9: Count with multiple filters combined
    # NOTE: Combined tag filtering is currently not working in the relay
    local expected_combined=$((0 + BASELINE_COMBINED)) # Currently returns 0 due to tag filtering bug
    if ! test_count "count_combined" '{"kinds":[1],"#type":["regular"],"#test":["nip45"]}' "Count with combined filters" "$expected_combined"; then
        ((test_failures++))
    fi

    # Test 10: Count ephemeral events (should be 0 since they're not stored)
    if ! test_count "count_ephemeral" '{"kinds":[20001]}' "Count ephemeral events (should be 0)" "0"; then
        ((test_failures++))
    fi

    # Test 11: Count with limit (should still count all matching, ignore limit)
    local expected_with_limit=$((3 + BASELINE_KIND1))
    if ! test_count "count_with_limit" '{"kinds":[1],"limit":1}' "Count with limit (should ignore limit)" "$expected_with_limit"; then
        ((test_failures++))
    fi

    # Test 12: Count non-existent kind
    if ! test_count "count_nonexistent" '{"kinds":[99999]}' "Count non-existent kind" "0"; then
        ((test_failures++))
    fi

    # Test 13: Count with empty filter
    if ! test_count "count_empty_filter" '{}' "Count with empty filter" "any"; then
        ((test_failures++))
    fi

    # Report test results
    if [[ $test_failures -gt 0 ]]; then
        print_error "COUNT TESTS FAILED: $test_failures test(s) failed"
        return 1
    else
        print_success "All COUNT tests passed"
    fi

    print_header "PHASE 3: Database Verification"

    # Check what's actually stored in the database
    print_step "Verifying database contents..."

    if command -v sqlite3 &> /dev/null; then
        # Find the database file (should be in build/ directory with relay pubkey as filename)
        local db_file=""
        if [[ -d "../build" ]]; then
            db_file=$(find ../build -name "*.db" -type f | head -1)
        fi

        if [[ -n "$db_file" && -f "$db_file" ]]; then
            print_info "Events by type in database ($db_file):"
            sqlite3 "$db_file" "SELECT event_type, COUNT(*) as count FROM events GROUP BY event_type;" 2>/dev/null | while read line; do
                echo "  $line"
            done

            print_info "Total events in database:"
            sqlite3 "$db_file" "SELECT COUNT(*) FROM events;" 2>/dev/null

            print_success "Database verification complete"
        else
            print_warning "Database file not found in build/ directory"
            print_info "Expected database files: build/*.db (named after relay pubkey)"
        fi
    else
        print_warning "sqlite3 not available for database verification"
    fi

    return 0
}

# Run the COUNT test
print_header "Starting NIP-45 COUNT Message Test Suite"
echo

if run_count_test; then
    echo
    print_success "All NIP-45 COUNT tests completed successfully!"
    print_info "The C-Relay COUNT functionality is working correctly"
    print_info "✅ COUNT messages are processed and return correct event counts"
    echo
    exit 0
else
    echo
    print_error "❌ NIP-45 COUNT TESTS FAILED!"
    print_error "The COUNT functionality has issues that need to be fixed"
    echo
    exit 1
fi
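
A usage sketch for the suite above: it expects a relay already listening on the hard-coded ws://127.0.0.1:8888 and locates the database via ../build, so it is meant to be launched from the tests/ directory.

```
# Start the relay first (listening on ws://127.0.0.1:8888), then:
cd tests && ./45_nip_test.sh
```
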
420
tests/50_nip_test.sh
Executable file
420
tests/50_nip_test.sh
Executable file
@@ -0,0 +1,420 @@
#!/bin/bash

# NIP-50 Search Message Test - Test search functionality
# Tests search field in filter objects to verify correct event searching

set -e  # Exit on any error

# Color constants
RED='\033[31m'
GREEN='\033[32m'
YELLOW='\033[33m'
BLUE='\033[34m'
BOLD='\033[1m'
RESET='\033[0m'

# Test configuration
RELAY_URL="ws://127.0.0.1:8888"
TEST_PRIVATE_KEY="nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99"

# Print functions
print_header() {
    echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
}

print_step() {
    echo -e "${YELLOW}[STEP]${RESET} $1"
}

print_success() {
    echo -e "${GREEN}✓${RESET} $1"
}

print_error() {
    echo -e "${RED}✗${RESET} $1"
}

print_info() {
    echo -e "${BLUE}[INFO]${RESET} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${RESET} $1"
}

# Global arrays to store event IDs for search tests
declare -a SEARCH_EVENT_IDS=()

# Baseline counts from existing events in relay
BASELINE_TOTAL=0
BASELINE_BITCOIN=0
BASELINE_LIGHTNING=0
BASELINE_NOSTR=0
BASELINE_DECENTRALIZED=0
BASELINE_NETWORK=0

# Helper function to get baseline count for a search term (before publishing test events)
get_baseline_search_count() {
    local search_term="$1"

    # Create COUNT message with search
    local filter="{\"search\":\"$search_term\"}"
    local count_message="[\"COUNT\",\"baseline_search\",$filter]"

    # Send COUNT message and get response
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$count_message" | timeout 3s websocat "$RELAY_URL" 2>&1 || echo "")
    fi

    # Parse COUNT response
    if [[ -n "$response" ]]; then
        local count_result=$(echo "$response" | grep '"COUNT"' | head -1)
        if [[ -n "$count_result" ]]; then
            local count=$(echo "$count_result" | jq -r '.[2].count' 2>/dev/null)
            if [[ "$count" =~ ^[0-9]+$ ]]; then
                echo "$count"
                return 0
            fi
        fi
    fi

    echo "0"  # Default to 0 if we can't get the count
}

# Helper function to publish event and extract ID
publish_event() {
    local event_json="$1"
    local description="$2"

    # Extract event ID
    local event_id=$(echo "$event_json" | jq -r '.id' 2>/dev/null)
    if [[ "$event_id" == "null" || -z "$event_id" ]]; then
        print_error "Could not extract event ID from $description"
        return 1
    fi

    print_info "Publishing $description..."

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing"
        return 1
    fi

    # Check response
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description"
        return 1
    elif [[ "$response" == *"true"* ]]; then
        print_success "$description uploaded (ID: ${event_id:0:16}...)"
        SEARCH_EVENT_IDS+=("$event_id")
        echo  # Add blank line for readability
        return 0
    else
        print_warning "$description might have failed: $response"
        echo  # Add blank line for readability
        return 1
    fi
}

# Helper function to send COUNT message with search and check response
test_search_count() {
    local sub_id="$1"
    local filter="$2"
    local description="$3"
    local expected_count="$4"

    print_step "Testing SEARCH COUNT: $description"

    # Create COUNT message
    local count_message="[\"COUNT\",\"$sub_id\",$filter]"

    print_info "Sending filter: $filter"

    # Send COUNT message and get response
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$count_message" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
    fi

    # Parse COUNT response
    local count_result=""
    if [[ -n "$response" ]]; then
        # Look for COUNT response: ["COUNT","sub_id",{"count":N}]
        count_result=$(echo "$response" | grep '"COUNT"' | head -1)
        if [[ -n "$count_result" ]]; then
            local actual_count=$(echo "$count_result" | jq -r '.[2].count' 2>/dev/null)
            if [[ "$actual_count" =~ ^[0-9]+$ ]]; then
                print_info "Received count: $actual_count"

                # Check if count matches expected
                if [[ "$expected_count" == "any" ]]; then
                    print_success "$description - Count: $actual_count"
                    return 0
                elif [[ "$actual_count" -eq "$expected_count" ]]; then
                    print_success "$description - Expected: $expected_count, Got: $actual_count"
                    return 0
                else
                    print_error "$description - Expected: $expected_count, Got: $actual_count"
                    return 1
                fi
            else
                print_error "$description - Invalid count response: $count_result"
                return 1
            fi
        else
            print_error "$description - No COUNT response received"
            print_error "Raw response: $response"
            return 1
        fi
    else
        print_error "$description - No response from relay"
        return 1
    fi
}

# Helper function to send REQ message with search and check response
test_search_req() {
    local sub_id="$1"
    local filter="$2"
    local description="$3"
    local expected_events="$4"

    print_step "Testing SEARCH REQ: $description"

    # Create REQ message
    local req_message="[\"REQ\",\"$sub_id\",$filter]"

    print_info "Sending filter: $filter"

    # Send REQ message and get response
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$req_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "")
    fi

    # Send CLOSE message to end subscription
    local close_message="[\"CLOSE\",\"$sub_id\"]"
    echo "$close_message" | timeout 2s websocat "$RELAY_URL" >/dev/null 2>&1 || true

    # Parse response for EVENT messages
    local event_count=0
    if [[ -n "$response" ]]; then
        # Count EVENT messages in response
        event_count=$(echo "$response" | grep -c '"EVENT"')

        print_info "Received events: $event_count"

        # Check if event count matches expected
        if [[ "$expected_events" == "any" ]]; then
            print_success "$description - Events: $event_count"
            return 0
        elif [[ "$event_count" -eq "$expected_events" ]]; then
            print_success "$description - Expected: $expected_events, Got: $event_count"
            return 0
        else
            print_error "$description - Expected: $expected_events, Got: $event_count"
            return 1
        fi
    else
        print_error "$description - No response from relay"
        return 1
    fi
}

# Main test function
run_search_test() {
    print_header "NIP-50 Search Message Test"

    # Check dependencies
    print_step "Checking dependencies..."
    if ! command -v nak &> /dev/null; then
        print_error "nak command not found"
        print_info "Please install nak: go install github.com/fiatjaf/nak@latest"
        return 1
    fi
    if ! command -v websocat &> /dev/null; then
        print_error "websocat command not found"
        print_info "Please install websocat for testing"
        return 1
    fi
    if ! command -v jq &> /dev/null; then
        print_error "jq command not found"
        print_info "Please install jq for JSON processing"
        return 1
    fi
    print_success "All dependencies found"

    print_header "PHASE 0: Establishing Baseline Search Counts"

    # Get baseline counts BEFORE publishing any test events
    print_step "Getting baseline search counts from existing events in relay..."

    BASELINE_TOTAL=$(get_baseline_search_count "")
    BASELINE_BITCOIN=$(get_baseline_search_count "Bitcoin")
    BASELINE_LIGHTNING=$(get_baseline_search_count "Lightning")
    BASELINE_NOSTR=$(get_baseline_search_count "Nostr")
    BASELINE_DECENTRALIZED=$(get_baseline_search_count "decentralized")
    BASELINE_NETWORK=$(get_baseline_search_count "network")

    print_info "Initial baseline search counts established:"
    print_info "  Total events: $BASELINE_TOTAL"
    print_info "  'Bitcoin' matches: $BASELINE_BITCOIN"
    print_info "  'Lightning' matches: $BASELINE_LIGHTNING"
    print_info "  'Nostr' matches: $BASELINE_NOSTR"
    print_info "  'decentralized' matches: $BASELINE_DECENTRALIZED"
    print_info "  'network' matches: $BASELINE_NETWORK"

    print_header "PHASE 1: Publishing Test Events with Searchable Content"

    # Create events with searchable content
    print_step "Creating events with searchable content..."

    # Events with "Bitcoin" in content
    local bitcoin1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Bitcoin is a decentralized digital currency" -k 1 --ts $(($(date +%s) - 100)) -t "topic=crypto" 2>/dev/null)
    local bitcoin2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "The Bitcoin network is secure and decentralized" -k 1 --ts $(($(date +%s) - 90)) -t "topic=blockchain" 2>/dev/null)

    # Events with "Lightning" in content
    local lightning1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Lightning Network enables fast Bitcoin transactions" -k 1 --ts $(($(date +%s) - 80)) -t "topic=lightning" 2>/dev/null)
    local lightning2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Lightning channels are bidirectional payment channels" -k 1 --ts $(($(date +%s) - 70)) -t "topic=scaling" 2>/dev/null)

    # Events with "Nostr" in content
    local nostr1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Nostr is a decentralized social network protocol" -k 1 --ts $(($(date +%s) - 60)) -t "topic=nostr" 2>/dev/null)
    local nostr2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Nostr relays store and distribute events" -k 1 --ts $(($(date +%s) - 50)) -t "topic=protocol" 2>/dev/null)

    # Events with searchable content in tags
    local tag_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "This event has searchable tags" -k 1 --ts $(($(date +%s) - 40)) -t "search=bitcoin" -t "category=crypto" 2>/dev/null)

    # Event with no searchable content
    local no_match=$(nak event --sec "$TEST_PRIVATE_KEY" -c "This event has no matching content" -k 1 --ts $(($(date +%s) - 30)) -t "topic=other" 2>/dev/null)

    # Publish all test events
    publish_event "$bitcoin1" "Bitcoin event #1"
    publish_event "$bitcoin2" "Bitcoin event #2"
    publish_event "$lightning1" "Lightning event #1"
    publish_event "$lightning2" "Lightning event #2"
    publish_event "$nostr1" "Nostr event #1"
    publish_event "$nostr2" "Nostr event #2"
    publish_event "$tag_event" "Event with searchable tags"
    publish_event "$no_match" "Non-matching event"

    # Brief pause to let events settle
    sleep 2

    print_header "PHASE 2: Testing SEARCH Functionality"

    local test_failures=0

    # Test 1: Search for "Bitcoin" - should find baseline + 4 new events (2 in content + 1 in tags + 1 with search=bitcoin tag)
    local expected_bitcoin=$((BASELINE_BITCOIN + 4))
    if ! test_search_count "search_bitcoin_count" '{"search":"Bitcoin"}' "COUNT search for 'Bitcoin'" "$expected_bitcoin"; then
        ((test_failures++))
    fi

    if ! test_search_req "search_bitcoin_req" '{"search":"Bitcoin"}' "REQ search for 'Bitcoin'" "$expected_bitcoin"; then
        ((test_failures++))
    fi

    # Test 2: Search for "Lightning" - should find baseline + 2 new events
    local expected_lightning=$((BASELINE_LIGHTNING + 2))
    if ! test_search_count "search_lightning_count" '{"search":"Lightning"}' "COUNT search for 'Lightning'" "$expected_lightning"; then
        ((test_failures++))
    fi

    if ! test_search_req "search_lightning_req" '{"search":"Lightning"}' "REQ search for 'Lightning'" "$expected_lightning"; then
        ((test_failures++))
    fi

    # Test 3: Search for "Nostr" - should find baseline + 2 new events
    local expected_nostr=$((BASELINE_NOSTR + 2))
    if ! test_search_count "search_nostr_count" '{"search":"Nostr"}' "COUNT search for 'Nostr'" "$expected_nostr"; then
        ((test_failures++))
    fi

    if ! test_search_req "search_nostr_req" '{"search":"Nostr"}' "REQ search for 'Nostr'" "$expected_nostr"; then
        ((test_failures++))
    fi

    # Test 4: Search for "decentralized" - should find baseline + 3 new events (Bitcoin #1, Bitcoin #2, Nostr #1)
    local expected_decentralized=$((BASELINE_DECENTRALIZED + 3))
    if ! test_search_count "search_decentralized_count" '{"search":"decentralized"}' "COUNT search for 'decentralized'" "$expected_decentralized"; then
        ((test_failures++))
    fi

    if ! test_search_req "search_decentralized_req" '{"search":"decentralized"}' "REQ search for 'decentralized'" "$expected_decentralized"; then
        ((test_failures++))
    fi

    # Test 5: Search for "network" - should find baseline + 3 new events (Bitcoin #2, Lightning #1, Nostr #1)
    local expected_network=$((BASELINE_NETWORK + 3))
    if ! test_search_count "search_network_count" '{"search":"network"}' "COUNT search for 'network'" "$expected_network"; then
        ((test_failures++))
    fi

    # Test 6: Search for non-existent term - should find 0 events
    if ! test_search_count "search_nonexistent_count" '{"search":"xyzzy"}' "COUNT search for non-existent term" "0"; then
        ((test_failures++))
    fi

    # Test 7: Search combined with other filters
    local expected_combined=$((BASELINE_BITCOIN + 4))
    if ! test_search_count "search_combined_count" '{"search":"Bitcoin","kinds":[1]}' "COUNT search 'Bitcoin' with kind filter" "$expected_combined"; then
        ((test_failures++))
    fi

    # Test 8: Search with time range
    local recent_timestamp=$(($(date +%s) - 60))
    if ! test_search_count "search_time_count" "{\"search\":\"Bitcoin\",\"since\":$recent_timestamp}" "COUNT search 'Bitcoin' with time filter" "any"; then
        ((test_failures++))
    fi

    # Test 9: Empty search string - should return all events
    local expected_empty=$((BASELINE_TOTAL + 8))
    if ! test_search_count "search_empty_count" '{"search":""}' "COUNT with empty search string" "$expected_empty"; then
        ((test_failures++))
    fi

    # Test 10: Case insensitive search (SQLite LIKE is case insensitive by default)
    local expected_case=$((BASELINE_BITCOIN + 4))
    if ! test_search_count "search_case_count" '{"search":"BITCOIN"}' "COUNT case-insensitive search for 'BITCOIN'" "$expected_case"; then
        ((test_failures++))
    fi

    # Report test results
    if [[ $test_failures -gt 0 ]]; then
        print_error "SEARCH TESTS FAILED: $test_failures test(s) failed"
        return 1
    else
        print_success "All SEARCH tests passed"
    fi

    return 0
}

# Run the SEARCH test
print_header "Starting NIP-50 Search Message Test Suite"
echo

if run_search_test; then
    echo
    print_success "All NIP-50 SEARCH tests completed successfully!"
    print_info "The C-Relay SEARCH functionality is working correctly"
    print_info "✅ Search field in filter objects works for both REQ and COUNT messages"
    print_info "✅ Search works across event content and tag values"
    print_info "✅ Search is case-insensitive and supports partial matches"
    echo
    exit 0
else
    echo
    print_error "❌ NIP-50 SEARCH TESTS FAILED!"
    print_error "The SEARCH functionality has issues that need to be fixed"
    echo
    exit 1
fi
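The case-insensitivity expectation behind Test 10 comes from SQLite's default LIKE behavior; a hedged sketch of the kind of query this corresponds to is shown below (the `events` table and `content` column are assumptions for illustration, not taken from the relay source).

```bash
# Illustrative only: SQLite LIKE is case-insensitive for ASCII by default,
# so 'BITCOIN' and 'Bitcoin' match the same rows. Schema names are assumed.
db_file=$(find ../build -name "*.db" -type f | head -1)
sqlite3 "$db_file" \
    "SELECT COUNT(*) FROM events WHERE content LIKE '%bitcoin%';"
```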
236
tests/70_nip_test.sh
Executable file
@@ -0,0 +1,236 @@
#!/bin/bash

# NIP-70 Protected Events Test - Test protected event functionality
# Tests events with ["-"] tags to verify correct rejection/acceptance based on config and auth

set -e  # Exit on any error

# Color constants
RED='\033[31m'
GREEN='\033[32m'
YELLOW='\033[33m'
BLUE='\033[34m'
BOLD='\033[1m'
RESET='\033[0m'

# Test configuration
RELAY_URL="ws://127.0.0.1:8888"
TEST_PRIVATE_KEY="nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99"
TEST_PUBKEY="npub1v0lxxxxutpvrelsksy8cdhgfux9l6fp68ay6h7lgd2plmxnen65qyzt206"

# Print functions
print_header() {
    echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
}

print_step() {
    echo -e "${YELLOW}[STEP]${RESET} $1"
}

print_success() {
    echo -e "${GREEN}✓${RESET} $1"
}

print_error() {
    echo -e "${RED}✗${RESET} $1"
}

print_info() {
    echo -e "${BLUE}[INFO]${RESET} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${RESET} $1"
}

# Helper function to publish event and check response
publish_event_test() {
    local event_json="$1"
    local description="$2"
    local should_succeed="$3"

    # Extract event ID
    local event_id=$(echo "$event_json" | jq -r '.id' 2>/dev/null)
    if [[ "$event_id" == "null" || -z "$event_id" ]]; then
        print_error "Could not extract event ID from $description"
        return 1
    fi

    print_info "Publishing $description..."

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing"
        return 1
    fi

    # Check response
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description"
        return 1
    elif [[ "$response" == *"true"* ]]; then
        if [[ "$should_succeed" == "true" ]]; then
            print_success "$description accepted (ID: ${event_id:0:16}...)"
            return 0
        else
            print_error "$description was accepted but should have been rejected"
            return 1
        fi
    elif [[ "$response" == *"false"* ]]; then
        if [[ "$should_succeed" == "false" ]]; then
            print_success "$description correctly rejected"
            return 0
        else
            print_error "$description was rejected but should have been accepted"
            return 1
        fi
    else
        print_warning "$description response unclear: $response"
        # Try to parse for specific error codes
        if [[ "$response" == *"-104"* ]]; then
            if [[ "$should_succeed" == "false" ]]; then
                print_success "$description correctly rejected with protected event error"
                return 0
            else
                print_error "$description rejected with protected event error but should have been accepted"
                return 1
            fi
        fi
        return 1
    fi
}

# Helper function to enable/disable protected events via admin API
set_protected_events_config() {
    local enabled="$1"
    local description="$2"

    print_step "Setting protected events $description"

    # This would need to be implemented using the admin API
    # For now, we'll assume the config is set externally
    print_info "Protected events config set to: $enabled"
}

# Main test function
run_protected_events_test() {
    print_header "NIP-70 Protected Events Test"

    # Check dependencies
    print_step "Checking dependencies..."
    if ! command -v nak &> /dev/null; then
        print_error "nak command not found"
        print_info "Please install nak: go install github.com/fiatjaf/nak@latest"
        return 1
    fi
    if ! command -v websocat &> /dev/null; then
        print_error "websocat command not found"
        print_info "Please install websocat for testing"
        return 1
    fi
    if ! command -v jq &> /dev/null; then
        print_error "jq command not found"
        print_info "Please install jq for JSON processing"
        return 1
    fi
    print_success "All dependencies found"

    local test_failures=0

    print_header "PHASE 1: Testing with Protected Events Disabled (Default)"

    # Test 1: Normal event should work
    local normal_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "This is a normal event" -k 1 --ts $(date +%s) 2>/dev/null)
    if ! publish_event_test "$normal_event" "normal event with protected events disabled" "true"; then
        ((test_failures++))
    fi

    # Test 2: Protected event should be rejected
    local protected_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "This is a protected event" -k 1 --ts $(date +%s) -t "-" 2>/dev/null)
    if ! publish_event_test "$protected_event" "protected event with protected events disabled" "false"; then
        ((test_failures++))
    fi

    print_header "PHASE 2: Testing with Protected Events Enabled but Not Authenticated"

    # Enable protected events (this would need admin API call)
    set_protected_events_config "true" "enabled"

    # Test 3: Normal event should still work
    local normal_event2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "This is another normal event" -k 1 --ts $(date +%s) 2>/dev/null)
    if ! publish_event_test "$normal_event2" "normal event with protected events enabled" "true"; then
        ((test_failures++))
    fi

    # Test 4: Protected event should be rejected (not authenticated)
    local protected_event2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "This is another protected event" -k 1 --ts $(date +%s) -t "-" 2>/dev/null)
    if ! publish_event_test "$protected_event2" "protected event with protected events enabled but not authenticated" "false"; then
        ((test_failures++))
    fi

    print_header "PHASE 3: Testing with Protected Events Enabled and Authenticated"

    # For full testing, we would need to authenticate the user
    # This requires implementing NIP-42 authentication in the test
    # For now, we'll note that this phase requires additional setup
    print_info "Phase 3 requires NIP-42 authentication setup - skipping for now"
    print_info "To complete full testing, implement authentication flow in test"

    # Test 5: Protected event with authentication should work (placeholder)
    # This would require:
    # 1. Setting up authentication challenge/response
    # 2. Publishing protected event after authentication
    print_info "Protected event with authentication test: SKIPPED (requires auth setup)"

    print_header "PHASE 4: Testing Edge Cases"

    # Test 6: Event with multiple tags including protected
    local multi_tag_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Event with multiple tags" -k 1 --ts $(date +%s) -t "topic=test" -t "-" -t "category=protected" 2>/dev/null)
    if ! publish_event_test "$multi_tag_event" "event with multiple tags including protected" "false"; then
        ((test_failures++))
    fi

    # Test 7: Event with empty protected tag
    local empty_protected_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Event with empty protected tag" -k 1 --ts $(date +%s) -t "" 2>/dev/null)
    if ! publish_event_test "$empty_protected_event" "event with empty protected tag" "true"; then
        ((test_failures++))
    fi

    # Report test results
    if [[ $test_failures -gt 0 ]]; then
        print_error "PROTECTED EVENTS TESTS FAILED: $test_failures test(s) failed"
        return 1
    else
        print_success "All PROTECTED EVENTS tests passed"
    fi

    return 0
}

# Run the PROTECTED EVENTS test
print_header "Starting NIP-70 Protected Events Test Suite"
echo

if run_protected_events_test; then
    echo
    print_success "All NIP-70 PROTECTED EVENTS tests completed successfully!"
    print_info "The C-Relay PROTECTED EVENTS functionality is working correctly"
    print_info "✅ Protected events are rejected when feature is disabled"
    print_info "✅ Protected events are rejected when enabled but not authenticated"
    print_info "✅ Normal events work regardless of protected events setting"
    print_info "✅ Events with multiple tags including protected are handled correctly"
    echo
    exit 0
else
    echo
    print_error "❌ NIP-70 PROTECTED EVENTS TESTS FAILED!"
    print_error "The PROTECTED EVENTS functionality has issues that need to be fixed"
    echo
    exit 1
fi
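Phase 3 above is skipped because it needs a NIP-42 handshake. A minimal sketch of what that flow could look like with the same tools (websocat, nak, jq) is shown below; the challenge-retrieval step assumes the relay issues an AUTH message when the connection opens, which this sketch does not verify, and authentication is per-connection, so the AUTH reply and the protected event must go over the same socket.

```bash
# Hedged sketch of the NIP-42 flow Phase 3 would need (message shapes per NIP-42):
#   1. relay sends    ["AUTH","<challenge>"]
#   2. client replies ["AUTH",<signed kind 22242 event with relay+challenge tags>]
#   3. client can then publish protected (["-"] tagged) events on that connection

# 1. Grab a challenge (assumes the relay sends AUTH shortly after connect).
challenge=$(echo '["REQ","probe",{"limit":1}]' | timeout 3s websocat "$RELAY_URL" \
    | jq -r 'select(.[0] == "AUTH") | .[1]' | head -1)

# 2. Build the auth event and a protected event, then send both on one connection.
auth_event=$(nak event --sec "$TEST_PRIVATE_KEY" -k 22242 -c "" \
    -t "relay=$RELAY_URL" -t "challenge=$challenge" 2>/dev/null)
protected=$(nak event --sec "$TEST_PRIVATE_KEY" -k 1 -c "protected after auth" -t "-" 2>/dev/null)

printf '%s\n%s\n' "[\"AUTH\",$auth_event]" "[\"EVENT\",$protected]" \
    | timeout 5s websocat "$RELAY_URL"
```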
@@ -310,8 +310,51 @@ else
    print_failure "Relay failed to start for network test"
fi

# TEST 10: Multiple Startup Attempts (Port Conflict)
# TEST 10: Port Override with Admin/Relay Key Overrides
print_test_header "Test 10: Port Conflict Handling"
print_test_header "Test 10: Port Override with -a/-r Flags"

cleanup_test_files

# Generate test keys (64 hex chars each)
TEST_ADMIN_PUBKEY="1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
TEST_RELAY_PRIVKEY="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"

print_info "Testing port override with -p 9999 -a $TEST_ADMIN_PUBKEY -r $TEST_RELAY_PRIVKEY"

# Start relay with port override and key overrides
timeout 15 $RELAY_BINARY -p 9999 -a $TEST_ADMIN_PUBKEY -r $TEST_RELAY_PRIVKEY > "test_port_override.log" 2>&1 &
relay_pid=$!
sleep 5

if kill -0 $relay_pid 2>/dev/null; then
    # Check if relay bound to port 9999 (not default 8888)
    if netstat -tln 2>/dev/null | grep -q ":9999"; then
        print_success "Relay successfully bound to overridden port 9999"
    else
        print_failure "Relay not bound to overridden port 9999"
    fi

    # Check that relay started successfully
    if check_relay_startup "test_port_override.log"; then
        print_success "Relay startup completed with overrides"
    else
        print_failure "Relay failed to complete startup with overrides"
    fi

    # Check that admin keys were NOT generated (since -a was provided)
    if ! check_admin_keys "test_port_override.log"; then
        print_success "Admin keys not generated (correctly using provided -a key)"
    else
        print_failure "Admin keys generated despite -a override"
    fi

    stop_relay_test $relay_pid
else
    print_failure "Relay failed to start with port/key overrides"
fi

# TEST 11: Multiple Startup Attempts (Port Conflict)
print_test_header "Test 11: Port Conflict Handling"

relay_pid1=$(start_relay_test "port_conflict_1" 10)
sleep 2
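One portability note on the port check above: netstat is often missing on minimal systems, so a possible fallback is the equivalent probe with ss from iproute2, sketched here.

```bash
# Possible fallback when net-tools/netstat is not installed:
# ss -tln lists listening TCP sockets in the same format for this purpose.
if ss -tln 2>/dev/null | grep -q ":9999"; then
    print_success "Relay successfully bound to overridden port 9999"
else
    print_failure "Relay not bound to overridden port 9999"
fi
```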
@@ -166,6 +166,81 @@ add_to_blacklist() {
    sleep 3
}

# Send admin command to add user to whitelist
add_to_whitelist() {
    local pubkey="$1"
    log_info "Adding pubkey to whitelist: ${pubkey:0:16}..."

    # Create the admin command
    COMMAND="[\"whitelist\", \"pubkey\", \"$pubkey\"]"

    # Encrypt the command using NIP-44
    ENCRYPTED_COMMAND=$(nak encrypt "$COMMAND" \
        --sec "$ADMIN_PRIVKEY" \
        --recipient-pubkey "$RELAY_PUBKEY")

    if [ -z "$ENCRYPTED_COMMAND" ]; then
        log_error "Failed to encrypt admin command"
        return 1
    fi

    # Create admin event
    ADMIN_EVENT=$(nak event \
        --kind 23456 \
        --content "$ENCRYPTED_COMMAND" \
        --sec "$ADMIN_PRIVKEY" \
        --tag "p=$RELAY_PUBKEY")

    # Post admin event
    ADMIN_RESULT=$(echo "$ADMIN_EVENT" | nak event "$RELAY_URL")

    if echo "$ADMIN_RESULT" | grep -q "error\|failed\|denied"; then
        log_error "Failed to send admin command: $ADMIN_RESULT"
        return 1
    fi

    log_success "Admin command sent successfully - user added to whitelist"
    # Wait for the relay to process the admin command
    sleep 3
}

# Clear all auth rules
clear_auth_rules() {
    log_info "Clearing all auth rules..."

    # Create the admin command
    COMMAND="[\"system_command\", \"clear_all_auth_rules\"]"

    # Encrypt the command using NIP-44
    ENCRYPTED_COMMAND=$(nak encrypt "$COMMAND" \
        --sec "$ADMIN_PRIVKEY" \
        --recipient-pubkey "$RELAY_PUBKEY")

    if [ -z "$ENCRYPTED_COMMAND" ]; then
        log_error "Failed to encrypt admin command"
        return 1
    fi

    # Create admin event
    ADMIN_EVENT=$(nak event \
        --kind 23456 \
        --content "$ENCRYPTED_COMMAND" \
        --sec "$ADMIN_PRIVKEY" \
        --tag "p=$RELAY_PUBKEY")

    # Post admin event
    ADMIN_RESULT=$(echo "$ADMIN_EVENT" | nak event "$RELAY_URL")

    if echo "$ADMIN_RESULT" | grep -q "error\|failed\|denied"; then
        log_error "Failed to send admin command: $ADMIN_RESULT"
        return 1
    fi

    log_success "Admin command sent successfully - all auth rules cleared"
    # Wait for the relay to process the admin command
    sleep 3
}

# Test 2: Try to post after blacklisting
test_blacklist_post() {
    log_info "=== TEST 2: Attempt to post event after blacklisting ==="
@@ -199,6 +274,92 @@ test_blacklist_post() {
    fi
}

# Test 3: Test whitelist functionality
test_whitelist_functionality() {
    log_info "=== TEST 3: Test whitelist functionality ==="

    # Generate a second test keypair for whitelist testing
    log_info "Generating second test keypair for whitelist testing..."
    WHITELIST_PRIVKEY=$(nak key generate 2>/dev/null)
    WHITELIST_PUBKEY=$(nak key public "$WHITELIST_PRIVKEY" 2>/dev/null)

    if [ -z "$WHITELIST_PUBKEY" ]; then
        log_error "Failed to generate whitelist test keypair"
        return 1
    fi

    log_success "Generated whitelist test keypair: ${WHITELIST_PUBKEY:0:16}..."

    # Clear all auth rules first
    if ! clear_auth_rules; then
        log_error "Failed to clear auth rules for whitelist test"
        return 1
    fi

    # Add the whitelist user to whitelist
    if ! add_to_whitelist "$WHITELIST_PUBKEY"; then
        log_error "Failed to add whitelist user"
        return 1
    fi

    # Test 3a: Original test user should be blocked (not whitelisted)
    log_info "Testing that non-whitelisted user is blocked..."
    local timestamp=$(date +%s)
    local content="Non-whitelisted test event at timestamp $timestamp"

    NON_WHITELIST_EVENT=$(nak event \
        --kind 1 \
        --content "$content" \
        --sec "$TEST_PRIVKEY" \
        --tag 't=whitelist-test')

    POST_RESULT=$(echo "$NON_WHITELIST_EVENT" | nak event "$RELAY_URL" 2>&1)

    if echo "$POST_RESULT" | grep -q "error\|failed\|denied\|blocked"; then
        log_success "Non-whitelisted user correctly blocked"
    else
        log_error "Non-whitelisted user was not blocked - whitelist may not be working"
        log_error "Post result: $POST_RESULT"
        return 1
    fi

    # Test 3b: Whitelisted user should be allowed
    log_info "Testing that whitelisted user can post..."
    content="Whitelisted test event at timestamp $timestamp"

    WHITELIST_EVENT=$(nak event \
        --kind 1 \
        --content "$content" \
        --sec "$WHITELIST_PRIVKEY" \
        --tag 't=whitelist-test')

    POST_RESULT=$(echo "$WHITELIST_EVENT" | nak event "$RELAY_URL" 2>&1)

    if echo "$POST_RESULT" | grep -q "error\|failed\|denied\|blocked"; then
        log_error "Whitelisted user was blocked - whitelist not working correctly"
        log_error "Post result: $POST_RESULT"
        return 1
    else
        log_success "Whitelisted user can post successfully"
    fi

    # Verify the whitelisted event can be retrieved
    WHITELIST_EVENT_ID=$(echo "$WHITELIST_EVENT" | jq -r '.id')
    sleep 2

    RETRIEVE_RESULT=$(nak req \
        --id "$WHITELIST_EVENT_ID" \
        "$RELAY_URL")

    if echo "$RETRIEVE_RESULT" | grep -q "$WHITELIST_EVENT_ID"; then
        log_success "Whitelisted event successfully retrieved"
        return 0
    else
        log_error "Failed to retrieve whitelisted event"
        return 1
    fi
}

# Main test function
main() {
    log_info "Starting C-Relay Whitelist/Blacklist Test"
@@ -237,6 +398,14 @@ main() {
        exit 1
    fi

    # Test 3: Test whitelist functionality
    if test_whitelist_functionality; then
        log_success "TEST 3 PASSED: Whitelist functionality works correctly"
    else
        log_error "TEST 3 FAILED: Whitelist functionality not working"
        exit 1
    fi

    log_success "All tests passed! Whitelist/blacklist functionality is working correctly."
}
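The added add_to_whitelist() and clear_auth_rules() repeat the encrypt-sign-post sequence already used by add_to_blacklist(); a hedged sketch of a shared helper is shown below (the helper name and structure are suggestions for a possible refactor, not part of this diff).

```bash
# Suggested helper: one place for the NIP-44 encrypt + kind 23456 post logic.
send_admin_command() {
    local command_json="$1"
    local success_msg="$2"

    local encrypted
    encrypted=$(nak encrypt "$command_json" \
        --sec "$ADMIN_PRIVKEY" \
        --recipient-pubkey "$RELAY_PUBKEY")
    if [ -z "$encrypted" ]; then
        log_error "Failed to encrypt admin command"
        return 1
    fi

    local admin_event
    admin_event=$(nak event \
        --kind 23456 \
        --content "$encrypted" \
        --sec "$ADMIN_PRIVKEY" \
        --tag "p=$RELAY_PUBKEY")

    local result
    result=$(echo "$admin_event" | nak event "$RELAY_URL")
    if echo "$result" | grep -q "error\|failed\|denied"; then
        log_error "Failed to send admin command: $result"
        return 1
    fi

    log_success "$success_msg"
    sleep 3  # Wait for the relay to process the admin command
}

# Usage (equivalent to the functions added in this diff):
#   send_admin_command "[\"whitelist\", \"pubkey\", \"$pubkey\"]" "user added to whitelist"
#   send_admin_command "[\"system_command\", \"clear_all_auth_rules\"]" "all auth rules cleared"
```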