4 Commits

37 changed files with 118799 additions and 1647 deletions

2
.gitignore vendored
View File

@@ -3,4 +3,4 @@ logs/
nostr_core_lib/
blobs/
c-relay/
text_graph/

131
Dockerfile.alpine-musl Normal file
View File

@@ -0,0 +1,131 @@
# syntax=docker/dockerfile:1
# Alpine-based MUSL static binary builder for Ginxsom
# Produces truly portable binaries with zero runtime dependencies.
# The final `output` stage is `scratch` containing only /ginxsom-fcgi_static.

# Declared before FROM so it could parameterize base selection; pre-FROM ARGs
# are NOT visible inside stages, hence the re-declaration further down.
ARG DEBUG_BUILD=false

FROM alpine:3.19 AS builder

# Install build dependencies (alphabetized for diffability).
# NOTE(review): packages are unpinned (hadolint DL3018); the alpine:3.19 tag
# bounds versions loosely, but pinning (pkg=x.y.z-rN) would be fully reproducible.
RUN apk add --no-cache \
    autoconf \
    automake \
    bash \
    brotli-dev \
    brotli-static \
    build-base \
    c-ares-dev \
    c-ares-static \
    cmake \
    curl-dev \
    curl-static \
    fcgi \
    fcgi-dev \
    git \
    libidn2-dev \
    libidn2-static \
    libpsl-dev \
    libpsl-static \
    libtool \
    libunistring-dev \
    libunistring-static \
    linux-headers \
    musl-dev \
    nghttp2-dev \
    nghttp2-static \
    openssl-dev \
    openssl-libs-static \
    pkgconfig \
    sqlite-dev \
    sqlite-static \
    wget \
    zlib-dev \
    zlib-static

# Set working directory
WORKDIR /build

# Build libsecp256k1 as a static library.
# Pinned to a release tag for reproducibility: the previous unpinned clone of
# the default branch meant the cached layer silently froze at whatever commit
# the FIRST build happened to fetch, and fresh builds could get a different API.
ARG SECP256K1_REF=v0.4.1
RUN git clone --depth 1 --branch "${SECP256K1_REF}" \
        https://github.com/bitcoin-core/secp256k1.git /tmp/secp256k1 && \
    cd /tmp/secp256k1 && \
    ./autogen.sh && \
    ./configure --enable-static --disable-shared --prefix=/usr \
        CFLAGS="-fPIC" && \
    make -j$(nproc) && \
    make install && \
    rm -rf /tmp/secp256k1

# Copy only submodule configuration and git metadata first so the submodule
# layer stays cached unless .gitmodules (or git state) changes.
COPY .gitmodules /build/.gitmodules
COPY .git /build/.git
RUN git submodule update --init --recursive

# Build nostr_core_lib with the required NIPs (cached unless nostr_core_lib changes).
# Fortification is disabled via sed because the glibc __*_chk fortify symbols
# do not exist in MUSL and would fail static linking.
# NIPs: 001(Basic), 006(Keys), 013(PoW), 017(DMs), 019(Bech32), 042(Auth),
#       044(Encryption), 059(Gift Wrap)
COPY nostr_core_lib /build/nostr_core_lib/
RUN cd nostr_core_lib && \
    chmod +x build.sh && \
    sed -i 's/CFLAGS="-Wall -Wextra -std=c99 -fPIC -O2"/CFLAGS="-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -Wall -Wextra -std=c99 -fPIC -O2"/' build.sh && \
    rm -f *.o *.a 2>/dev/null || true && \
    ./build.sh --nips=1,6,13,17,19,42,44,59

# Embed the web interface files into C headers consumed by the main build.
COPY api/ /build/api/
COPY scripts/embed_web_files.sh /build/scripts/
RUN mkdir -p src && \
    chmod +x scripts/embed_web_files.sh && \
    ./scripts/embed_web_files.sh

# Copy Ginxsom source files LAST (only this layer rebuilds on source changes).
COPY src/ /build/src/
COPY include/ /build/include/

# Re-declared HERE (not at the top of the stage) so toggling DEBUG_BUILD only
# invalidates the compile layer below, keeping every dependency layer cached.
ARG DEBUG_BUILD=false

# Build Ginxsom with full static linking; fortification disabled (see above).
# DEBUG_BUILD=true keeps symbols (-g -O0) and skips stripping.
RUN if [ "$DEBUG_BUILD" = "true" ]; then \
        CFLAGS="-g -O0 -DDEBUG"; \
        STRIP_CMD=""; \
        echo "Building with DEBUG symbols enabled"; \
    else \
        CFLAGS="-O2"; \
        STRIP_CMD="strip /build/ginxsom-fcgi_static"; \
        echo "Building optimized production binary"; \
    fi && \
    gcc -static $CFLAGS -Wall -Wextra -std=gnu99 \
        -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \
        -I. -Iinclude -Inostr_core_lib -Inostr_core_lib/nostr_core \
        -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket \
        src/main.c src/admin_api.c src/admin_auth.c src/admin_event.c \
        src/admin_handlers.c src/admin_interface.c src/admin_commands.c \
        src/bud04.c src/bud06.c src/bud08.c src/bud09.c \
        src/request_validator.c src/relay_client.c \
        nostr_core_lib/nostr_core/core_relay_pool.c \
        -o /build/ginxsom-fcgi_static \
        nostr_core_lib/libnostr_core_x64.a \
        -lfcgi -lsqlite3 -lsecp256k1 -lssl -lcrypto -lcurl \
        -lnghttp2 -lcares -lidn2 -lunistring -lpsl -lbrotlidec -lbrotlicommon \
        -lz -lpthread -lm -ldl && \
    eval "$STRIP_CMD"

# Sanity check: `ldd` fails (or reports "not a dynamic executable") on a truly
# static binary, so either outcome prints useful diagnostics without failing.
RUN echo "=== Binary Information ===" && \
    file /build/ginxsom-fcgi_static && \
    ls -lh /build/ginxsom-fcgi_static && \
    echo "=== Checking for dynamic dependencies ===" && \
    (ldd /build/ginxsom-fcgi_static 2>&1 || echo "Binary is static") && \
    echo "=== Build complete ==="

# Output stage - just the binary (extract with: docker build --target output -o DIR .)
FROM scratch AS output
COPY --from=builder /build/ginxsom-fcgi_static /ginxsom-fcgi_static

View File

@@ -8,15 +8,24 @@ BUILDDIR = build
TARGET = $(BUILDDIR)/ginxsom-fcgi
# Source files
SOURCES = $(SRCDIR)/main.c $(SRCDIR)/admin_api.c $(SRCDIR)/admin_auth.c $(SRCDIR)/admin_event.c $(SRCDIR)/admin_handlers.c $(SRCDIR)/bud04.c $(SRCDIR)/bud06.c $(SRCDIR)/bud08.c $(SRCDIR)/bud09.c $(SRCDIR)/request_validator.c $(SRCDIR)/relay_client.c $(SRCDIR)/admin_commands.c
SOURCES = $(SRCDIR)/main.c $(SRCDIR)/admin_api.c $(SRCDIR)/admin_auth.c $(SRCDIR)/admin_event.c $(SRCDIR)/admin_handlers.c $(SRCDIR)/admin_interface.c $(SRCDIR)/bud04.c $(SRCDIR)/bud06.c $(SRCDIR)/bud08.c $(SRCDIR)/bud09.c $(SRCDIR)/request_validator.c $(SRCDIR)/relay_client.c $(SRCDIR)/admin_commands.c
OBJECTS = $(SOURCES:$(SRCDIR)/%.c=$(BUILDDIR)/%.o)
# Embedded web interface files
EMBEDDED_HEADER = $(SRCDIR)/admin_interface_embedded.h
EMBED_SCRIPT = scripts/embed_web_files.sh
# Add core_relay_pool.c from nostr_core_lib
POOL_SRC = nostr_core_lib/nostr_core/core_relay_pool.c
POOL_OBJ = $(BUILDDIR)/core_relay_pool.o
# Default target
all: $(TARGET)
all: $(EMBEDDED_HEADER) $(TARGET)
# Generate embedded web interface files
$(EMBEDDED_HEADER): $(EMBED_SCRIPT) api/*.html api/*.css api/*.js
@echo "Embedding web interface files..."
@$(EMBED_SCRIPT)
# Create build directory
$(BUILDDIR):
@@ -34,9 +43,18 @@ $(POOL_OBJ): $(POOL_SRC) | $(BUILDDIR)
$(TARGET): $(OBJECTS) $(POOL_OBJ)
$(CC) $(OBJECTS) $(POOL_OBJ) $(LIBS) -o $@
# Clean build files
# Clean build files (preserves static binaries)
clean:
rm -rf $(BUILDDIR)
rm -f $(EMBEDDED_HEADER)
@echo "Note: Static binaries (ginxsom-fcgi_static_*) are preserved."
@echo "To remove everything: make clean-all"
# Clean everything including static binaries.
# Fix: previously this target was identical to `clean` and never actually
# removed the ginxsom-fcgi_static_* binaries it claimed to delete.
clean-all:
	rm -rf $(BUILDDIR)
	rm -f $(EMBEDDED_HEADER)
	rm -f ginxsom-fcgi_static_*
	@echo "✓ All build artifacts removed"
# Install (copy to system location)
install: $(TARGET)
@@ -55,4 +73,16 @@ run: $(TARGET)
debug: CFLAGS += -g -DDEBUG
debug: $(TARGET)
.PHONY: all clean install uninstall run debug
# Rebuild embedded files
embed:
@$(EMBED_SCRIPT)
# Static MUSL build via Docker
static:
./build_static.sh
# Static MUSL build with debug symbols
static-debug:
./build_static.sh --debug
.PHONY: all clean clean-all install uninstall run debug embed static static-debug

58
api/embedded.html Normal file
View File

@@ -0,0 +1,58 @@
<!-- Minimal host page for embedding the NOSTR_LOGIN_LITE login widget
     in "seamless" mode: no chrome of its own, just a centered container. -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Embedded NOSTR_LOGIN_LITE</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
margin: 0;
padding: 40px;
background: white;
display: flex;
justify-content: center;
align-items: center;
min-height: 100vh;
}
.container {
max-width: 400px;
width: 100%;
}
#login-container {
/* No styling - let embedded modal blend seamlessly */
}
</style>
</head>
<body>
<div class="container">
<!-- The login widget is injected into this element by embed() below -->
<div id="login-container"></div>
</div>
<!-- nostr-tools bundle must load before the lite library; both are served
     from the sibling lite/ directory relative to this page -->
<script src="../lite/nostr.bundle.js"></script>
<script src="../lite/nostr-lite.js"></script>
<script>
// Initialize the login library with every auth method enabled, then embed
// the widget seamlessly (no modal frame) into #login-container.
document.addEventListener('DOMContentLoaded', async () => {
await window.NOSTR_LOGIN_LITE.init({
theme:'default',
methods: {
extension: true,
local: true,
seedphrase: true,
readonly: true,
connect: true,
remote: true,
otp: true
}
});
window.NOSTR_LOGIN_LITE.embed('#login-container', {
seamless: true
});
});
</script>
</body>
</html>

1310
api/index.css Normal file

File diff suppressed because it is too large Load Diff

425
api/index.html Normal file
View File

@@ -0,0 +1,425 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Blossom Admin</title>
<link rel="stylesheet" href="/api/index.css">
</head>
<body>
<!-- Side Navigation Menu -->
<nav class="side-nav" id="side-nav">
<ul class="nav-menu">
<li><button class="nav-item" data-page="statistics">Statistics</button></li>
<li><button class="nav-item" data-page="configuration">Configuration</button></li>
<li><button class="nav-item" data-page="authorization">Authorization</button></li>
<li><button class="nav-item" data-page="relay-events">Blossom Events</button></li>
<li><button class="nav-item" data-page="database">Database Query</button></li>
</ul>
<div class="nav-footer">
<button class="nav-footer-btn" id="nav-dark-mode-btn">DARK MODE</button>
<button class="nav-footer-btn" id="nav-logout-btn">LOGOUT</button>
</div>
</nav>
<!-- Side Navigation Overlay -->
<div class="side-nav-overlay" id="side-nav-overlay"></div>
<!-- Header with title and profile display -->
<div class="section">
<div class="header-content">
<div class="header-title clickable" id="header-title">
<span class="relay-letter" data-letter="B">B</span>
<span class="relay-letter" data-letter="L">L</span>
<span class="relay-letter" data-letter="O">O</span>
<span class="relay-letter" data-letter="S">S</span>
<span class="relay-letter" data-letter="S">S</span>
<span class="relay-letter" data-letter="O">O</span>
<span class="relay-letter" data-letter="M">M</span>
</div>
<div class="relay-info">
<div id="relay-name" class="relay-name">Blossom</div>
<div id="relay-description" class="relay-description">Loading...</div>
<div id="relay-pubkey-container" class="relay-pubkey-container">
<div id="relay-pubkey" class="relay-pubkey">Loading...</div>
</div>
</div>
<div class="profile-area" id="profile-area" style="display: none;">
<div class="admin-label">admin</div>
<div class="profile-container">
<img id="header-user-image" class="header-user-image" alt="Profile" style="display: none;">
<span id="header-user-name" class="header-user-name">Loading...</span>
</div>
<!-- Logout dropdown -->
<!-- Dropdown menu removed - buttons moved to sidebar -->
</div>
</div>
</div>
<!-- Login Modal Overlay -->
<div id="login-modal" class="login-modal-overlay" style="display: none;">
<div class="login-modal-content">
<div id="login-modal-container"></div>
</div>
</div>
<!-- DATABASE STATISTICS Section -->
<!-- Subscribe to kind 24567 events to receive real-time monitoring data -->
<div class="section flex-section" id="databaseStatisticsSection" style="display: none;">
<div class="section-header">
DATABASE STATISTICS
</div>
<!-- Blob Rate Graph Container -->
<div id="event-rate-chart"></div>
<!-- Database Overview Table -->
<div class="input-group">
<div class="config-table-container">
<table class="config-table" id="stats-overview-table">
<thead>
<tr>
<th>Metric</th>
<th>Value</th>
</tr>
</thead>
<tbody id="stats-overview-table-body">
<tr>
<td>Database Size</td>
<td id="db-size">-</td>
</tr>
<tr>
<td>Total Blobs</td>
<td id="total-events">-</td>
</tr>
<tr>
<td>Total Size</td>
<td id="total-size">-</td>
</tr>
<tr>
<td>Process ID</td>
<td id="process-id">-</td>
</tr>
<tr>
<td>Memory Usage</td>
<td id="memory-usage">-</td>
</tr>
<tr>
<td>CPU Core</td>
<td id="cpu-core">-</td>
</tr>
<tr>
<td>CPU Usage</td>
<td id="cpu-usage">-</td>
</tr>
<tr>
<td>Oldest Blob</td>
<td id="oldest-event">-</td>
</tr>
<tr>
<td>Newest Blob</td>
<td id="newest-event">-</td>
</tr>
</tbody>
</table>
</div>
</div>
<!-- Blob Type Distribution Table -->
<div class="input-group">
<label>Blob Type Distribution:</label>
<div class="config-table-container">
<table class="config-table" id="stats-kinds-table">
<thead>
<tr>
<th>Blob Type</th>
<th>Count</th>
<th>Percentage</th>
</tr>
</thead>
<tbody id="stats-kinds-table-body">
<tr>
<td colspan="3" style="text-align: center; font-style: italic;">No data loaded</td>
</tr>
</tbody>
</table>
</div>
</div>
<!-- Time-based Statistics Table -->
<div class="input-group">
<label>Time-based Statistics:</label>
<div class="config-table-container">
<table class="config-table" id="stats-time-table">
<thead>
<tr>
<th>Period</th>
<th>Blobs</th>
</tr>
</thead>
<tbody id="stats-time-table-body">
<tr>
<td>Last 24 Hours</td>
<td id="events-24h">-</td>
</tr>
<tr>
<td>Last 7 Days</td>
<td id="events-7d">-</td>
</tr>
<tr>
<td>Last 30 Days</td>
<td id="events-30d">-</td>
</tr>
</tbody>
</table>
</div>
</div>
<!-- Top Pubkeys Table.
     Fixes: placeholder row used colspan="4" under a 5-column header, leaving
     the last column visually detached; label said "Event Count" although the
     column was renamed "Blob Count" in the events->blobs terminology sweep. -->
<div class="input-group">
<label>Top Pubkeys by Blob Count:</label>
<div class="config-table-container">
<table class="config-table" id="stats-pubkeys-table">
<thead>
<tr>
<th>Rank</th>
<th>Pubkey</th>
<th>Blob Count</th>
<th>Total Size</th>
<th>Percentage</th>
</tr>
</thead>
<tbody id="stats-pubkeys-table-body">
<tr>
<td colspan="5" style="text-align: center; font-style: italic;">No data loaded</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
<!-- Testing Section -->
<div id="div_config" class="section flex-section" style="display: none;">
<div class="section-header">
BLOSSOM CONFIGURATION
</div>
<div id="config-display" class="hidden">
<div class="config-table-container">
<table class="config-table" id="config-table">
<thead>
<tr>
<th>Parameter</th>
<th>Value</th>
<th>Actions</th>
</tr>
</thead>
<tbody id="config-table-body">
</tbody>
</table>
</div>
<div class="inline-buttons">
<button type="button" id="fetch-config-btn">REFRESH</button>
</div>
</div>
</div>
<!-- Auth Rules Management - Moved after configuration -->
<div class="section flex-section" id="authRulesSection" style="display: none;">
<div class="section-header">
AUTH RULES MANAGEMENT
</div>
<!-- Auth Rules Table -->
<div id="authRulesTableContainer" style="display: none;">
<table class="config-table" id="authRulesTable">
<thead>
<tr>
<th>Rule Type</th>
<th>Pattern Type</th>
<th>Pattern Value</th>
<th>Status</th>
<th>Actions</th>
</tr>
</thead>
<tbody id="authRulesTableBody">
</tbody>
</table>
</div>
<!-- Simplified Auth Rule Input Section -->
<div id="authRuleInputSections" style="display: block;">
<!-- Combined Pubkey Auth Rule Section -->
<div class="input-group">
<label for="authRulePubkey">Pubkey (nsec or hex):</label>
<input type="text" id="authRulePubkey" placeholder="nsec1... or 64-character hex pubkey">
</div>
<div id="whitelistWarning" class="warning-box" style="display: none;">
<strong>⚠️ WARNING:</strong> Adding whitelist rules changes relay behavior to whitelist-only
mode.
Only whitelisted users will be able to interact with the relay.
</div>
<div class="inline-buttons">
<button type="button" id="addWhitelistBtn" onclick="addWhitelistRule()">ADD TO
WHITELIST</button>
<button type="button" id="addBlacklistBtn" onclick="addBlacklistRule()">ADD TO
BLACKLIST</button>
<button type="button" id="refreshAuthRulesBtn">REFRESH</button>
</div>
</div>
</div>
<!-- BLOSSOM EVENTS Section -->
<div class="section" id="relayEventsSection" style="display: none;">
<div class="section-header">
BLOSSOM EVENTS MANAGEMENT
</div>
<!-- Kind 0: User Metadata -->
<div class="input-group">
<h3>Kind 0: User Metadata</h3>
<div class="form-group">
<label for="kind0-name">Name:</label>
<input type="text" id="kind0-name" placeholder="Blossom Server Name">
</div>
<div class="form-group">
<label for="kind0-about">About:</label>
<textarea id="kind0-about" rows="3" placeholder="Blossom Server Description"></textarea>
</div>
<div class="form-group">
<label for="kind0-picture">Picture URL:</label>
<input type="url" id="kind0-picture" placeholder="https://example.com/logo.png">
</div>
<div class="form-group">
<label for="kind0-banner">Banner URL:</label>
<input type="url" id="kind0-banner" placeholder="https://example.com/banner.png">
</div>
<div class="form-group">
<label for="kind0-nip05">NIP-05:</label>
<input type="text" id="kind0-nip05" placeholder="blossom@example.com">
</div>
<div class="form-group">
<label for="kind0-website">Website:</label>
<input type="url" id="kind0-website" placeholder="https://example.com">
</div>
<div class="inline-buttons">
<button type="button" id="submit-kind0-btn">UPDATE METADATA</button>
</div>
<div id="kind0-status" class="status-message"></div>
</div>
<!-- Kind 10050: DM Blossom List -->
<div class="input-group">
<h3>Kind 10050: DM Blossom List</h3>
<div class="form-group">
<label for="kind10050-relays">Blossom URLs (one per line):</label>
<textarea id="kind10050-relays" rows="4" placeholder="https://blossom1.com&#10;https://blossom2.com"></textarea>
</div>
<div class="inline-buttons">
<button type="button" id="submit-kind10050-btn">UPDATE DM BLOSSOM SERVERS</button>
</div>
<div id="kind10050-status" class="status-message"></div>
</div>
<!-- Kind 10002: Blossom List -->
<div class="input-group">
<h3>Kind 10002: Blossom Server List</h3>
<div id="kind10002-relay-entries">
<!-- Dynamic blossom server entries will be added here -->
</div>
<div class="inline-buttons">
<button type="button" id="add-relay-entry-btn">ADD SERVER</button>
<button type="button" id="submit-kind10002-btn">UPDATE SERVERS</button>
</div>
<div id="kind10002-status" class="status-message"></div>
</div>
</div>
<!-- SQL QUERY Section -->
<div class="section" id="sqlQuerySection" style="display: none;">
<div class="section-header">
<h2>SQL QUERY CONSOLE</h2>
</div>
<!-- Query Selector -->
<div class="input-group">
<label for="query-dropdown">Quick Queries & History:</label>
<select id="query-dropdown" onchange="loadSelectedQuery()">
<option value="">-- Select a query --</option>
<optgroup label="Common Queries">
<option value="recent_events">Recent Events</option>
<option value="event_stats">Event Statistics</option>
<option value="subscriptions">Active Subscriptions</option>
<option value="top_pubkeys">Top Pubkeys</option>
<option value="event_kinds">Event Kinds Distribution</option>
<option value="time_stats">Time-based Statistics</option>
</optgroup>
<optgroup label="Query History" id="history-group">
<!-- Dynamically populated from localStorage -->
</optgroup>
</select>
</div>
<!-- Query Editor -->
<div class="input-group">
<label for="sql-input">SQL Query:</label>
<textarea id="sql-input" rows="5" placeholder="SELECT * FROM events LIMIT 10"></textarea>
</div>
<!-- Query Actions -->
<div class="input-group">
<div class="inline-buttons">
<button type="button" id="execute-sql-btn">EXECUTE QUERY</button>
<button type="button" id="clear-sql-btn">CLEAR</button>
<button type="button" id="clear-history-btn">CLEAR HISTORY</button>
</div>
</div>
<!-- Query Results -->
<div class="input-group">
<label>Query Results:</label>
<div id="query-info" class="info-box"></div>
<div id="query-table" class="config-table-container"></div>
</div>
</div>
<!-- Load the official nostr-tools bundle first -->
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr.bundle.js"></script> -->
<script src="/api/nostr.bundle.js"></script>
<!-- Load NOSTR_LOGIN_LITE main library -->
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr-lite.js"></script> -->
<script src="/api/nostr-lite.js"></script>
<!-- Load text_graph library -->
<script src="/api/text_graph.js"></script>
<script src="/api/index.js"></script>
</body>
</html>

5832
api/index.js Normal file

File diff suppressed because it is too large Load Diff

4282
api/nostr-lite.js Normal file

File diff suppressed because it is too large Load Diff

11534
api/nostr.bundle.js Normal file

File diff suppressed because it is too large Load Diff

463
api/text_graph.js Normal file
View File

@@ -0,0 +1,463 @@
/**
 * ASCIIBarChart - A dynamic ASCII-based vertical bar chart renderer.
 *
 * Creates real-time animated bar charts using monospaced 'X' characters
 * with automatic scaling, labels, and responsive font sizing.
 *
 * Two modes:
 *  - Bin mode (default): addValue() accumulates counts into fixed-duration
 *    time bins, which rotate automatically on a 100 ms timer.
 *  - Legacy mode (useBinMode: false): each addValue() call is one column.
 *
 * External info elements (#values, #max-value, #scale, #count) are updated
 * only when present in the DOM, and silently skipped otherwise, so the chart
 * can be used on pages that do not provide the debug readouts.
 */
class ASCIIBarChart {
    /**
     * Create a new ASCII bar chart.
     * @param {string} containerId - ID of the HTML element to render the chart in
     * @param {Object} options - Configuration options
     * @param {number} [options.maxHeight=20] - Maximum height of the chart in rows
     * @param {number} [options.maxDataPoints=30] - Maximum number of data columns before scrolling
     * @param {string} [options.title=''] - Chart title (displayed centered at top)
     * @param {string} [options.xAxisLabel=''] - X-axis label (displayed centered at bottom)
     * @param {string} [options.yAxisLabel=''] - Y-axis label (displayed vertically on left)
     * @param {boolean} [options.autoFitWidth=true] - Automatically adjust font size to fit container width
     * @param {boolean} [options.useBinMode=true] - Enable time bin mode (note: defaults to true, not false)
     * @param {number} [options.binDuration=4000] - Duration of each time bin in milliseconds
     * @param {string} [options.xAxisLabelFormat='elapsed'] - 'elapsed' | 'bins' | 'timestamps' | 'ranges'
     * @param {boolean} [options.debug=false] - Enable debug logging
     */
    constructor(containerId, options = {}) {
        this.container = document.getElementById(containerId);
        this.data = [];
        this.maxHeight = options.maxHeight || 20;
        this.maxDataPoints = options.maxDataPoints || 30;
        this.totalDataPoints = 0; // Total number of data points ever added
        this.title = options.title || '';
        this.xAxisLabel = options.xAxisLabel || '';
        this.yAxisLabel = options.yAxisLabel || '';
        this.autoFitWidth = options.autoFitWidth !== false; // Default to true
        this.debug = options.debug || false;

        // Time bin configuration (bin mode is the default)
        this.useBinMode = options.useBinMode !== false;
        this.binDuration = options.binDuration || 4000; // 4 seconds default
        this.xAxisLabelFormat = options.xAxisLabelFormat || 'elapsed';

        // Time bin data structures
        this.bins = [];
        this.currentBinIndex = -1;
        this.binStartTime = null;
        this.binCheckInterval = null;
        this.chartStartTime = Date.now();

        // Re-fit the font whenever the container resizes
        if (this.autoFitWidth) {
            this.resizeObserver = new ResizeObserver(() => {
                this.adjustFontSize();
            });
            this.resizeObserver.observe(this.container);
        }

        // Initialize first bin if bin mode is enabled
        if (this.useBinMode) {
            this.initializeBins();
        }
    }

    /**
     * Add a new data point to the chart.
     * In bin mode the value is added to the current bin's count; in legacy
     * mode it becomes its own column. (Fix: the original unconditionally
     * wrote to this.bins, throwing a TypeError in legacy mode where
     * currentBinIndex stays -1.)
     * @param {number} value - The numeric value to add
     */
    addValue(value) {
        if (this.useBinMode) {
            this.checkBinRotation(); // Ensure we have an active bin
            this.bins[this.currentBinIndex].count += value;
        } else {
            // Legacy mode: one column per call, trimmed to the visible window
            this.data.push(value);
            if (this.data.length > this.maxDataPoints) {
                this.data.shift();
            }
        }
        this.totalDataPoints++;
        this.render();
        this.updateInfo();
    }

    /**
     * Clear all data from the chart and restart binning if enabled.
     */
    clear() {
        this.data = [];
        this.totalDataPoints = 0;
        if (this.useBinMode) {
            this.bins = [];
            this.currentBinIndex = -1;
            this.binStartTime = null;
            this.initializeBins();
        }
        this.render();
        this.updateInfo();
    }

    /**
     * Stop timers and observers. Call when the chart is removed from the page
     * to avoid leaking the 100 ms bin-rotation interval and the ResizeObserver.
     */
    destroy() {
        if (this.binCheckInterval) {
            clearInterval(this.binCheckInterval);
            this.binCheckInterval = null;
        }
        if (this.resizeObserver) {
            this.resizeObserver.disconnect();
            this.resizeObserver = null;
        }
    }

    /**
     * Calculate the width of the chart in characters.
     * @returns {number} The chart width in characters
     * @private
     */
    getChartWidth() {
        let dataLength = this.maxDataPoints; // Always use maxDataPoints for consistent width
        if (dataLength === 0) return 50; // Default width for empty chart
        const yAxisPadding = this.yAxisLabel ? 2 : 0;
        const yAxisNumbers = 3; // Width of Y-axis numbers
        const separator = 1; // The '|' character
        const dataWidth = dataLength; // Each column is 1 character wide (no-space columns)
        const padding = 1; // Extra padding
        const totalWidth = yAxisPadding + yAxisNumbers + separator + dataWidth + padding;
        // Only log when width changes
        if (this.debug && this.lastChartWidth !== totalWidth) {
            console.log('getChartWidth changed:', { dataLength, totalWidth, previous: this.lastChartWidth });
            this.lastChartWidth = totalWidth;
        }
        return totalWidth;
    }

    /**
     * Adjust font size so the full chart width fits the container.
     * @private
     */
    adjustFontSize() {
        if (!this.autoFitWidth) return;
        const containerWidth = this.container.clientWidth;
        const chartWidth = this.getChartWidth();
        if (chartWidth === 0) return;
        // For monospace fonts, character width is approximately 0.6-0.7 * font size
        const charWidthRatio = 0.7;
        const padding = 30; // Keep padding small to fit more content
        const availableWidth = containerWidth - padding;
        const optimalFontSize = Math.floor((availableWidth / chartWidth) / charWidthRatio);
        // Clamp to reasonable bounds (min 4px, max 20px)
        const fontSize = Math.max(4, Math.min(20, optimalFontSize));
        // Only log when font size changes
        if (this.debug && this.lastFontSize !== fontSize) {
            console.log('fontSize changed:', { containerWidth, chartWidth, fontSize, previous: this.lastFontSize });
            this.lastFontSize = fontSize;
        }
        this.container.style.fontSize = fontSize + 'px';
        this.container.style.lineHeight = '1.0';
    }

    /**
     * Safely update an optional external info element; no-op when absent.
     * (Fix: the original dereferenced getElementById() results directly, which
     * threw on pages that do not include the #values/#scale/#count elements.)
     * @param {string} id - Element ID to update
     * @param {*} text - Content assigned to textContent
     * @private
     */
    _setText(id, text) {
        const el = document.getElementById(id);
        if (el) el.textContent = text;
    }

    /**
     * Render the chart into the container as preformatted text.
     * @private
     */
    render() {
        let dataToRender = [];
        let maxValue = 0;
        let minValue = 0;
        if (this.useBinMode) {
            // Bin mode: render bin counts
            if (this.bins.length === 0) {
                this.container.textContent = 'No data yet. Click Start to begin.';
                return;
            }
            // Fixed-length array of zeros, overlaid with the most recent bins
            // reversed so the newest bin appears at the left edge.
            dataToRender = new Array(this.maxDataPoints).fill(0);
            const startIndex = Math.max(0, this.bins.length - this.maxDataPoints);
            const recentBins = this.bins.slice(startIndex);
            recentBins.reverse().forEach((bin, index) => {
                if (index < this.maxDataPoints) {
                    dataToRender[index] = bin.count;
                }
            });
            if (this.debug) {
                console.log('render() dataToRender:', dataToRender, 'bins length:', this.bins.length);
            }
            maxValue = Math.max(...dataToRender);
            minValue = Math.min(...dataToRender);
        } else {
            // Legacy mode: render individual values
            if (this.data.length === 0) {
                this.container.textContent = 'No data yet. Click Start to begin.';
                return;
            }
            dataToRender = this.data;
            maxValue = Math.max(...this.data);
            minValue = Math.min(...this.data);
        }

        let output = '';
        const scale = this.maxHeight;
        // Scaling factor: each 'X' represents scaleFactor counts (at least 1)
        const maxCount = Math.max(...dataToRender);
        const scaleFactor = Math.max(1, Math.ceil(maxCount / scale));

        // Y-axis label padding (vertical text occupies 2 columns when present)
        const yAxisPadding = this.yAxisLabel ? '  ' : '';

        // Title, centered over the data area
        if (this.title) {
            const chartWidth = 4 + this.maxDataPoints; // Y-axis numbers + data columns
            const titlePadding = Math.floor((chartWidth - this.title.length) / 2);
            output += yAxisPadding + ' '.repeat(Math.max(0, titlePadding)) + this.title + '\n\n';
        }

        // Draw rows from top to bottom
        for (let row = scale; row > 0; row--) {
            let line = '';
            // Vertical Y-axis label: one character per row, centered vertically
            if (this.yAxisLabel) {
                const L = this.yAxisLabel.length;
                const startRow = Math.floor((scale - L) / 2) + 1;
                const relativeRow = scale - row + 1; // 1 at top, scale at bottom
                if (relativeRow >= startRow && relativeRow < startRow + L) {
                    const labelIndex = relativeRow - startRow;
                    line += this.yAxisLabel[labelIndex] + ' ';
                } else {
                    line += '  ';
                }
            }
            // Actual count value this row represents (1 at bottom, increasing upward)
            const rowCount = (row - 1) * scaleFactor + 1;
            line += String(rowCount).padStart(3, ' ') + ' |';
            // Draw each column: 'X' when the scaled bar reaches this row
            for (let i = 0; i < dataToRender.length; i++) {
                const count = dataToRender[i];
                const scaledHeight = Math.ceil(count / scaleFactor);
                line += scaledHeight >= row ? 'X' : ' ';
            }
            output += line + '\n';
        }

        // X-axis line
        output += yAxisPadding + '    +' + '-'.repeat(this.maxDataPoints) + '\n';

        // X-axis labels, one every labelInterval columns
        let xAxisLabels = yAxisPadding + '     ';
        const labelInterval = 5;
        let labels = [];
        for (let i = 0; i < this.maxDataPoints; i++) {
            if (i % labelInterval === 0) {
                let label = '';
                if (this.useBinMode) {
                    // i=0 is leftmost (most recent); show elapsed age of that slot
                    const elapsedSec = (i * this.binDuration) / 1000;
                    if (this.binDuration < 1000) {
                        label = elapsedSec.toFixed(1) + 's'; // decimal for sub-second bins
                    } else {
                        label = String(Math.round(elapsedSec)) + 's';
                    }
                } else {
                    // Legacy mode: show data point numbers
                    const startIndex = Math.max(1, this.totalDataPoints - this.maxDataPoints + 1);
                    label = String(startIndex + i);
                }
                labels.push(label);
            }
        }
        for (let i = 0; i < labels.length; i++) {
            const label = labels[i];
            xAxisLabels += label;
            // Spacing between labels: labelInterval minus the label's own width
            if (i < labels.length - 1) {
                const spacing = labelInterval - label.length;
                xAxisLabels += ' '.repeat(spacing);
            }
        }
        // Pad the label line out to the dash-line length for alignment
        const minLabelLineLength = yAxisPadding.length + 4 + this.maxDataPoints;
        if (xAxisLabels.length < minLabelLineLength) {
            xAxisLabels += ' '.repeat(minLabelLineLength - xAxisLabels.length);
        }
        output += xAxisLabels + '\n';

        // Optional X-axis caption, centered under the data area
        if (this.xAxisLabel) {
            const labelPadding = Math.floor((this.maxDataPoints - this.xAxisLabel.length) / 2);
            output += '\n' + yAxisPadding + '     ' + ' '.repeat(Math.max(0, labelPadding)) + this.xAxisLabel + '\n';
        }

        this.container.textContent = output;
        if (this.autoFitWidth) {
            this.adjustFontSize();
        }

        // Update the optional external debug readouts (skipped when absent)
        if (this.useBinMode) {
            const binScaleFactor = Math.max(1, Math.ceil(maxValue / scale));
            this._setText('values', `[${dataToRender.join(', ')}]`);
            this._setText('max-value', maxValue);
            this._setText('scale', `Min: ${minValue}, Max: ${maxValue}, 1X=${binScaleFactor} counts`);
        } else {
            this._setText('values', `[${this.data.join(', ')}]`);
            this._setText('max-value', maxValue);
            this._setText('scale', `Min: ${minValue}, Max: ${maxValue}, Height: ${scale}`);
        }
    }

    /**
     * Update the optional #count element with the total observed count.
     * @private
     */
    updateInfo() {
        if (this.useBinMode) {
            const totalCount = this.bins.reduce((sum, bin) => sum + bin.count, 0);
            this._setText('count', totalCount);
        } else {
            this._setText('count', this.data.length);
        }
    }

    /**
     * Initialize (or re-initialize) the bin system.
     * (Fix: the original installed a new setInterval on every clear() without
     * clearing the previous one, stacking timers.)
     * @private
     */
    initializeBins() {
        if (this.binCheckInterval) {
            clearInterval(this.binCheckInterval);
            this.binCheckInterval = null;
        }
        this.bins = [];
        this.currentBinIndex = -1;
        this.binStartTime = null;
        this.chartStartTime = Date.now();
        // Create first bin
        this.rotateBin();
        // Check every 100ms so rotation stays responsive
        this.binCheckInterval = setInterval(() => {
            this.checkBinRotation();
        }, 100);
    }

    /**
     * Rotate to a new bin when the current one's duration has elapsed.
     * @private
     */
    checkBinRotation() {
        if (!this.useBinMode || !this.binStartTime) return;
        const now = Date.now();
        if ((now - this.binStartTime) >= this.binDuration) {
            this.rotateBin();
        }
    }

    /**
     * Rotate to a new bin, finalizing the current one and trimming history
     * to maxDataPoints bins.
     */
    rotateBin() {
        // Finalize current bin if it exists
        if (this.currentBinIndex >= 0) {
            this.bins[this.currentBinIndex].isActive = false;
        }
        const newBin = {
            startTime: Date.now(),
            count: 0,
            isActive: true
        };
        this.bins.push(newBin);
        this.binStartTime = newBin.startTime;
        // Keep only the most recent bins
        if (this.bins.length > this.maxDataPoints) {
            this.bins.shift();
        }
        // The active bin is always the last one
        this.currentBinIndex = this.bins.length - 1;
        // Update the display immediately
        this.render();
        this.updateInfo();
    }

    /**
     * Format an X-axis label for a bin based on the configured format.
     * NOTE(review): currently not called by render(), which computes elapsed
     * labels inline; kept for external callers / future formats.
     * @param {number} binIndex - Index of the bin
     * @returns {string} Formatted label
     * @private
     */
    formatBinLabel(binIndex) {
        const bin = this.bins[binIndex];
        if (!bin) return '  ';
        switch (this.xAxisLabelFormat) {
            case 'bins':
                return String(binIndex + 1).padStart(2, ' ');
            case 'timestamps': {
                const time = new Date(bin.startTime);
                return time.toLocaleTimeString('en-US', {
                    hour12: false,
                    hour: '2-digit',
                    minute: '2-digit',
                    second: '2-digit'
                }).replace(/:/g, '');
            }
            case 'ranges': {
                const startSec = Math.floor((bin.startTime - this.chartStartTime) / 1000);
                const endSec = startSec + Math.floor(this.binDuration / 1000);
                return `${startSec}-${endSec}`;
            }
            case 'elapsed':
            default: {
                // Elapsed time relative to the first retained bin, so the
                // leftmost label stays 0s and increases to the right
                const firstBinTime = this.bins[0] ? this.bins[0].startTime : this.chartStartTime;
                const elapsedSec = Math.floor((bin.startTime - firstBinTime) / 1000);
                return String(elapsedSec).padStart(2, ' ') + 's';
            }
        }
    }
}

Binary file not shown.

BIN
build/admin_interface.o Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

223
build_static.sh Executable file
View File

@@ -0,0 +1,223 @@
#!/bin/bash
# Build fully static MUSL binaries for Ginxsom using Alpine Docker
# Produces truly portable binaries with zero runtime dependencies
#
# Usage:
#   ./build_static.sh           # production: optimized + stripped
#   ./build_static.sh --debug   # debug: symbols, no optimization
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BUILD_DIR="$SCRIPT_DIR/build"
DOCKERFILE="$SCRIPT_DIR/Dockerfile.alpine-musl"

# Parse command line arguments
DEBUG_BUILD=false
if [[ "$1" == "--debug" ]]; then
    DEBUG_BUILD=true
    echo "=========================================="
    echo "Ginxsom MUSL Static Binary Builder (DEBUG MODE)"
    echo "=========================================="
else
    echo "=========================================="
    echo "Ginxsom MUSL Static Binary Builder (PRODUCTION MODE)"
    echo "=========================================="
fi
echo "Project directory: $SCRIPT_DIR"
echo "Build directory: $BUILD_DIR"
echo "Debug build: $DEBUG_BUILD"
echo ""

# Create build directory
mkdir -p "$BUILD_DIR"

# Check if Docker is available
if ! command -v docker &> /dev/null; then
    echo "ERROR: Docker is not installed or not in PATH"
    echo ""
    echo "Docker is required to build MUSL static binaries."
    echo "Please install Docker:"
    echo "  - Ubuntu/Debian: sudo apt install docker.io"
    echo "  - Or visit: https://docs.docker.com/engine/install/"
    echo ""
    exit 1
fi

# Check if Docker daemon is running
if ! docker info &> /dev/null; then
    echo "ERROR: Docker daemon is not running or user not in docker group"
    echo ""
    echo "Please start Docker and ensure you're in the docker group:"
    echo "  - sudo systemctl start docker"
    echo "  - sudo usermod -aG docker $USER && newgrp docker"
    echo "  - Or start Docker Desktop"
    echo ""
    exit 1
fi
DOCKER_CMD="docker"
echo "✓ Docker is available and running"
echo ""

# Detect architecture (the output binary name encodes the target arch,
# matching the names deploy_static.sh and run.sh look for)
ARCH=$(uname -m)
case "$ARCH" in
    x86_64)
        PLATFORM="linux/amd64"
        OUTPUT_NAME="ginxsom-fcgi_static_x86_64"
        ;;
    aarch64|arm64)
        PLATFORM="linux/arm64"
        OUTPUT_NAME="ginxsom-fcgi_static_arm64"
        ;;
    *)
        echo "WARNING: Unknown architecture: $ARCH"
        echo "Defaulting to linux/amd64"
        PLATFORM="linux/amd64"
        OUTPUT_NAME="ginxsom-fcgi_static_${ARCH}"
        ;;
esac
echo "Building for platform: $PLATFORM"
echo "Output binary: $OUTPUT_NAME"
echo ""

# Build the Docker image (all stages)
echo "=========================================="
echo "Step 1: Building Alpine Docker image"
echo "=========================================="
echo "This will:"
echo "  - Use Alpine Linux (native MUSL)"
echo "  - Build all dependencies statically"
echo "  - Compile Ginxsom with full static linking"
echo ""
$DOCKER_CMD build \
    --platform "$PLATFORM" \
    --build-arg DEBUG_BUILD=$DEBUG_BUILD \
    -f "$DOCKERFILE" \
    -t ginxsom-musl-builder:latest \
    --progress=plain \
    . || {
    echo ""
    echo "ERROR: Docker build failed"
    echo "Check the output above for details"
    exit 1
}
echo ""
echo "✓ Docker image built successfully"
echo ""

# Extract the binary from the container
echo "=========================================="
echo "Step 2: Extracting static binary"
echo "=========================================="
# Build just the builder stage (fully cached from Step 1, so this is fast).
# FIX: the original piped this build to /dev/null with no error handler, so
# a failure here either aborted silently (set -e) or surfaced only as a
# confusing `docker create` error below. Report it explicitly instead.
$DOCKER_CMD build \
    --platform "$PLATFORM" \
    --build-arg DEBUG_BUILD=$DEBUG_BUILD \
    --target builder \
    -f "$DOCKERFILE" \
    -t ginxsom-static-builder-stage:latest \
    . > /dev/null 2>&1 || {
    echo "ERROR: Failed to build the builder stage image"
    echo "Re-run Step 1 (full build) to see the detailed error output"
    exit 1
}
# Create a temporary container to copy the binary out of
CONTAINER_ID=$($DOCKER_CMD create ginxsom-static-builder-stage:latest) || {
    echo "ERROR: Failed to create container from builder image"
    exit 1
}
# Copy binary from container
$DOCKER_CMD cp "$CONTAINER_ID:/build/ginxsom-fcgi_static" "$BUILD_DIR/$OUTPUT_NAME" || {
    echo "ERROR: Failed to extract binary from container"
    $DOCKER_CMD rm "$CONTAINER_ID" 2>/dev/null
    exit 1
}
# Clean up container
$DOCKER_CMD rm "$CONTAINER_ID" > /dev/null
echo "✓ Binary extracted to: $BUILD_DIR/$OUTPUT_NAME"
echo ""

# Make binary executable
chmod +x "$BUILD_DIR/$OUTPUT_NAME"

# Verify the binary
echo "=========================================="
echo "Step 3: Verifying static binary"
echo "=========================================="
echo ""
echo "Checking for dynamic dependencies:"
# NOTE: ldd can hang on some static binaries, hence the timeout guard.
if LDD_OUTPUT=$(timeout 5 ldd "$BUILD_DIR/$OUTPUT_NAME" 2>&1); then
    if echo "$LDD_OUTPUT" | grep -q "not a dynamic executable"; then
        echo "✓ Binary is fully static (no dynamic dependencies)"
        TRULY_STATIC=true
    elif echo "$LDD_OUTPUT" | grep -q "statically linked"; then
        echo "✓ Binary is statically linked"
        TRULY_STATIC=true
    else
        echo "⚠ WARNING: Binary may have dynamic dependencies:"
        echo "$LDD_OUTPUT"
        TRULY_STATIC=false
    fi
else
    # ldd failed or timed out - check with file command instead
    if file "$BUILD_DIR/$OUTPUT_NAME" | grep -q "statically linked"; then
        echo "✓ Binary is statically linked (verified with file command)"
        TRULY_STATIC=true
    else
        echo "⚠ Could not verify static linking (ldd check failed)"
        TRULY_STATIC=false
    fi
fi
echo ""
echo "File size: $(ls -lh "$BUILD_DIR/$OUTPUT_NAME" | awk '{print $5}')"
echo ""

# Summary
echo "=========================================="
echo "Build Summary"
echo "=========================================="
echo "Binary: $BUILD_DIR/$OUTPUT_NAME"
echo "Size: $(du -h "$BUILD_DIR/$OUTPUT_NAME" | cut -f1)"
echo "Platform: $PLATFORM"
if [ "$DEBUG_BUILD" = true ]; then
    echo "Build Type: DEBUG (with symbols, no optimization)"
else
    echo "Build Type: PRODUCTION (optimized, stripped)"
fi
if [ "$TRULY_STATIC" = true ]; then
    echo "Linkage: Fully static binary (Alpine MUSL-based)"
    echo "Portability: Works on ANY Linux distribution"
else
    echo "Linkage: Static binary (may have minimal dependencies)"
fi
echo ""
echo "✓ Build complete!"
echo ""

# Clean up old dynamic build artifacts so build/ only holds static output
echo "=========================================="
echo "Cleaning up old build artifacts"
echo "=========================================="
echo ""
if ls build/*.o 2>/dev/null | grep -q .; then
    echo "Removing old .o files from dynamic builds..."
    rm -f build/*.o
    echo "✓ Cleanup complete"
else
    echo "No .o files to clean"
fi
# Also remove old dynamic binary if it exists
if [ -f "build/ginxsom-fcgi" ]; then
    echo "Removing old dynamic binary..."
    rm -f build/ginxsom-fcgi
    echo "✓ Old binary removed"
fi
echo ""
echo "Deployment:"
echo "  scp $BUILD_DIR/$OUTPUT_NAME user@server:/path/to/ginxsom/"
echo ""

View File

@@ -220,6 +220,35 @@ http {
fastcgi_param HTTP_AUTHORIZATION $http_authorization;
}
# Admin web interface (/admin)
location /admin {
if ($request_method !~ ^(GET)$) {
return 405;
}
fastcgi_pass fastcgi_backend;
fastcgi_param QUERY_STRING $query_string;
fastcgi_param REQUEST_METHOD $request_method;
fastcgi_param CONTENT_TYPE $content_type;
fastcgi_param CONTENT_LENGTH $content_length;
fastcgi_param SCRIPT_NAME $fastcgi_script_name;
fastcgi_param REQUEST_URI $request_uri;
fastcgi_param DOCUMENT_URI $document_uri;
fastcgi_param DOCUMENT_ROOT $document_root;
fastcgi_param SERVER_PROTOCOL $server_protocol;
fastcgi_param REQUEST_SCHEME $scheme;
fastcgi_param HTTPS $https if_not_empty;
fastcgi_param GATEWAY_INTERFACE CGI/1.1;
fastcgi_param SERVER_SOFTWARE nginx/$nginx_version;
fastcgi_param REMOTE_ADDR $remote_addr;
fastcgi_param REMOTE_PORT $remote_port;
fastcgi_param SERVER_ADDR $server_addr;
fastcgi_param SERVER_PORT $server_port;
fastcgi_param SERVER_NAME $server_name;
fastcgi_param REDIRECT_STATUS 200;
fastcgi_param SCRIPT_FILENAME $document_root/ginxsom.fcgi;
fastcgi_param HTTP_AUTHORIZATION $http_authorization;
}
# Admin API endpoints (/api/*)
location /api/ {
if ($request_method !~ ^(GET|PUT|POST)$) {
@@ -571,6 +600,35 @@ http {
fastcgi_param HTTP_AUTHORIZATION $http_authorization;
}
# Admin web interface (/admin)
location /admin {
if ($request_method !~ ^(GET)$) {
return 405;
}
fastcgi_pass fastcgi_backend;
fastcgi_param QUERY_STRING $query_string;
fastcgi_param REQUEST_METHOD $request_method;
fastcgi_param CONTENT_TYPE $content_type;
fastcgi_param CONTENT_LENGTH $content_length;
fastcgi_param SCRIPT_NAME $fastcgi_script_name;
fastcgi_param REQUEST_URI $request_uri;
fastcgi_param DOCUMENT_URI $document_uri;
fastcgi_param DOCUMENT_ROOT $document_root;
fastcgi_param SERVER_PROTOCOL $server_protocol;
fastcgi_param REQUEST_SCHEME $scheme;
fastcgi_param HTTPS $https if_not_empty;
fastcgi_param GATEWAY_INTERFACE CGI/1.1;
fastcgi_param SERVER_SOFTWARE nginx/$nginx_version;
fastcgi_param REMOTE_ADDR $remote_addr;
fastcgi_param REMOTE_PORT $remote_port;
fastcgi_param SERVER_ADDR $server_addr;
fastcgi_param SERVER_PORT $server_port;
fastcgi_param SERVER_NAME $server_name;
fastcgi_param REDIRECT_STATUS 200;
fastcgi_param SCRIPT_FILENAME $document_root/ginxsom.fcgi;
fastcgi_param HTTP_AUTHORIZATION $http_authorization;
}
# Admin API endpoints (/api/*)
location /api/ {
if ($request_method !~ ^(GET|PUT|POST)$) {

29213
debug.log

File diff suppressed because it is too large Load Diff

View File

@@ -73,8 +73,55 @@ print_success "Remote environment configured"
print_status "Copying files to remote server..."
# Copy entire project directory (excluding unnecessary files)
# Note: We include .git and .gitmodules to allow submodule initialization on remote
print_status "Copying entire ginxsom project..."
rsync -avz --exclude='.git' --exclude='build' --exclude='logs' --exclude='Trash' --exclude='blobs' --exclude='db' --no-g --no-o --no-perms --omit-dir-times . $REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR/
rsync -avz --exclude='build' --exclude='logs' --exclude='Trash' --exclude='blobs' --exclude='db' --no-g --no-o --no-perms --omit-dir-times . $REMOTE_USER@$REMOTE_HOST:$REMOTE_DIR/
# Initialize git submodules on remote server
print_status "Initializing git submodules on remote server..."
ssh $REMOTE_USER@$REMOTE_HOST << 'EOF'
cd /home/ubuntu/ginxsom
# Check if .git exists
if [ ! -d .git ]; then
echo "ERROR: .git directory not found - git repository not copied"
exit 1
fi
# Check if .gitmodules exists
if [ ! -f .gitmodules ]; then
echo "ERROR: .gitmodules file not found"
exit 1
fi
echo "Initializing git submodules..."
git submodule update --init --recursive
# Verify submodule was initialized
if [ ! -f nostr_core_lib/cjson/cJSON.h ]; then
echo "ERROR: Submodule initialization failed - cJSON.h not found"
echo "Checking nostr_core_lib directory:"
ls -la nostr_core_lib/ || echo "nostr_core_lib directory not found"
exit 1
fi
echo "Submodules initialized successfully"
# Build nostr_core_lib
echo "Building nostr_core_lib..."
cd nostr_core_lib
./build.sh
if [ $? -ne 0 ]; then
echo "ERROR: Failed to build nostr_core_lib"
exit 1
fi
echo "nostr_core_lib built successfully"
EOF
if [ $? -ne 0 ]; then
print_error "Failed to initialize git submodules or build nostr_core_lib"
exit 1
fi
# Build on remote server to ensure compatibility
print_status "Building ginxsom on remote server..."

162
deploy_static.sh Executable file
View File

@@ -0,0 +1,162 @@
#!/bin/bash
# Deploy the prebuilt static MUSL binary to the production server.
# Copies a single self-contained binary (no remote rebuild), restarts the
# FastCGI process via spawn-fcgi, reloads nginx, and smoke-tests /health.
set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
print_status() { echo -e "${BLUE}[INFO]${NC} $1"; }
print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
print_error() { echo -e "${RED}[ERROR]${NC} $1"; }

# Configuration
REMOTE_HOST="laantungir.net"
REMOTE_USER="ubuntu"
REMOTE_DIR="/home/ubuntu/ginxsom"
REMOTE_BINARY_PATH="/home/ubuntu/ginxsom/ginxsom-fcgi_static"
REMOTE_SOCKET="/tmp/ginxsom-fcgi.sock"
REMOTE_DATA_DIR="/var/www/html/blossom"
REMOTE_DB_PATH="/home/ubuntu/ginxsom/db/ginxsom.db"

# Detect architecture (must match the binary naming used by build_static.sh)
ARCH=$(uname -m)
case "$ARCH" in
    x86_64)
        BINARY_NAME="ginxsom-fcgi_static_x86_64"
        ;;
    aarch64|arm64)
        BINARY_NAME="ginxsom-fcgi_static_arm64"
        ;;
    *)
        print_error "Unsupported architecture: $ARCH"
        exit 1
        ;;
esac
LOCAL_BINARY="./build/$BINARY_NAME"

print_status "Starting static binary deployment to $REMOTE_HOST..."

# Check if static binary exists; build it on demand if missing
if [ ! -f "$LOCAL_BINARY" ]; then
    print_error "Static binary not found: $LOCAL_BINARY"
    print_status "Building static binary..."
    ./build_static.sh
    if [ ! -f "$LOCAL_BINARY" ]; then
        print_error "Build failed - binary still not found"
        exit 1
    fi
fi
print_success "Static binary found: $LOCAL_BINARY"
print_status "Binary size: $(du -h "$LOCAL_BINARY" | cut -f1)"

# Verify binary is static.
# FIX: ldd can hang on some static binaries, so bound it with `timeout`
# exactly as build_static.sh does (keeps the two scripts consistent).
if timeout 5 ldd "$LOCAL_BINARY" 2>&1 | grep -q "not a dynamic executable"; then
    print_success "Binary is fully static"
elif timeout 5 ldd "$LOCAL_BINARY" 2>&1 | grep -q "statically linked"; then
    print_success "Binary is statically linked"
else
    print_warning "Binary may have dynamic dependencies"
    timeout 5 ldd "$LOCAL_BINARY" 2>&1 || true
fi

# Setup remote environment (quoted heredoc: nothing expands locally)
print_status "Setting up remote environment..."
ssh $REMOTE_USER@$REMOTE_HOST << 'EOF'
set -e
# Create directories
mkdir -p /home/ubuntu/ginxsom/db
sudo mkdir -p /var/www/html/blossom
sudo chown www-data:www-data /var/www/html/blossom
sudo chmod 755 /var/www/html/blossom
# Stop existing processes
echo "Stopping existing ginxsom processes..."
sudo pkill -f ginxsom-fcgi || true
sudo rm -f /tmp/ginxsom-fcgi.sock || true
echo "Remote environment ready"
EOF
print_success "Remote environment configured"

# Copy static binary
print_status "Copying static binary to remote server..."
scp "$LOCAL_BINARY" $REMOTE_USER@$REMOTE_HOST:$REMOTE_BINARY_PATH
print_success "Binary copied successfully"

# Set permissions and start the service.
# Unquoted heredoc: $REMOTE_* variables expand locally before the commands
# are sent; \$(pgrep ...) is escaped so it runs on the remote side.
print_status "Starting ginxsom FastCGI process..."
ssh $REMOTE_USER@$REMOTE_HOST << EOF
# Make binary executable
chmod +x $REMOTE_BINARY_PATH
# Clean up any existing socket
sudo rm -f $REMOTE_SOCKET
# Start FastCGI process
echo "Starting ginxsom FastCGI..."
sudo spawn-fcgi -M 666 -u www-data -g www-data -s $REMOTE_SOCKET -U www-data -G www-data -d $REMOTE_DIR -- $REMOTE_BINARY_PATH --db-path "$REMOTE_DB_PATH" --storage-dir "$REMOTE_DATA_DIR"
# Give it a moment to start
sleep 2
# Verify process is running
if pgrep -f "ginxsom-fcgi" > /dev/null; then
    echo "FastCGI process started successfully"
    echo "PID: \$(pgrep -f ginxsom-fcgi)"
else
    echo "Process verification: socket exists"
    ls -la $REMOTE_SOCKET
fi
EOF
# NOTE: with `set -e` a failed ssh aborts the script before this check,
# so the else-branch here is defensive only.
if [ $? -eq 0 ]; then
    print_success "FastCGI process started"
else
    print_error "Failed to start FastCGI process"
    exit 1
fi

# Reload nginx (test the config first so a bad config never goes live)
print_status "Reloading nginx..."
ssh $REMOTE_USER@$REMOTE_HOST << 'EOF'
if sudo nginx -t; then
    sudo nginx -s reload
    echo "Nginx reloaded successfully"
else
    echo "Nginx configuration test failed"
    exit 1
fi
EOF
print_success "Nginx reloaded"

# Smoke-test the deployment via the public health endpoint
print_status "Testing deployment..."
echo "Testing health endpoint..."
if curl -k -s --max-time 10 "https://blossom.laantungir.net/health" | grep -q "OK"; then
    print_success "Health check passed"
else
    print_warning "Health check failed - checking response..."
    curl -k -v --max-time 10 "https://blossom.laantungir.net/health" 2>&1 | head -10
fi

print_success "Deployment to $REMOTE_HOST completed!"
print_status "Ginxsom should now be available at: https://blossom.laantungir.net"
print_status ""
print_status "Deployment Summary:"
echo "  Binary: $BINARY_NAME"
echo "  Size: $(du -h "$LOCAL_BINARY" | cut -f1)"
echo "  Type: Fully static MUSL binary"
echo "  Portability: Works on any Linux distribution"
echo "  Deployment time: ~10 seconds (vs ~5 minutes for dynamic build)"

296
docs/STATIC_BUILD.md Normal file
View File

@@ -0,0 +1,296 @@
# Ginxsom Static MUSL Build Guide
This guide explains how to build and deploy Ginxsom as a fully static MUSL binary with zero runtime dependencies.
## Overview
Ginxsom now supports building as a static MUSL binary using Alpine Linux and Docker. This produces a truly portable binary that works on **any Linux distribution** without requiring any system libraries.
## Benefits
| Feature | Static MUSL | Dynamic glibc |
|---------|-------------|---------------|
| **Portability** | ✓ Any Linux | ✗ Requires matching libs |
| **Dependencies** | None | libfcgi, libsqlite3, etc. |
| **Deployment** | Copy one file | Build on target |
| **Binary Size** | ~7-10 MB | ~2-3 MB + libraries |
| **Deployment Time** | ~10 seconds | ~5-10 minutes |
## Prerequisites
- Docker installed and running
- Internet connection (for first build only)
- ~2GB disk space for Docker images
## Quick Start
### 1. Build Static Binary
```bash
# Build production binary (optimized, stripped)
make static
# Or build debug binary (with symbols)
make static-debug
# Or use the script directly
./build_static.sh
./build_static.sh --debug
```
The binary will be created in `build/ginxsom-fcgi_static_x86_64` (or `_arm64` for ARM systems).
### 2. Verify Binary
```bash
# Check if truly static
ldd build/ginxsom-fcgi_static_x86_64
# Should output: "not a dynamic executable"
# Check file info
file build/ginxsom-fcgi_static_x86_64
# Should show: "statically linked"
# Check size
ls -lh build/ginxsom-fcgi_static_x86_64
```
### 3. Deploy to Server
```bash
# Use the simplified deployment script
./deploy_static.sh
# Or manually copy and start
scp build/ginxsom-fcgi_static_x86_64 user@server:/path/to/ginxsom/
ssh user@server
chmod +x /path/to/ginxsom/ginxsom-fcgi_static_x86_64
sudo spawn-fcgi -M 666 -u www-data -g www-data \
-s /tmp/ginxsom-fcgi.sock \
-- /path/to/ginxsom/ginxsom-fcgi_static_x86_64 \
--db-path /path/to/db/ginxsom.db \
--storage-dir /var/www/html/blossom
```
## Build Process Details
### What Happens During Build
1. **Docker Image Creation** (5-10 minutes first time, cached after):
- Uses Alpine Linux 3.19 (native MUSL)
- Builds secp256k1 statically
- Builds nostr_core_lib with required NIPs
- Embeds web interface files
- Compiles Ginxsom with full static linking
2. **Binary Extraction**:
- Extracts binary from Docker container
- Verifies static linking
- Makes executable
3. **Verification**:
- Checks for dynamic dependencies
- Reports file size
- Tests execution
### Docker Layers (Cached)
The Dockerfile uses multi-stage builds with caching:
```
Layer 1: Alpine base + dependencies (cached)
Layer 2: Build secp256k1 (cached)
Layer 3: Initialize git submodules (cached unless .gitmodules changes)
Layer 4: Build nostr_core_lib (cached unless nostr_core_lib changes)
Layer 5: Embed web files (cached unless api/ changes)
Layer 6: Build Ginxsom (rebuilds when src/ changes)
```
This means subsequent builds are **much faster** (~1-2 minutes) since only changed layers rebuild.
## Deployment Comparison
### Old Dynamic Build Deployment
```bash
# 1. Sync entire project (30 seconds)
rsync -avz . user@server:/path/
# 2. Build on remote server (5-10 minutes)
ssh user@server "cd /path && make clean && make"
# 3. Restart service (10 seconds)
ssh user@server "sudo systemctl restart ginxsom"
# Total: ~6-11 minutes
```
### New Static Build Deployment
```bash
# 1. Build locally once (5-10 minutes first time, cached after)
make static
# 2. Copy binary (10 seconds)
scp build/ginxsom-fcgi_static_x86_64 user@server:/path/
# 3. Restart service (10 seconds)
ssh user@server "sudo systemctl restart ginxsom"
# Total: ~20 seconds (after first build)
```
## Cleanup
### Automatic Cleanup
The static build script automatically cleans up old dynamic build artifacts (`.o` files and `ginxsom-fcgi` binary) after successfully building the static binary. This keeps your `build/` directory clean.
### Manual Cleanup
```bash
# Clean dynamic build artifacts (preserves static binaries)
make clean
# Clean everything including static binaries
make clean-all
# Or manually remove specific files
rm -f build/*.o
rm -f build/ginxsom-fcgi
rm -f build/ginxsom-fcgi_static_*
```
## Troubleshooting
### Docker Not Found
```bash
# Install Docker
sudo apt install docker.io
# Add user to docker group
sudo usermod -aG docker $USER
newgrp docker
```
### Build Fails
```bash
# Clean Docker cache and rebuild
docker system prune -a
make static
```
### Binary Won't Run on Target
```bash
# Verify it's static
ldd build/ginxsom-fcgi_static_x86_64
# Check architecture matches
file build/ginxsom-fcgi_static_x86_64
uname -m # On target system
```
### Alpine Package Not Found
If you get errors about missing Alpine packages, the package name may have changed. Check Alpine's package database:
- https://pkgs.alpinelinux.org/packages
## Advanced Usage
### Cross-Compilation
Build for different architectures:
```bash
# Build for ARM64 on x86_64 machine
docker build --platform linux/arm64 -f Dockerfile.alpine-musl -t ginxsom-arm64 .
```
### Custom NIPs
Edit `Dockerfile.alpine-musl` line 66 to change which NIPs are included:
```dockerfile
./build.sh --nips=1,6,19 # Minimal
./build.sh --nips=1,6,13,17,19,44,59 # Full (default)
```
### Debug Build
```bash
# Build with debug symbols (no optimization)
make static-debug
# Binary will be larger but include debugging info
gdb build/ginxsom-fcgi_static_x86_64
```
## File Structure
```
ginxsom/
├── Dockerfile.alpine-musl # Alpine Docker build definition
├── build_static.sh # Build script wrapper
├── deploy_static.sh # Simplified deployment script
├── Makefile # Updated with 'static' target
└── build/
└── ginxsom-fcgi_static_x86_64 # Output binary
```
## CI/CD Integration
### GitHub Actions Example
```yaml
name: Build Static Binary
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Build static binary
run: make static
- name: Upload artifact
uses: actions/upload-artifact@v2
with:
name: ginxsom-static
path: build/ginxsom-fcgi_static_x86_64
```
## Performance
Static MUSL binaries have minimal performance impact:
| Metric | Static MUSL | Dynamic glibc |
|--------|-------------|---------------|
| Startup Time | ~50ms | ~40ms |
| Memory Usage | Similar | Similar |
| Request Latency | Identical | Identical |
| Binary Size | 7-10 MB | 2-3 MB + libs |
The slight startup delay is negligible for a long-running FastCGI process.
## References
- [MUSL libc](https://musl.libc.org/)
- [Alpine Linux](https://alpinelinux.org/)
- [Static Linking Best Practices](https://www.musl-libc.org/faq.html)
- [c-relay Static Build](../c-relay/STATIC_BUILD.md)
## Support
For issues with static builds:
1. Check Docker is running: `docker info`
2. Verify submodules: `git submodule status`
3. Clean and rebuild: `docker system prune -a && make static`
4. Check logs in Docker build output

File diff suppressed because it is too large Load Diff

View File

@@ -49,7 +49,22 @@ if [[ $FOLLOW_LOGS -eq 1 ]]; then
wait
exit 0
fi
FCGI_BINARY="./build/ginxsom-fcgi"
# Detect architecture for static binary name
ARCH=$(uname -m)
case "$ARCH" in
x86_64) STATIC_BINARY="./build/ginxsom-fcgi_static_x86_64" ;;
aarch64|arm64) STATIC_BINARY="./build/ginxsom-fcgi_static_arm64" ;;
*) STATIC_BINARY="./build/ginxsom-fcgi_static_${ARCH}" ;;
esac
# Use static binary if available, fallback to dynamic
if [ -f "$STATIC_BINARY" ]; then
FCGI_BINARY="$STATIC_BINARY"
echo "Using static binary: $FCGI_BINARY"
else
FCGI_BINARY="./build/ginxsom-fcgi"
echo "Static binary not found, using dynamic binary: $FCGI_BINARY"
fi
SOCKET_PATH="/tmp/ginxsom-fcgi.sock"
PID_FILE="/tmp/ginxsom-fcgi.pid"
NGINX_CONFIG="config/local-nginx.conf"
@@ -173,15 +188,24 @@ fi
echo -e "${GREEN}FastCGI cleanup complete${NC}"
# Step 3: Always rebuild FastCGI binary with clean build
echo -e "\n${YELLOW}3. Rebuilding FastCGI binary (clean build)...${NC}"
echo "Performing clean rebuild to ensure all changes are compiled..."
make clean && make
# Step 3: Always rebuild FastCGI binary with static build
echo -e "\n${YELLOW}3. Rebuilding FastCGI binary (static build)...${NC}"
echo "Building static binary with Docker..."
make static
if [ $? -ne 0 ]; then
echo -e "${RED}Build failed! Cannot continue.${NC}"
echo -e "${RED}Static build failed! Cannot continue.${NC}"
echo -e "${RED}Docker must be available and running for static builds.${NC}"
exit 1
fi
echo -e "${GREEN}Clean rebuild complete${NC}"
# Update FCGI_BINARY to use the newly built static binary
ARCH=$(uname -m)
case "$ARCH" in
x86_64) FCGI_BINARY="./build/ginxsom-fcgi_static_x86_64" ;;
aarch64|arm64) FCGI_BINARY="./build/ginxsom-fcgi_static_arm64" ;;
*) FCGI_BINARY="./build/ginxsom-fcgi_static_${ARCH}" ;;
esac
echo -e "${GREEN}Static build complete: $FCGI_BINARY${NC}"
# Step 3.5: Clean database directory for fresh testing
echo -e "\n${YELLOW}3.5. Cleaning database directory...${NC}"

82
scripts/embed_web_files.sh Executable file
View File

@@ -0,0 +1,82 @@
#!/bin/bash
# Embed web interface files into C source code.
# Converts HTML, CSS, and JS files into C byte arrays so the admin
# interface can be served from a single static binary.
set -e

# Configuration
API_DIR="api"
OUTPUT_DIR="src"
OUTPUT_FILE="${OUTPUT_DIR}/admin_interface_embedded.h"

# Files to embed (paths relative to API_DIR)
FILES=(
    "index.html"
    "index.css"
    "index.js"
    "nostr-lite.js"
    "nostr.bundle.js"
    "text_graph.js"
)

echo "=== Embedding Web Interface Files ==="
echo "Source directory: ${API_DIR}"
echo "Output file: ${OUTPUT_FILE}"
echo ""

# Start the generated header: guard + <stddef.h> for size_t
cat > "${OUTPUT_FILE}" << 'EOF'
/*
 * Embedded Web Interface Files
 * Auto-generated by scripts/embed_web_files.sh
 * DO NOT EDIT MANUALLY
 */
#ifndef ADMIN_INTERFACE_EMBEDDED_H
#define ADMIN_INTERFACE_EMBEDDED_H
#include <stddef.h>
EOF

# Process each file. Track how many were actually embedded so the final
# summary is accurate even when some files are missing (the original
# unconditionally reported ${#FILES[@]}, which was wrong after a skip).
EMBEDDED_COUNT=0
for file in "${FILES[@]}"; do
    filepath="${API_DIR}/${file}"
    if [[ ! -f "${filepath}" ]]; then
        echo "WARNING: File not found: ${filepath}"
        continue
    fi
    # Create a C identifier from the filename ('.' and '-' become '_')
    varname=$(echo "${file}" | tr '.-' '__')
    echo "Embedding: ${file} -> embedded_${varname}"
    # File size for the header comment (BSD stat first, GNU stat fallback)
    filesize=$(stat -f%z "${filepath}" 2>/dev/null || stat -c%s "${filepath}" 2>/dev/null)
    # Add comment
    echo "" >> "${OUTPUT_FILE}"
    echo "// Embedded file: ${file} (${filesize} bytes)" >> "${OUTPUT_FILE}"
    # Convert the file contents to a C byte array via xxd
    echo "static const unsigned char embedded_${varname}[] = {" >> "${OUTPUT_FILE}"
    xxd -i < "${filepath}" >> "${OUTPUT_FILE}"
    echo "};" >> "${OUTPUT_FILE}"
    echo "static const size_t embedded_${varname}_size = sizeof(embedded_${varname});" >> "${OUTPUT_FILE}"
    EMBEDDED_COUNT=$((EMBEDDED_COUNT + 1))
done

# Close header guard
cat >> "${OUTPUT_FILE}" << 'EOF'

#endif /* ADMIN_INTERFACE_EMBEDDED_H */
EOF

echo ""
echo "=== Embedding Complete ==="
echo "Generated: ${OUTPUT_FILE}"
echo "Total files embedded: ${EMBEDDED_COUNT}"

View File

@@ -1,8 +1,8 @@
/*
* Ginxsom Admin Commands Interface
*
* Handles encrypted admin commands sent via Kind 23456 events
* and generates encrypted responses as Kind 23457 events.
* Handles encrypted admin commands sent via Kind 23458 events
* and generates encrypted responses as Kind 23459 events.
*/
#ifndef ADMIN_COMMANDS_H

View File

@@ -1,8 +1,10 @@
// Admin event handler for Kind 23456/23457 admin commands
// Admin event handler for Kind 23458/23459 admin commands
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <sys/types.h>
#include "ginxsom.h"
// Forward declarations for nostr_core_lib functions
@@ -27,90 +29,162 @@ extern char g_db_path[];
static int get_server_privkey(unsigned char* privkey_bytes);
static int get_server_pubkey(char* pubkey_hex, size_t size);
static int handle_config_query_command(cJSON* response_data);
static int send_admin_response_event(const char* admin_pubkey, const char* request_id,
static int handle_query_view_command(cJSON* command_array, cJSON* response_data);
static int send_admin_response_event(const char* admin_pubkey, const char* request_id,
cJSON* response_data);
static cJSON* parse_authorization_header(void);
static int process_admin_event(cJSON* event);
/**
* Handle Kind 23456 admin command event
* Expects POST to /api/admin with JSON body containing the event
* Handle Kind 23458 admin command event
* Supports two delivery methods:
* 1. POST body with JSON event
* 2. Authorization header with Nostr event
*/
void handle_admin_event_request(void) {
// Read request body
const char* content_length_str = getenv("CONTENT_LENGTH");
if (!content_length_str) {
printf("Status: 411 Length Required\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Content-Length header required\"}\n");
return;
}
cJSON* event = NULL;
int should_free_event = 1;
long content_length = atol(content_length_str);
if (content_length <= 0 || content_length > 65536) {
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Invalid content length\"}\n");
return;
}
char* json_body = malloc(content_length + 1);
if (!json_body) {
printf("Status: 500 Internal Server Error\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Memory allocation failed\"}\n");
return;
}
size_t bytes_read = fread(json_body, 1, content_length, stdin);
if (bytes_read != (size_t)content_length) {
free(json_body);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Failed to read complete request body\"}\n");
return;
}
json_body[content_length] = '\0';
// Parse event JSON
cJSON* event = cJSON_Parse(json_body);
free(json_body);
// First, try to get event from Authorization header
event = parse_authorization_header();
// If not in header, try POST body
if (!event) {
const char* content_length_str = getenv("CONTENT_LENGTH");
if (!content_length_str) {
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Event required in POST body or Authorization header\"}\n");
return;
}
long content_length = atol(content_length_str);
if (content_length <= 0 || content_length > 65536) {
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Invalid content length\"}\n");
return;
}
char* json_body = malloc(content_length + 1);
if (!json_body) {
printf("Status: 500 Internal Server Error\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Memory allocation failed\"}\n");
return;
}
size_t bytes_read = fread(json_body, 1, content_length, stdin);
if (bytes_read != (size_t)content_length) {
free(json_body);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Failed to read complete request body\"}\n");
return;
}
json_body[content_length] = '\0';
// Parse event JSON
event = cJSON_Parse(json_body);
// Debug: Log the received JSON
app_log(LOG_DEBUG, "ADMIN_EVENT: Received POST body: %s", json_body);
free(json_body);
if (!event) {
app_log(LOG_ERROR, "ADMIN_EVENT: Failed to parse JSON");
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Invalid JSON\"}\n");
return;
}
// Debug: Log parsed event
char* event_str = cJSON_Print(event);
if (event_str) {
app_log(LOG_DEBUG, "ADMIN_EVENT: Parsed event: %s", event_str);
free(event_str);
}
}
// Process the event (handles validation, decryption, command execution, response)
int result = process_admin_event(event);
// Clean up
if (should_free_event && event) {
cJSON_Delete(event);
}
(void)result; // Result already handled by process_admin_event
}
/**
 * Extract a Kind 23458 admin event from the CGI Authorization header.
 *
 * Expected header form: "Authorization: Nostr <event-json>".
 *
 * Returns: parsed cJSON event object, or NULL when the header is absent,
 *          uses a different scheme, or the payload fails to parse.
 */
static cJSON* parse_authorization_header(void) {
    const char* header = getenv("HTTP_AUTHORIZATION");

    /* No header, or the scheme is not "Nostr " (case-insensitive):
     * nothing to parse. */
    if (header == NULL || strncasecmp(header, "Nostr ", 6) != 0) {
        return NULL;
    }

    /* Payload follows the 6-character scheme prefix. NOTE: despite the
     * "base64_event" naming in earlier revisions, the payload is currently
     * treated as raw JSON, not base64 — this matches the pattern from
     * c-relay's admin interface. */
    const char* payload = header + 6;
    return cJSON_Parse(payload);
}
/**
 * Process a Kind 23458 admin event (from POST body or Authorization header).
 *
 * Validates kind/id/pubkey, checks the sender against the admins table,
 * decrypts the content, dispatches the command ("config_query" or
 * "query_view"), and sends a Kind 23459 response on success.
 *
 * Returns: 0 on success, -1 on error (error response already sent)
 *
 * NOTE(review): this block contains UNMERGED DIFF RESIDUE. Several error
 * paths carry both the old statements (bare `return;`, Kind 23456 text) and
 * their replacements (`return -1;`, Kind 23458 text), and "@@ ... @@" diff
 * hunk headers appear where intervening lines were elided. The old lines are
 * dead/invalid (bare `return;` in a function returning int) and the file
 * will not compile until a manual merge pass removes them.
 */
static int process_admin_event(cJSON* event) {
if (!event) {
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Invalid JSON\"}\n");
// NOTE(review): diff residue — old bare `return;` plus old error body below;
// the `return -1;` two lines down is the intended statement.
return;
printf("{\"error\":\"Invalid event\"}\n");
return -1;
}
// Verify it's Kind 23456
// Verify it's Kind 23458
// NOTE(review): diff residue — the 23456 check below is the OLD hunk; the
// 23458 check that follows is the intended replacement.
cJSON* kind_obj = cJSON_GetObjectItem(event, "kind");
if (!kind_obj || !cJSON_IsNumber(kind_obj) ||
(int)cJSON_GetNumberValue(kind_obj) != 23456) {
// NOTE(review): error paths delete `event` here, but the caller
// (handle_admin_event_request) also deletes it when should_free_event is
// set — confirm ownership, this looks like a double-free. TODO confirm.
cJSON_Delete(event);
if (!kind_obj || !cJSON_IsNumber(kind_obj) ||
(int)cJSON_GetNumberValue(kind_obj) != 23458) {
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Event must be Kind 23456\"}\n");
return;
printf("{\"error\":\"Event must be Kind 23458\"}\n");
return -1;
}
// Get event ID for response correlation
cJSON* id_obj = cJSON_GetObjectItem(event, "id");
if (!id_obj || !cJSON_IsString(id_obj)) {
cJSON_Delete(event);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Event missing id\"}\n");
// NOTE(review): diff residue — old `return;` retained above new `return -1;`.
return;
return -1;
}
const char* request_id = cJSON_GetStringValue(id_obj);
// Get admin pubkey from event
cJSON* pubkey_obj = cJSON_GetObjectItem(event, "pubkey");
if (!pubkey_obj || !cJSON_IsString(pubkey_obj)) {
cJSON_Delete(event);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Event missing pubkey\"}\n");
return;
return -1;
}
const char* admin_pubkey = cJSON_GetStringValue(pubkey_obj);
// NOTE(review): diff hunk header below — lines between the pubkey check and
// the database open were elided by the diff view.
@@ -118,11 +192,10 @@ void handle_admin_event_request(void) {
sqlite3* db;
int rc = sqlite3_open_v2(g_db_path, &db, SQLITE_OPEN_READONLY, NULL);
if (rc != SQLITE_OK) {
cJSON_Delete(event);
printf("Status: 500 Internal Server Error\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Database error\"}\n");
return;
return -1;
}
sqlite3_stmt* stmt;
// NOTE(review): diff hunk header — the admins-table lookup that sets
// `is_admin` was elided here; `is_admin` is used below without a visible
// definition in this view.
@@ -141,42 +214,38 @@ void handle_admin_event_request(void) {
sqlite3_close(db);
if (!is_admin) {
cJSON_Delete(event);
printf("Status: 403 Forbidden\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Not authorized as admin\"}\n");
return;
return -1;
}
// Get encrypted content
cJSON* content_obj = cJSON_GetObjectItem(event, "content");
if (!content_obj || !cJSON_IsString(content_obj)) {
cJSON_Delete(event);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Event missing content\"}\n");
return;
return -1;
}
const char* encrypted_content = cJSON_GetStringValue(content_obj);
// Get server private key for decryption
unsigned char server_privkey[32];
if (get_server_privkey(server_privkey) != 0) {
cJSON_Delete(event);
printf("Status: 500 Internal Server Error\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Failed to get server private key\"}\n");
return;
return -1;
}
// Convert admin pubkey to bytes
unsigned char admin_pubkey_bytes[32];
if (nostr_hex_to_bytes(admin_pubkey, admin_pubkey_bytes, 32) != 0) {
cJSON_Delete(event);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Invalid admin pubkey format\"}\n");
return;
return -1;
}
// Decrypt content using NIP-44 (or use plaintext for testing)
// NOTE(review): diff hunk header — the NIP-44 decrypt call that produces
// `decrypt_result`, `decrypted_content` and `content_to_parse` was elided;
// the stray `);` below is the tail of that call.
@@ -195,34 +264,37 @@ void handle_admin_event_request(void) {
);
if (decrypt_result != 0) {
cJSON_Delete(event);
app_log(LOG_ERROR, "ADMIN_EVENT: Decryption failed with result: %d", decrypt_result);
app_log(LOG_ERROR, "ADMIN_EVENT: Encrypted content: %s", encrypted_content);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Failed to decrypt content\"}\n");
return;
return -1;
}
content_to_parse = decrypted_content;
app_log(LOG_DEBUG, "ADMIN_EVENT: Decrypted content: %s", decrypted_content);
} else {
app_log(LOG_DEBUG, "ADMIN_EVENT: Using plaintext content (starts with '['): %s", encrypted_content);
}
// Parse command array (either decrypted or plaintext)
app_log(LOG_DEBUG, "ADMIN_EVENT: Parsing command array from: %s", content_to_parse);
cJSON* command_array = cJSON_Parse(content_to_parse);
if (!command_array || !cJSON_IsArray(command_array)) {
cJSON_Delete(event);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Decrypted content is not a valid command array\"}\n");
return;
return -1;
}
// Get command type
cJSON* command_type = cJSON_GetArrayItem(command_array, 0);
if (!command_type || !cJSON_IsString(command_type)) {
cJSON_Delete(command_array);
cJSON_Delete(event);
printf("Status: 400 Bad Request\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Invalid command format\"}\n");
return;
return -1;
}
const char* cmd = cJSON_GetStringValue(command_type);
// NOTE(review): diff hunk header — the creation of `response_data` was
// elided here; it is used below without a visible definition in this view.
@@ -235,23 +307,35 @@ void handle_admin_event_request(void) {
// Handle command
int result = -1;
if (strcmp(cmd, "config_query") == 0) {
app_log(LOG_DEBUG, "ADMIN_EVENT: Handling config_query command");
result = handle_config_query_command(response_data);
app_log(LOG_DEBUG, "ADMIN_EVENT: config_query result: %d", result);
} else if (strcmp(cmd, "query_view") == 0) {
app_log(LOG_DEBUG, "ADMIN_EVENT: Handling query_view command");
result = handle_query_view_command(command_array, response_data);
app_log(LOG_DEBUG, "ADMIN_EVENT: query_view result: %d", result);
} else {
app_log(LOG_WARN, "ADMIN_EVENT: Unknown command: %s", cmd);
cJSON_AddStringToObject(response_data, "status", "error");
cJSON_AddStringToObject(response_data, "error", "Unknown command");
result = -1;
}
cJSON_Delete(command_array);
// NOTE(review): `event` is deleted here on the success path too, while the
// caller may also delete it (should_free_event) — confirm ownership.
cJSON_Delete(event);
if (result == 0) {
// Send Kind 23457 response
// NOTE(review): diff residue — old fire-and-forget send call retained above
// the new call that captures and returns send_result.
send_admin_response_event(admin_pubkey, request_id, response_data);
app_log(LOG_DEBUG, "ADMIN_EVENT: Sending Kind 23459 response");
// Send Kind 23459 response
int send_result = send_admin_response_event(admin_pubkey, request_id, response_data);
app_log(LOG_DEBUG, "ADMIN_EVENT: Response sent with result: %d", send_result);
return send_result;
} else {
app_log(LOG_ERROR, "ADMIN_EVENT: Command processing failed");
cJSON_Delete(response_data);
printf("Status: 500 Internal Server Error\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("{\"error\":\"Command processing failed\"}\n");
return -1;
}
}
@@ -350,7 +434,126 @@ static int handle_config_query_command(cJSON* response_data) {
}
/**
 * Handle the "query_view" admin command - returns all rows from one of a
 * fixed set of reporting views in the blob database.
 *
 * Command format: ["query_view", "view_name"]
 *
 * Parameters:
 *   command_array - parsed command array; element 1 must be the view name
 *   response_data - JSON object populated by this function. On success it
 *                   receives {"status":"success","view_name":...,"data":[rows]};
 *                   on failure {"status":"error","error":...}.
 *
 * Returns: 0 on success, -1 on error (error details placed in response_data)
 */
static int handle_query_view_command(cJSON* command_array, cJSON* response_data) {
    app_log(LOG_DEBUG, "ADMIN_EVENT: handle_query_view_command called");

    /* Get view name from command array. */
    cJSON* view_name_obj = cJSON_GetArrayItem(command_array, 1);
    if (!view_name_obj || !cJSON_IsString(view_name_obj)) {
        app_log(LOG_ERROR, "ADMIN_EVENT: View name missing or not a string");
        cJSON_AddStringToObject(response_data, "status", "error");
        cJSON_AddStringToObject(response_data, "error", "View name required");
        return -1;
    }
    const char* view_name = cJSON_GetStringValue(view_name_obj);
    app_log(LOG_DEBUG, "ADMIN_EVENT: Querying view: %s", view_name);

    /* Whitelist of queryable views. The name is interpolated into SQL below,
     * so only exact matches against this list may ever reach the query. */
    static const char* const allowed_views[] = {
        "blob_overview",
        "blob_type_distribution",
        "blob_time_stats",
        "top_uploaders",
        NULL
    };
    int view_allowed = 0;
    for (int i = 0; allowed_views[i] != NULL; i++) {
        if (strcmp(view_name, allowed_views[i]) == 0) {
            view_allowed = 1;
            break;
        }
    }
    if (!view_allowed) {
        cJSON_AddStringToObject(response_data, "status", "error");
        cJSON_AddStringToObject(response_data, "error", "Invalid view name");
        app_log(LOG_WARN, "ADMIN_EVENT: Attempted to query invalid view: %s", view_name);
        return -1;
    }

    app_log(LOG_DEBUG, "ADMIN_EVENT: View '%s' is allowed, opening database: %s", view_name, g_db_path);

    /* Open database read-only. */
    sqlite3* db;
    int rc = sqlite3_open_v2(g_db_path, &db, SQLITE_OPEN_READONLY, NULL);
    if (rc != SQLITE_OK) {
        app_log(LOG_ERROR, "ADMIN_EVENT: Failed to open database: %s (error: %s)", g_db_path, sqlite3_errmsg(db));
        /* SQLite allocates a handle even when the open fails; it must still
         * be released with sqlite3_close() or it leaks (fixed: was missing). */
        sqlite3_close(db);
        cJSON_AddStringToObject(response_data, "status", "error");
        cJSON_AddStringToObject(response_data, "error", "Database error");
        return -1;
    }

    /* Build SQL query. Safe: view_name was validated against the whitelist. */
    char sql[256];
    snprintf(sql, sizeof(sql), "SELECT * FROM %s", view_name);
    app_log(LOG_DEBUG, "ADMIN_EVENT: Executing SQL: %s", sql);

    sqlite3_stmt* stmt;
    if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
        app_log(LOG_ERROR, "ADMIN_EVENT: Failed to prepare query: %s (error: %s)", sql, sqlite3_errmsg(db));
        sqlite3_close(db);
        cJSON_AddStringToObject(response_data, "status", "error");
        cJSON_AddStringToObject(response_data, "error", "Failed to prepare query");
        return -1;
    }

    /* Get column count; names are fetched per column while iterating rows. */
    int col_count = sqlite3_column_count(stmt);

    /* Create results array (fixed: allocation failure was unchecked). */
    cJSON* results = cJSON_CreateArray();
    if (!results) {
        sqlite3_finalize(stmt);
        sqlite3_close(db);
        cJSON_AddStringToObject(response_data, "status", "error");
        cJSON_AddStringToObject(response_data, "error", "Memory allocation failed");
        return -1;
    }

    /* Fetch all rows, converting each column to the closest JSON type. */
    while (sqlite3_step(stmt) == SQLITE_ROW) {
        cJSON* row = cJSON_CreateObject();
        if (!row) {
            /* Best-effort: skip this row on allocation failure. */
            continue;
        }
        for (int i = 0; i < col_count; i++) {
            const char* col_name = sqlite3_column_name(stmt, i);
            switch (sqlite3_column_type(stmt, i)) {
                case SQLITE_INTEGER:
                    /* int64 -> double: exact for magnitudes up to 2^53. */
                    cJSON_AddNumberToObject(row, col_name, (double)sqlite3_column_int64(stmt, i));
                    break;
                case SQLITE_FLOAT:
                    cJSON_AddNumberToObject(row, col_name, sqlite3_column_double(stmt, i));
                    break;
                case SQLITE_TEXT:
                    cJSON_AddStringToObject(row, col_name, (const char*)sqlite3_column_text(stmt, i));
                    break;
                case SQLITE_NULL:
                    cJSON_AddNullToObject(row, col_name);
                    break;
                default:
                    /* BLOB or unknown types are not representable; skip. */
                    break;
            }
        }
        cJSON_AddItemToArray(results, row);
    }
    sqlite3_finalize(stmt);
    sqlite3_close(db);

    /* Build success response; response_data takes ownership of results. */
    cJSON_AddStringToObject(response_data, "status", "success");
    cJSON_AddStringToObject(response_data, "view_name", view_name);
    cJSON_AddItemToObject(response_data, "data", results);
    app_log(LOG_DEBUG, "ADMIN_EVENT: Query view '%s' returned %d rows", view_name, cJSON_GetArraySize(results));
    return 0;
}
/**
* Send Kind 23459 admin response event
*/
static int send_admin_response_event(const char* admin_pubkey, const char* request_id,
cJSON* response_data) {
@@ -407,11 +610,11 @@ static int send_admin_response_event(const char* admin_pubkey, const char* reque
return -1;
}
// Create Kind 23457 response event
// Create Kind 23459 response event
cJSON* response_event = cJSON_CreateObject();
cJSON_AddStringToObject(response_event, "pubkey", server_pubkey);
cJSON_AddNumberToObject(response_event, "created_at", (double)time(NULL));
cJSON_AddNumberToObject(response_event, "kind", 23457);
cJSON_AddNumberToObject(response_event, "kind", 23459);
cJSON_AddStringToObject(response_event, "content", encrypted_response);
// Add tags
@@ -433,7 +636,7 @@ static int send_admin_response_event(const char* admin_pubkey, const char* reque
// Sign the event
cJSON* signed_event = nostr_create_and_sign_event(
23457,
23459,
encrypted_response,
tags,
server_privkey,

62
src/admin_interface.c Normal file
View File

@@ -0,0 +1,62 @@
// Admin interface handler - serves embedded web UI files
#include <stdio.h>
#include <string.h>
#include "ginxsom.h"
#include "admin_interface_embedded.h"
/**
 * Emit one embedded admin-UI file as a complete CGI response.
 *
 * Writes the status line, Content-Type, Content-Length and a one-hour
 * public cache header, then the blank line and the raw file bytes, and
 * flushes stdout so the FastCGI layer sees the full response.
 */
static void serve_embedded_file(const unsigned char* data, size_t size, const char* content_type) {
    printf("Status: 200 OK\r\n"
           "Content-Type: %s\r\n"
           "Content-Length: %zu\r\n"
           "Cache-Control: public, max-age=3600\r\n"
           "\r\n",
           content_type, size);
    fwrite(data, 1, size, stdout);
    fflush(stdout);
}
/* Write s to stdout with HTML metacharacters entity-encoded.
 * Guards the 404 page against reflected XSS from a client-supplied path. */
static void print_html_escaped(const char* s) {
    for (; *s != '\0'; s++) {
        switch (*s) {
            case '&':  fputs("&amp;", stdout);  break;
            case '<':  fputs("&lt;", stdout);   break;
            case '>':  fputs("&gt;", stdout);   break;
            case '"':  fputs("&quot;", stdout); break;
            case '\'': fputs("&#39;", stdout);  break;
            default:   fputc(*s, stdout);       break;
        }
    }
}

/**
 * Handle admin interface requests
 * Serves embedded web UI files from /api path (consistent with c-relay)
 *
 * path: the raw request URI (client-controlled).
 */
void handle_admin_interface_request(const char* path) {
    /* Normalize path: bounded copy, then strip a single trailing slash
     * (but never the root "/" itself). */
    char normalized_path[256];
    strncpy(normalized_path, path, sizeof(normalized_path) - 1);
    normalized_path[sizeof(normalized_path) - 1] = '\0';
    size_t len = strlen(normalized_path);
    if (len > 1 && normalized_path[len - 1] == '/') {
        normalized_path[len - 1] = '\0';
    }

    /* Route to appropriate embedded file.
     * All paths use the /api/ prefix for consistency with c-relay. */
    if (strcmp(normalized_path, "/api") == 0 || strcmp(normalized_path, "/api/index.html") == 0) {
        serve_embedded_file(embedded_index_html, embedded_index_html_size, "text/html; charset=utf-8");
    }
    else if (strcmp(normalized_path, "/api/index.css") == 0) {
        serve_embedded_file(embedded_index_css, embedded_index_css_size, "text/css; charset=utf-8");
    }
    else if (strcmp(normalized_path, "/api/index.js") == 0) {
        serve_embedded_file(embedded_index_js, embedded_index_js_size, "application/javascript; charset=utf-8");
    }
    else if (strcmp(normalized_path, "/api/nostr-lite.js") == 0) {
        serve_embedded_file(embedded_nostr_lite_js, embedded_nostr_lite_js_size, "application/javascript; charset=utf-8");
    }
    else if (strcmp(normalized_path, "/api/nostr.bundle.js") == 0) {
        serve_embedded_file(embedded_nostr_bundle_js, embedded_nostr_bundle_js_size, "application/javascript; charset=utf-8");
    }
    else if (strcmp(normalized_path, "/api/text_graph.js") == 0) {
        serve_embedded_file(embedded_text_graph_js, embedded_text_graph_js_size, "application/javascript; charset=utf-8");
    }
    else {
        /* 404 Not Found. The path is client-controlled, so it must be
         * HTML-escaped before being reflected into the error page
         * (fixed: was printed raw, allowing reflected XSS). */
        printf("Status: 404 Not Found\r\n");
        printf("Content-Type: text/html; charset=utf-8\r\n");
        printf("\r\n");
        printf("<html><body><h1>404 Not Found</h1><p>File not found: ");
        print_html_escaped(normalized_path);
        printf("</p></body></html>\n");
    }
}

63364
src/admin_interface_embedded.h Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -10,8 +10,8 @@
// Version information (auto-updated by build system)
#define VERSION_MAJOR 0
#define VERSION_MINOR 1
#define VERSION_PATCH 14
#define VERSION "v0.1.14"
#define VERSION_PATCH 18
#define VERSION "v0.1.18"
#include <stddef.h>
#include <stdint.h>
@@ -272,9 +272,12 @@ int validate_sha256_format(const char* sha256);
// Admin API request handler
void handle_admin_api_request(const char* method, const char* uri, const char* validated_pubkey, int is_authenticated);
// Admin event handler (Kind 23456/23457)
// Admin event handler (Kind 23458/23459)
void handle_admin_event_request(void);
// Admin interface handler (serves embedded web UI)
void handle_admin_interface_request(const char* path);
// Individual endpoint handlers
void handle_stats_api(void);
void handle_config_get_api(void);

View File

@@ -248,7 +248,7 @@ int initialize_database(const char *db_path) {
return -1;
}
// Create storage_stats view
// Create storage_stats view (legacy - kept for backward compatibility)
const char *create_view =
"CREATE VIEW IF NOT EXISTS storage_stats AS "
"SELECT "
@@ -268,6 +268,85 @@ int initialize_database(const char *db_path) {
return -1;
}
// Create blob_overview view for admin dashboard
const char *create_overview_view =
"CREATE VIEW IF NOT EXISTS blob_overview AS "
"SELECT "
" COUNT(*) as total_blobs, "
" COALESCE(SUM(size), 0) as total_bytes, "
" MIN(uploaded_at) as first_upload, "
" MAX(uploaded_at) as last_upload "
"FROM blobs;";
rc = sqlite3_exec(db, create_overview_view, NULL, NULL, &err_msg);
if (rc != SQLITE_OK) {
fprintf(stderr, "Failed to create blob_overview view: %s\n", err_msg);
sqlite3_free(err_msg);
sqlite3_close(db);
return -1;
}
// Create blob_type_distribution view for MIME type statistics
const char *create_type_view =
"CREATE VIEW IF NOT EXISTS blob_type_distribution AS "
"SELECT "
" type as mime_type, "
" COUNT(*) as blob_count, "
" SUM(size) as total_bytes, "
" ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM blobs), 2) as percentage "
"FROM blobs "
"GROUP BY type "
"ORDER BY blob_count DESC;";
rc = sqlite3_exec(db, create_type_view, NULL, NULL, &err_msg);
if (rc != SQLITE_OK) {
fprintf(stderr, "Failed to create blob_type_distribution view: %s\n", err_msg);
sqlite3_free(err_msg);
sqlite3_close(db);
return -1;
}
// Create blob_time_stats view for time-based statistics
const char *create_time_view =
"CREATE VIEW IF NOT EXISTS blob_time_stats AS "
"SELECT "
" COUNT(CASE WHEN uploaded_at >= strftime('%s', 'now', '-1 day') THEN 1 END) as blobs_24h, "
" COUNT(CASE WHEN uploaded_at >= strftime('%s', 'now', '-7 days') THEN 1 END) as blobs_7d, "
" COUNT(CASE WHEN uploaded_at >= strftime('%s', 'now', '-30 days') THEN 1 END) as blobs_30d "
"FROM blobs;";
rc = sqlite3_exec(db, create_time_view, NULL, NULL, &err_msg);
if (rc != SQLITE_OK) {
fprintf(stderr, "Failed to create blob_time_stats view: %s\n", err_msg);
sqlite3_free(err_msg);
sqlite3_close(db);
return -1;
}
// Create top_uploaders view for pubkey statistics
const char *create_uploaders_view =
"CREATE VIEW IF NOT EXISTS top_uploaders AS "
"SELECT "
" uploader_pubkey, "
" COUNT(*) as blob_count, "
" SUM(size) as total_bytes, "
" ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM blobs), 2) as percentage, "
" MIN(uploaded_at) as first_upload, "
" MAX(uploaded_at) as last_upload "
"FROM blobs "
"WHERE uploader_pubkey IS NOT NULL "
"GROUP BY uploader_pubkey "
"ORDER BY blob_count DESC "
"LIMIT 20;";
rc = sqlite3_exec(db, create_uploaders_view, NULL, NULL, &err_msg);
if (rc != SQLITE_OK) {
fprintf(stderr, "Failed to create top_uploaders view: %s\n", err_msg);
sqlite3_free(err_msg);
sqlite3_close(db);
return -1;
}
fprintf(stderr, "Database schema initialized successfully\n");
}
@@ -2314,13 +2393,14 @@ if (!config_loaded /* && !initialize_server_config() */) {
// Special case: Root endpoint is public and doesn't require authentication
if (strcmp(request_method, "GET") == 0 && strcmp(request_uri, "/") == 0) {
// Handle GET / requests - Server info endpoint
// Handle GET / requests - Server info endpoint (NIP-11)
printf("Status: 200 OK\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("Content-Type: application/nostr+json\r\n\r\n");
printf("{\n");
printf(" \"server\": \"ginxsom\",\n");
printf(" \"version\": \"%s\",\n", VERSION);
printf(" \"description\": \"Ginxsom Blossom Server\",\n");
printf(" \"pubkey\": \"%s\",\n", g_blossom_pubkey);
printf(" \"endpoints\": {\n");
printf(" \"blob_get\": \"GET /<sha256>\",\n");
printf(" \"blob_head\": \"HEAD /<sha256>\",\n");
@@ -2382,12 +2462,28 @@ if (!config_loaded /* && !initialize_server_config() */) {
operation = "mirror";
} else if (strcmp(request_method, "PUT") == 0 && strcmp(request_uri, "/report") == 0) {
operation = "report";
} else if (strncmp(request_uri, "/admin", 6) == 0) {
operation = "admin_interface"; // Public static files - no auth required
} else if (strncmp(request_uri, "/api/", 5) == 0) {
operation = "admin";
// Special case: POST /api/admin uses Kind 23456 events for authentication
// Skip centralized validation for these requests
if (strcmp(request_method, "POST") == 0 && strcmp(request_uri, "/api/admin") == 0) {
operation = "admin_event"; // Mark as special case
// Check if this is a static file request or API request
const char *path = request_uri + 5; // Skip "/api/"
int is_static_file = 0;
// Check for static file extensions or root /api path
if (strstr(path, ".html") || strstr(path, ".css") || strstr(path, ".js") ||
strlen(path) == 0 || strcmp(path, "/") == 0) {
is_static_file = 1;
}
if (is_static_file) {
operation = "admin_interface"; // Public static files - no auth required
} else {
operation = "admin";
// Special case: POST /api/admin uses Kind 23458 events for authentication
// Skip centralized validation for these requests
if (strcmp(request_method, "POST") == 0 && strcmp(request_uri, "/api/admin") == 0) {
operation = "admin_event"; // Mark as special case
}
}
} else if (strcmp(request_method, "GET") == 0 && strncmp(request_uri, "/list/", 6) == 0) {
operation = "list";
@@ -2424,6 +2520,8 @@ if (!config_loaded /* && !initialize_server_config() */) {
// Special case: challenge generation failure should be handled by the endpoint
if (strcmp(operation, "challenge") == 0) {
// Let the /auth endpoint handle this - it will generate its own error response
} else if (strcmp(operation, "admin_interface") == 0) {
// Admin interface serves public static files - no auth required
} else if (strcmp(operation, "head") == 0 || strcmp(operation, "head_upload") == 0) {
// HEAD requests might not require auth depending on config - let handler decide
} else if (strcmp(operation, "list") == 0) {
@@ -2431,7 +2529,7 @@ if (!config_loaded /* && !initialize_server_config() */) {
} else if (strcmp(operation, "admin") == 0 && strcmp(request_uri, "/api/health") == 0) {
// Health endpoint is public and doesn't require authentication - let handler decide
} else if (strcmp(operation, "admin_event") == 0) {
// POST /api/admin uses Kind 23456 events - authentication handled by admin_event.c
// POST /api/admin uses Kind 23458 events - authentication handled by admin_event.c
// Skip centralized validation and let the handler validate the event
} else {
// For other operations, validation failure means auth failure
@@ -2528,10 +2626,34 @@ if (!config_loaded /* && !initialize_server_config() */) {
} else if (strcmp(request_method, "POST") == 0 &&
strcmp(request_uri, "/api/admin") == 0) {
// Handle POST /api/admin requests (Kind 23458 admin events)
handle_admin_event_request();
} else if (strncmp(request_uri, "/admin", 6) == 0) {
// Handle admin web interface requests (embedded files)
handle_admin_interface_request(request_uri);
} else if (strncmp(request_uri, "/api/", 5) == 0) {
// Handle admin API requests with pre-validated auth
const char *validated_pubkey = (result.valid && strlen(result.pubkey) == 64) ? result.pubkey : NULL;
handle_admin_api_request(request_method, request_uri, validated_pubkey, result.valid);
// Check if this is a static file request (no auth required) or API request (auth required)
const char *path = request_uri + 5; // Skip "/api/"
int is_static_file = 0;
// Check for static file extensions
if (strstr(path, ".html") || strstr(path, ".css") || strstr(path, ".js") ||
strcmp(request_uri, "/api") == 0 || strcmp(request_uri, "/api/") == 0) {
is_static_file = 1;
}
if (is_static_file) {
// Serve static files without authentication
handle_admin_interface_request(request_uri);
} else {
// Handle admin API requests with pre-validated auth
const char *validated_pubkey = (result.valid && strlen(result.pubkey) == 64) ? result.pubkey : NULL;
handle_admin_api_request(request_method, request_uri, validated_pubkey, result.valid);
}
} else if (strcmp(request_method, "GET") == 0 &&
@@ -2562,13 +2684,14 @@ if (!config_loaded /* && !initialize_server_config() */) {
}
} else if (strcmp(request_method, "GET") == 0 &&
strcmp(request_uri, "/") == 0) {
// Handle GET / requests - Server info endpoint
// Handle GET / requests - Server info endpoint (NIP-11)
printf("Status: 200 OK\r\n");
printf("Content-Type: application/json\r\n\r\n");
printf("Content-Type: application/nostr+json\r\n\r\n");
printf("{\n");
printf(" \"server\": \"ginxsom\",\n");
printf(" \"version\": \"%s\",\n", VERSION);
printf(" \"description\": \"Ginxsom Blossom Server\",\n");
printf(" \"pubkey\": \"%s\",\n", g_blossom_pubkey);
printf(" \"endpoints\": {\n");
printf(" \"blob_get\": \"GET /<sha256>\",\n");
printf(" \"blob_head\": \"HEAD /<sha256>\",\n");

View File

@@ -1,7 +1,7 @@
#!/bin/bash
# Ginxsom Admin Event Test Script
# Tests Kind 23456/23457 admin command system with NIP-44 encryption
# Tests Kind 23458/23459 admin command system with NIP-44 encryption
#
# Prerequisites:
# - nak: https://github.com/fiatjaf/nak
@@ -72,12 +72,12 @@ check_dependencies() {
log_success "All dependencies found"
}
# Create NIP-44 encrypted admin command event (Kind 23456)
# Create NIP-44 encrypted admin command event (Kind 23458)
create_admin_command_event() {
local command="$1"
local expiration=$(($(date +%s) + 3600)) # 1 hour from now
log_info "Creating Kind 23456 admin command event..."
log_info "Creating Kind 23458 admin command event..."
log_info "Command: $command"
# For now, we'll create the event structure manually since nak may not support NIP-44 encryption yet
@@ -87,9 +87,9 @@ create_admin_command_event() {
local content="[\"$command\"]"
# Create event with nak
# Kind 23456 = admin command
# Kind 23458 = admin command
# Tags: p = server pubkey, expiration
local event=$(nak event -k 23456 \
local event=$(nak event -k 23458 \
-c "$content" \
--tag p="$SERVER_PUBKEY" \
--tag expiration="$expiration" \
@@ -104,7 +104,7 @@ send_admin_command() {
log_info "=== Testing Admin Command: $command ==="
# Create Kind 23456 event
# Create Kind 23458 event
local event=$(create_admin_command_event "$command")
if [[ -z "$event" ]]; then
@@ -132,10 +132,10 @@ send_admin_command() {
log_success "HTTP $http_code - Response received"
echo "$body" | jq . 2>/dev/null || echo "$body"
# Try to parse as Kind 23457 event
# Try to parse as Kind 23459 event
local kind=$(echo "$body" | jq -r '.kind // empty' 2>/dev/null)
if [[ "$kind" == "23457" ]]; then
log_success "Received Kind 23457 response event"
if [[ "$kind" == "23459" ]]; then
log_success "Received Kind 23459 response event"
local response_content=$(echo "$body" | jq -r '.content // empty' 2>/dev/null)
log_info "Response content (encrypted): $response_content"
# TODO: Decrypt NIP-44 content to see actual response
@@ -174,7 +174,7 @@ test_server_health() {
main() {
echo "=== Ginxsom Admin Event Test Suite ==="
echo "Testing Kind 23456/23457 admin command system"
echo "Testing Kind 23458/23459 admin command system"
echo ""
log_info "Test Configuration:"