#!/bin/bash
# Performance Benchmarking Suite for C-Relay
# Measures performance metrics and throughput

set -e

# Configuration — constants, marked readonly so nothing below can
# accidentally reassign them.
readonly RELAY_HOST="127.0.0.1"
readonly RELAY_PORT="8888"
readonly BENCHMARK_DURATION=30 # seconds (default per-test duration)

# Colors for output
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color

# Metrics tracking — mutable globals, updated by benchmark_request and
# reset at the start of each run_throughput_benchmark invocation.
TOTAL_REQUESTS=0
SUCCESSFUL_REQUESTS=0
FAILED_REQUESTS=0
TOTAL_RESPONSE_TIME=0    # sum of per-request latencies, in milliseconds
MIN_RESPONSE_TIME=999999 # sentinel: any real latency is smaller
MAX_RESPONSE_TIME=0
#######################################
# Benchmark a single request/response round trip against the relay.
# Arguments: $1 - raw Nostr message (JSON array) to send
# Globals:   updates TOTAL_REQUESTS, SUCCESSFUL_REQUESTS, FAILED_REQUESTS,
#            TOTAL_RESPONSE_TIME, MIN_RESPONSE_TIME, MAX_RESPONSE_TIME
#######################################
benchmark_request() {
    local message="$1"
    local start_time
    local end_time
    local response_time
    local response

    start_time=$(date +%s%N)
    # Pipe the message straight into websocat instead of interpolating it
    # into a `bash -c` command string — the old form broke (and was a shell
    # injection hazard) as soon as the message contained a single quote.
    # An empty or failed read is treated the same as the old TIMEOUT case.
    response=$(printf '%s\n' "$message" \
        | timeout 5 websocat -B 1048576 "ws://$RELAY_HOST:$RELAY_PORT" 2>/dev/null \
        | head -1) || response='TIMEOUT'
    [[ -n "$response" ]] || response='TIMEOUT'
    end_time=$(date +%s%N)

    response_time=$(( (end_time - start_time) / 1000000 )) # ns -> ms

    TOTAL_REQUESTS=$((TOTAL_REQUESTS + 1))
    TOTAL_RESPONSE_TIME=$((TOTAL_RESPONSE_TIME + response_time))

    if [[ $response_time -lt $MIN_RESPONSE_TIME ]]; then
        MIN_RESPONSE_TIME=$response_time
    fi

    if [[ $response_time -gt $MAX_RESPONSE_TIME ]]; then
        MAX_RESPONSE_TIME=$response_time
    fi

    # Any standard relay reply (EOSE/EVENT/OK) counts as a success.
    if [[ "$response" == *"EOSE"* || "$response" == *"EVENT"* || "$response" == *"OK"* ]]; then
        SUCCESSFUL_REQUESTS=$((SUCCESSFUL_REQUESTS + 1))
    else
        FAILED_REQUESTS=$((FAILED_REQUESTS + 1))
    fi
}
#######################################
# Run a throughput benchmark with N concurrent clients.
# Arguments: $1 - test name (for the report)
#            $2 - message each client sends repeatedly
#            $3 - number of concurrent clients (default 10)
#            $4 - test duration in seconds (default $BENCHMARK_DURATION)
# Globals:   resets, then aggregates, the shared metric counters
#######################################
run_throughput_benchmark() {
    local test_name="$1"
    local message="$2"
    local concurrent_clients="${3:-10}"
    local test_duration="${4:-$BENCHMARK_DURATION}"

    echo "=========================================="
    echo "Throughput Benchmark: $test_name"
    echo "=========================================="
    echo "Concurrent clients: $concurrent_clients"
    echo "Duration: ${test_duration}s"
    echo ""

    # Reset metrics
    TOTAL_REQUESTS=0
    SUCCESSFUL_REQUESTS=0
    FAILED_REQUESTS=0
    TOTAL_RESPONSE_TIME=0
    MIN_RESPONSE_TIME=999999
    MAX_RESPONSE_TIME=0

    local start_time
    start_time=$(date +%s)

    # Each client runs in a background subshell, so any counter updates it
    # makes are lost when it exits (the original version reported zeros
    # because of this). Every client therefore writes its final counters to
    # a file in this scratch directory and the parent aggregates them below.
    local stats_dir
    stats_dir=$(mktemp -d)

    # Launch concurrent clients
    local pids=()
    local i
    for i in $(seq 1 "$concurrent_clients"); do
        (
            local client_start
            client_start=$(date +%s)

            while [[ $(($(date +%s) - client_start)) -lt $test_duration ]]; do
                benchmark_request "$message"
                # Small delay to prevent overwhelming the relay.
                # NB: do not use ((var++)) style counters here — with a
                # starting value of 0 that returns status 1 and kills the
                # subshell under `set -e`.
                sleep 0.01
            done

            # TOTAL_REQUESTS was reset to 0 before the fork, so inside this
            # subshell it equals this client's own request count.
            echo "client_${i}_requests:$TOTAL_REQUESTS"
            printf '%s %s %s %s %s %s\n' \
                "$TOTAL_REQUESTS" "$SUCCESSFUL_REQUESTS" "$FAILED_REQUESTS" \
                "$TOTAL_RESPONSE_TIME" "$MIN_RESPONSE_TIME" "$MAX_RESPONSE_TIME" \
                > "$stats_dir/client_$i"
        ) &
        pids+=($!)
    done

    # Wait for all clients to complete. The original wrapped `wait` in a
    # command substitution, which runs it in a subshell where $pid is not a
    # child — so it never actually waited. Call wait directly; a failing
    # client must not abort the whole benchmark under `set -e`.
    local pid
    for pid in "${pids[@]}"; do
        wait "$pid" || true
    done

    local end_time
    end_time=$(date +%s)
    local actual_duration=$((end_time - start_time))

    # Aggregate the per-client counters into the global metrics.
    local stats_file c_total c_ok c_fail c_time c_min c_max
    for stats_file in "$stats_dir"/client_*; do
        [[ -f "$stats_file" ]] || continue
        read -r c_total c_ok c_fail c_time c_min c_max < "$stats_file"
        TOTAL_REQUESTS=$((TOTAL_REQUESTS + c_total))
        SUCCESSFUL_REQUESTS=$((SUCCESSFUL_REQUESTS + c_ok))
        FAILED_REQUESTS=$((FAILED_REQUESTS + c_fail))
        TOTAL_RESPONSE_TIME=$((TOTAL_RESPONSE_TIME + c_time))
        if [[ $c_min -lt $MIN_RESPONSE_TIME ]]; then
            MIN_RESPONSE_TIME=$c_min
        fi
        if [[ $c_max -gt $MAX_RESPONSE_TIME ]]; then
            MAX_RESPONSE_TIME=$c_max
        fi
    done
    rm -rf -- "$stats_dir"

    # Calculate metrics
    local avg_response_time="N/A"
    if [[ $SUCCESSFUL_REQUESTS -gt 0 ]]; then
        avg_response_time="$((TOTAL_RESPONSE_TIME / SUCCESSFUL_REQUESTS))ms"
    fi

    local requests_per_second="N/A"
    if [[ $actual_duration -gt 0 ]]; then
        requests_per_second="$((TOTAL_REQUESTS / actual_duration))"
    fi

    local success_rate="N/A"
    if [[ $TOTAL_REQUESTS -gt 0 ]]; then
        success_rate="$((SUCCESSFUL_REQUESTS * 100 / TOTAL_REQUESTS))%"
    fi

    # Report results
    echo "=== Benchmark Results ==="
    echo "Total requests: $TOTAL_REQUESTS"
    echo "Successful requests: $SUCCESSFUL_REQUESTS"
    echo "Failed requests: $FAILED_REQUESTS"
    echo "Success rate: $success_rate"
    echo "Requests per second: $requests_per_second"
    echo "Average response time: $avg_response_time"
    echo "Min response time: ${MIN_RESPONSE_TIME}ms"
    echo "Max response time: ${MAX_RESPONSE_TIME}ms"
    echo "Actual duration: ${actual_duration}s"
    echo ""

    # Performance assessment. Guard the non-numeric "N/A" case first — the
    # original `[[ "N/A" -gt 1000 ]]` is an arithmetic error that aborts
    # the script under `set -e`.
    if [[ ! "$requests_per_second" =~ ^[0-9]+$ ]]; then
        echo -e "${RED}✗ LOW throughput${NC}"
    elif [[ $requests_per_second -gt 1000 ]]; then
        echo -e "${GREEN}✓ EXCELLENT throughput${NC}"
    elif [[ $requests_per_second -gt 500 ]]; then
        echo -e "${GREEN}✓ GOOD throughput${NC}"
    elif [[ $requests_per_second -gt 100 ]]; then
        echo -e "${YELLOW}⚠ MODERATE throughput${NC}"
    else
        echo -e "${RED}✗ LOW throughput${NC}"
    fi
}
#######################################
# Resident set size (KB) of the first matching c_relay process, or 0 when
# no such process exists — an empty string here would make the arithmetic
# in benchmark_memory_usage a syntax error that aborts under `set -e`.
# The "[c]_relay" pattern matches the relay but not this grep's own entry,
# replacing the `grep c_relay | grep -v grep` idiom.
#######################################
_relay_rss_kb() {
    local rss
    rss=$(ps aux | grep '[c]_relay' | awk '{print $6}' | head -1)
    echo "${rss:-0}"
}

#######################################
# Benchmark memory usage patterns: measure the relay's RSS while ramping
# up (then closing) increasing numbers of concurrent subscriptions.
#######################################
benchmark_memory_usage() {
    echo "=========================================="
    echo "Memory Usage Benchmark"
    echo "=========================================="

    local initial_memory
    initial_memory=$(_relay_rss_kb)

    echo "Initial memory usage: ${initial_memory}KB"

    # Create increasing number of subscriptions
    local i j
    for i in 10 25 50 100; do
        echo -n "Testing with $i concurrent subscriptions... "

        # Fire-and-forget clients: each holds its subscription open until
        # the 2s timeout expires. Piping into websocat directly avoids the
        # quoting maze of the original `bash -c` wrapper.
        for j in $(seq 1 "$i"); do
            printf '["REQ","mem_test_%s",{}]\n' "$j" \
                | timeout 2 websocat -B 1048576 "ws://$RELAY_HOST:$RELAY_PORT" >/dev/null 2>&1 &
        done

        sleep 2

        local current_memory
        current_memory=$(_relay_rss_kb)
        local memory_increase=$((current_memory - initial_memory))

        echo "${current_memory}KB (+${memory_increase}KB)"

        # Clean up subscriptions
        for j in $(seq 1 "$i"); do
            printf '["CLOSE","mem_test_%s"]\n' "$j" \
                | timeout 2 websocat -B 1048576 "ws://$RELAY_HOST:$RELAY_PORT" >/dev/null 2>&1 &
        done

        sleep 1
    done

    local final_memory
    final_memory=$(_relay_rss_kb)
    echo "Final memory usage: ${final_memory}KB"
}
# --- Main driver ----------------------------------------------------------

# Print a section banner with the given title.
print_banner() {
    echo "=========================================="
    echo "$1"
    echo "=========================================="
}

print_banner "C-Relay Performance Benchmarking Suite"
echo "Benchmarking relay at ws://$RELAY_HOST:$RELAY_PORT"
echo ""

# Verify the relay is reachable before spending time on full benchmarks.
echo "=== Connectivity Test ==="
benchmark_request '["REQ","bench_test",{}]'
if [[ $SUCCESSFUL_REQUESTS -eq 0 ]]; then
    echo -e "${RED}Cannot connect to relay. Aborting benchmarks.${NC}"
    exit 1
fi
echo -e "${GREEN}✓ Relay is accessible${NC}"
echo ""

# Throughput benchmarks; the nanosecond clock keeps subscription ids unique.
run_throughput_benchmark "Simple REQ Throughput" '["REQ","throughput_'$(date +%s%N)'",{}]' 10 15
echo ""

run_throughput_benchmark "Complex Filter Throughput" '["REQ","complex_'$(date +%s%N)'",{"kinds":[1,2,3],"#e":["test"],"limit":10}]' 10 15
echo ""

run_throughput_benchmark "COUNT Message Throughput" '["COUNT","count_'$(date +%s%N)'",{}]' 10 15
echo ""

run_throughput_benchmark "High Load Throughput" '["REQ","high_load_'$(date +%s%N)'",{}]' 25 20
echo ""

# Memory usage benchmark
benchmark_memory_usage
echo ""

print_banner "Benchmarking Complete"
echo "Performance benchmarks completed. Review results above for optimization opportunities."