14 changed files with 58 additions and 409 deletions
@@ -1,92 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

echo "[$(date)] Starting relay ingest..."

# Config via env or defaults
UPSTREAMS=${UPSTREAMS:-"wss://relay.snort.social wss://relay.damus.io wss://relay.nostr.band"}
DAYS_ARTICLES=${DAYS_ARTICLES:-7}
DAYS_THREADS=${DAYS_THREADS:-3}

# These two should be programmatically generated from the app DB; allow overrides:
ARTICLE_E_LIST=${ARTICLE_E_LIST:-'[]'}  # e.g. ["<eventid1>","<eventid2>"]
ARTICLE_A_LIST=${ARTICLE_A_LIST:-'[]'}  # e.g. ["30023:<authorhex>:<d>",...]

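# A minimal sketch of generating these from the app DB (assumes a hypothetical
# `articles` table with `event_id`, `author_hex`, and `d_tag` columns; adjust
# to the real schema):
#   ARTICLE_E_LIST=$(psql -At -c "SELECT event_id FROM articles" | jq -Rnc '[inputs]')
#   ARTICLE_A_LIST=$(psql -At -c "SELECT '30023:' || author_hex || ':' || d_tag FROM articles" | jq -Rnc '[inputs]')
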
# Helper functions for date calculation
now_ts() { date +%s; }
since_days() {
  local days=$1
  if command -v date >/dev/null 2>&1; then
    # Try GNU date
    if date -d "-${days} days" +%s 2>/dev/null; then
      return 0
    # Try BSD date (macOS)
    elif date -v-${days}d +%s 2>/dev/null; then
      return 0
    fi
  fi
  # Fallback: rough calculation
  echo $(( $(date +%s) - (days * 86400) ))
}

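# Usage: `since_days 7` prints epoch seconds for "7 days ago", e.g. 1712345678
# (value illustrative); callers capture the output with $(since_days ...).
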
# Build filters using jq if available, otherwise use basic JSON
if command -v jq >/dev/null 2>&1; then
  FILTER_ARTICLES=$(jq -nc --argjson kinds '[30023]' --arg since "$(since_days $DAYS_ARTICLES)" '
    {kinds:$kinds, since: ($since|tonumber)}')

  FILTER_REPLIES_E=$(jq -nc --argjson kinds '[1]' --argjson es "$ARTICLE_E_LIST" --arg since "$(since_days $DAYS_THREADS)" '
    {kinds:$kinds, "#e":$es, since: ($since|tonumber)}')

  FILTER_REPLIES_A=$(jq -nc --argjson kinds '[1]' --argjson as "$ARTICLE_A_LIST" --arg since "$(since_days $DAYS_THREADS)" '
    {kinds:$kinds, "#a":$as, since: ($since|tonumber)}')

  FILTER_REACTS=$(jq -nc --argjson kinds '[7]' --argjson es "$ARTICLE_E_LIST" '{kinds:$kinds, "#e":$es}')
  FILTER_ZAPS=$(jq -nc --argjson kinds '[9735]' --argjson es "$ARTICLE_E_LIST" '{kinds:$kinds, "#e":$es}')
  FILTER_HL=$(jq -nc --argjson kinds '[9802]' --argjson as "$ARTICLE_A_LIST" '{kinds:$kinds, "#a":$as}')
  FILTER_PROFILES=$(jq -nc --argjson kinds '[0]' '{kinds:$kinds}')
  FILTER_DELETES=$(jq -nc --argjson kinds '[5]' --arg since "$(since_days 30)" '{kinds:$kinds, since:($since|tonumber)}')
else
  # Fall back to basic JSON strings
  SINCE_ARTICLES=$(since_days $DAYS_ARTICLES)
  SINCE_THREADS=$(since_days $DAYS_THREADS)
  SINCE_DELETES=$(since_days 30)

  FILTER_ARTICLES="{\"kinds\":[30023],\"since\":${SINCE_ARTICLES}}"
  FILTER_REPLIES_E="{\"kinds\":[1],\"#e\":${ARTICLE_E_LIST},\"since\":${SINCE_THREADS}}"
  FILTER_REPLIES_A="{\"kinds\":[1],\"#a\":${ARTICLE_A_LIST},\"since\":${SINCE_THREADS}}"
  FILTER_REACTS="{\"kinds\":[7],\"#e\":${ARTICLE_E_LIST}}"
  FILTER_ZAPS="{\"kinds\":[9735],\"#e\":${ARTICLE_E_LIST}}"
  FILTER_HL="{\"kinds\":[9802],\"#a\":${ARTICLE_A_LIST}}"
  FILTER_PROFILES="{\"kinds\":[0]}"
  FILTER_DELETES="{\"kinds\":[5],\"since\":${SINCE_DELETES}}"
fi

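# For reference, each filter above compacts to single-line JSON such as
#   {"kinds":[30023],"since":1712345678}
# (timestamp illustrative), which is the form passed to strfry sync below.
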
run_sync() {
  local upstream=$1
  local filter=$2
  local label=$3
  echo "[$(date)] Syncing ${label} from ${upstream}..."

  # Write the filter to a temp file inside the container to avoid shell-escaping
  # nightmares; -T disables TTY allocation so the piped stdin reaches the container
  local tmpfile="/tmp/strfry-filter-$$.json"
  echo "$filter" | docker compose exec -T strfry sh -c "cat > $tmpfile"

  # Run sync with the filter file
  docker compose exec strfry sh -c "./strfry sync '$upstream' --filter=\"\$(cat $tmpfile)\" && rm $tmpfile" \
    || echo "[$(date)] WARNING: sync failed for ${label} from ${upstream}"
}

# Sync from all upstream relays
for R in $UPSTREAMS; do
  echo "[$(date)] Processing relay: ${R}"
  run_sync "$R" "$FILTER_ARTICLES" "articles (30023)"
  run_sync "$R" "$FILTER_REPLIES_E" "replies by event-id"
  run_sync "$R" "$FILTER_REPLIES_A" "replies by a-tag"
  run_sync "$R" "$FILTER_REACTS" "reactions (7)"
  run_sync "$R" "$FILTER_ZAPS" "zap receipts (9735)"
  run_sync "$R" "$FILTER_HL" "highlights (9802)"
  run_sync "$R" "$FILTER_PROFILES" "profiles (0)"
  run_sync "$R" "$FILTER_DELETES" "deletes (5)"
done

echo "[$(date)] Relay ingest complete."

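Because all inputs are env-driven, the script can also be exercised ad hoc; a sketch (relay URL and window values are illustrative):

    UPSTREAMS="wss://nos.lol" DAYS_ARTICLES=14 /app/bin/relay/ingest.sh
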
@@ -1,13 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

echo "[$(date)] Starting relay prime (one-time backfill)..."

# Larger time windows for initial backfill
export DAYS_ARTICLES=${DAYS_ARTICLES:-90}
export DAYS_THREADS=${DAYS_THREADS:-30}

# Use the same ingest logic but with extended time windows
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
exec "${SCRIPT_DIR}/ingest.sh"

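The same override pattern applies to the backfill; a deeper one-time prime might look like this (values and path illustrative):

    DAYS_ARTICLES=365 DAYS_THREADS=90 /app/bin/relay/prime.sh
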
@@ -1,136 +0,0 @@
#!/usr/bin/env php
<?php
/**
 * Smoke test for the local relay.
 * Tests that the relay is up and can serve basic queries.
 */

declare(strict_types=1);

// Bootstrap the Composer autoloader from the likely vendor locations
$possibleAutoloaders = [
    __DIR__ . '/../../vendor/autoload.php',
    __DIR__ . '/../../../vendor/autoload.php',
];

$autoloaderFound = false;
foreach ($possibleAutoloaders as $autoloader) {
    if (file_exists($autoloader)) {
        require_once $autoloader;
        $autoloaderFound = true;
        break;
    }
}

if (!$autoloaderFound) {
    fwrite(STDERR, "ERROR: Could not find autoloader. Run 'composer install' first.\n");
    exit(1);
}

use swentel\nostr\Relay\Relay;
use swentel\nostr\Message\RequestMessage;
use swentel\nostr\Filter;
use WebSocket\Message\Text;
use WebSocket\Exception\TimeoutException;

// Get relay URL from environment or use default
$relayUrl = getenv('NOSTR_DEFAULT_RELAY') ?: 'ws://localhost:7777';

echo "Testing relay: {$relayUrl}\n";
echo str_repeat('-', 60) . "\n";

try {
    // Test 1: Basic connection
    echo "Test 1: Connecting to relay...\n";
    $relay = new Relay($relayUrl);
    $relay->connect();
    echo "✓ Connected successfully\n\n";

    // Test 2: Query for long-form articles (kind 30023)
    echo "Test 2: Querying for kind:30023 events (limit 1)...\n";

    $filter = new Filter();
    $filter->setKinds([30023]);
    $filter->setLimit(1);

    $subscriptionId = 'test-' . bin2hex(random_bytes(8));
    $requestMessage = new RequestMessage($subscriptionId, [$filter]);

    $client = $relay->getClient();
    $client->setTimeout(10);
    $client->text($requestMessage->generate());

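    // For reference, generate() should emit a NIP-01 REQ frame roughly like
    //   ["REQ","test-<16 hex chars>",{"kinds":[30023],"limit":1}]
    // (exact serialization depends on the nostr-php version in use).
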
    $foundEvent = false;
    $eventCount = 0;
    $startTime = time();
    $timeout = 10;

    while ((time() - $startTime) < $timeout) {
        try {
            $response = $client->receive();

            if (!$response instanceof Text) {
                continue;
            }

            $content = $response->getContent();
            $decoded = json_decode($content, true);

            if (!is_array($decoded) || count($decoded) < 2) {
                continue;
            }

            $messageType = $decoded[0] ?? '';

            if ($messageType === 'EVENT') {
                $eventCount++;
                $event = $decoded[2] ?? [];
                $eventId = $event['id'] ?? 'unknown';
                $eventKind = $event['kind'] ?? 'unknown';

                echo "✓ Received EVENT: id={$eventId}, kind={$eventKind}\n";
                $foundEvent = true;

                // Send CLOSE
                $client->text(json_encode(['CLOSE', $subscriptionId]));
                break;
            } elseif ($messageType === 'EOSE') {
                echo "  Received EOSE (End of Stored Events)\n";
                // Send CLOSE
                $client->text(json_encode(['CLOSE', $subscriptionId]));
                break;
            } elseif ($messageType === 'NOTICE' || $messageType === 'CLOSED') {
                // NOTICE carries its message at index 1; CLOSED carries it at index 2
                echo "  Received {$messageType}: " . ($decoded[2] ?? $decoded[1] ?? '') . "\n";
                break;
            }
        } catch (TimeoutException $e) {
            echo "  Timeout waiting for response\n";
            break;
        }
    }

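    // For reference, the NIP-01 relay-to-client frames handled above are:
    //   ["EVENT", <subId>, <event>]    ["EOSE", <subId>]
    //   ["NOTICE", <message>]          ["CLOSED", <subId>, <message>]
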
    if (!$foundEvent && $eventCount === 0) {
        echo "⚠ No events found (relay might be empty - try running 'make relay-prime' first)\n\n";
    } else {
        echo "\n";
    }

    // Test 3: Verify write rejection
    echo "Test 3: Testing write policy (should reject)...\n";
    // We'll just document this - an actual test would require creating a signed event
    echo "⚠ Write rejection test not implemented (requires event signing)\n";
    echo "  Manual test: Try publishing an event - should receive rejection message\n\n";

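    // For that manual test, a write-restricted relay should answer a publish with
    // an OK frame whose third element is false, e.g.:
    //   ["OK", "<eventid>", false, "blocked: ..."]
    // (the reason string is relay-specific; shown here for illustration only).
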
    $relay->disconnect();

    echo str_repeat('-', 60) . "\n";
    echo "✓ Smoke test completed successfully\n";

    exit(0);

} catch (\Exception $e) {
    echo "\n✗ ERROR: " . $e->getMessage() . "\n";
    echo "Stack trace:\n" . $e->getTraceAsString() . "\n";
    exit(1);
}

@@ -1,3 +1,3 @@
 0 */6 * * * /index_articles.sh >> /var/log/cron.log 2>&1
-0 */2 * * * /media_discovery.sh >> /var/log/cron.log 2>&1
+2 */2 * * * /media_discovery.sh >> /var/log/cron.log 2>&1
 0 */2 * * * /article_discovery.sh >> /var/log/cron.log 2>&1

@@ -0,0 +1,35 @@
# Same DB path the relay uses
db = "/var/lib/strfry"

streams {
    # One named stream group that pulls down exactly the kinds you care about
    ingest {
        dir = "down"

        # Pull long-form + replies + reactions + zaps + highlights + hygiene
        # 30023 = NIP-23 article
        # 30024 = NIP-23 draft
        # 1     = replies/comments
        # 1111  = comments
        # 7     = reactions
        # 9735  = zap receipts
        # 9802  = highlights
        # 0     = profiles
        # 5     = deletes
        filter = {"kinds":[30023,30024,1,1111,7,9735,9802,0,5]}

        urls = [
            "wss://nos.lol"
            "wss://relay.damus.io"
            "wss://theforest.nostr1.com"
        ]
    }

    # If you later want a second policy (e.g., only profiles), add another block:
    # profiles_only {
    #     dir = "down"
    #     filter = { "kinds": [0] }
    #     urls = [ "wss://nos.lol" ]
    # }
}
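strfry consumes this file via its router subcommand; a minimal invocation (config path illustrative):

    strfry router /etc/strfry/strfry-router.config
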
@@ -1,19 +0,0 @@
FROM alpine:3.19

# Install supercronic, bash, jq, docker-cli for the ingest script
RUN apk add --no-cache \
    bash \
    curl \
    jq \
    docker-cli \
    && curl -fsSLO https://github.com/aptible/supercronic/releases/download/v0.2.29/supercronic-linux-amd64 \
    && chmod +x supercronic-linux-amd64 \
    && mv supercronic-linux-amd64 /usr/local/bin/supercronic

# Set working directory
WORKDIR /app

# Default command (will be overridden by compose)
CMD ["/usr/local/bin/supercronic", "/etc/cron/crontab"]

@@ -1,6 +0,0 @@
# Relay ingest crontab - syncs events from upstream relays every 10 minutes
# Format: minute hour day month weekday command

# Run ingest every 10 minutes
*/10 * * * * /app/bin/relay/ingest.sh >> /var/log/relay-ingest.log 2>&1

@@ -1,37 +0,0 @@
FROM debian:bookworm-slim

# Install build dependencies
RUN apt-get update && apt-get install -y \
    git \
    build-essential \
    libtool \
    autotools-dev \
    automake \
    pkg-config \
    liblmdb-dev \
    libsecp256k1-dev \
    libzstd-dev \
    libssl-dev \
    wget \
    && rm -rf /var/lib/apt/lists/*

# Build strfry from source
WORKDIR /tmp
RUN git clone --depth 1 --branch v1.0.6 https://github.com/hoytech/strfry.git && \
    cd strfry && \
    git submodule update --init && \
    make setup-golpe && \
    make -j$(nproc) && \
    make install && \
    cd / && \
    rm -rf /tmp/strfry

# Create data directory
RUN mkdir -p /var/strfry/db

# Expose relay port
EXPOSE 7777

# Default command (can be overridden)
CMD ["strfry", "relay"]
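A minimal sketch of building and running this image (tag, file path, and volume name are illustrative):

    docker build -t strfry-relay -f docker/strfry/Dockerfile .
    docker run -d -p 7777:7777 -v strfry-data:/var/strfry/db strfry-relay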