diff --git a/.gitignore b/.gitignore index ae5f179c..0df17fbe 100644 --- a/.gitignore +++ b/.gitignore @@ -38,4 +38,6 @@ dist .nostr # Docker Compose overrides -docker-compose.overrides.yml \ No newline at end of file +docker-compose.overrides.yml +# Export output +*.jsonl diff --git a/README.md b/README.md index 4effe45e..f800853e 100644 --- a/README.md +++ b/README.md @@ -543,6 +543,17 @@ To see the integration test coverage report open `.coverage/integration/lcov-rep open .coverage/integration/lcov-report/index.html ``` +## Export Events + +Export all stored events to a [JSON Lines](https://jsonlines.org/) (`.jsonl`) file. Each line is a valid NIP-01 Nostr event JSON object. The export streams rows from the database using cursors, so it works safely on relays with millions of events without loading them into memory. + +``` +npm run export # writes to events.jsonl +npm run export -- backup-2024-01-01.jsonl # custom filename +``` + +The script reads the same `DB_*` environment variables used by the relay (see [CONFIGURATION.md](CONFIGURATION.md)). + ## Configuration You can change the default folder by setting the `NOSTR_CONFIG_DIR` environment variable to a different path. 
diff --git a/package.json b/package.json index 59cf9877..2c0871c0 100644 --- a/package.json +++ b/package.json @@ -42,6 +42,7 @@ "pretest:integration": "mkdir -p .test-reports/integration", "test:integration": "cucumber-js", "cover:integration": "nyc --report-dir .coverage/integration npm run test:integration -- -p cover", + "export": "node -r ts-node/register src/scripts/export-events.ts", "docker:compose:start": "./scripts/start", "docker:compose:stop": "./scripts/stop", "docker:compose:clean": "./scripts/clean", diff --git a/src/scripts/export-events.ts b/src/scripts/export-events.ts new file mode 100644 index 00000000..d2c08300 --- /dev/null +++ b/src/scripts/export-events.ts @@ -0,0 +1,99 @@
// Side-effect import: registers pg-query-stream so knex's .stream() uses a
// server-side cursor instead of buffering the whole result set.
import 'pg-query-stream'
import dotenv from 'dotenv'
dotenv.config()

import fs from 'fs'
import knex from 'knex'
import path from 'path'
import { pipeline } from 'stream/promises'
import { Transform } from 'stream'

// Builds the knex connection config from the same DB_* environment
// variables the relay itself reads; DB_URI, when set, wins outright.
const getDbConfig = () => ({
  client: 'pg',
  connection: process.env.DB_URI || {
    host: process.env.DB_HOST ?? 'localhost',
    port: Number(process.env.DB_PORT ?? 5432),
    user: process.env.DB_USER ?? 'postgres',
    password: process.env.DB_PASSWORD ?? 'postgres',
    database: process.env.DB_NAME ?? 'nostream',
  },
})

/**
 * Streams every non-deleted event from the database to a JSON Lines file
 * (one NIP-01 event object per line). Rows are cursor-streamed inside a
 * read-only REPEATABLE READ transaction, so the export is a consistent
 * snapshot and never materializes the full table in memory.
 *
 * Output path comes from argv[2], defaulting to ./events.jsonl.
 */
async function exportEvents(): Promise<void> {
  const filename = process.argv[2] || 'events.jsonl'
  const outputPath = path.resolve(filename)
  const db = knex(getDbConfig())

  try {
    // Up-front count is informational (progress logging and the empty-table
    // short-circuit); the snapshot read below is the authoritative row set.
    const [{ count }] = await db('events')
      .whereNull('deleted_at')
      .count('* as count')
    const total = Number(count)

    if (total === 0) {
      console.log('No events to export.')
      return
    }

    console.log(`Exporting ${total} events to ${outputPath}`)

    const output = fs.createWriteStream(outputPath)
    let exported = 0

    // null transaction scope = manual commit/rollback; REPEATABLE READ keeps
    // the cursor scan stable while concurrent writes land.
    const trx = await db.transaction(null, { isolationLevel: 'repeatable read' })
    try {
      await trx.raw('SET TRANSACTION READ ONLY')

      const dbStream = trx('events')
        .select(
          'event_id',
          'event_pubkey',
          'event_kind',
          'event_created_at',
          'event_content',
          'event_tags',
          'event_signature',
        )
        .whereNull('deleted_at')
        .orderBy('event_created_at', 'asc')
        .stream()

      // Maps one DB row to one NIP-01 JSON line. id/pubkey/sig are stored as
      // binary buffers and serialized as lowercase hex.
      const toJsonLine = new Transform({
        objectMode: true,
        transform(row: any, _encoding, callback) {
          const event = {
            id: row.event_id.toString('hex'),
            pubkey: row.event_pubkey.toString('hex'),
            created_at: row.event_created_at,
            kind: row.event_kind,
            // assumes event_tags arrives as a decoded JSON array (jsonb
            // column) — TODO confirm against the relay's schema
            tags: row.event_tags || [],
            content: row.event_content,
            sig: row.event_signature.toString('hex'),
          }

          exported++
          if (exported % 10000 === 0) {
            console.log(`Exported ${exported}/${total} events...`)
          }

          callback(null, JSON.stringify(event) + '\n')
        },
      })

      // pipeline propagates errors from any stage and ends the file stream.
      await pipeline(dbStream, toJsonLine, output)
      await trx.commit()
    } catch (err) {
      await trx.rollback()
      throw err
    }

    console.log(`Export complete: ${exported} events written to ${outputPath}`)
  } finally {
    // Always release the connection pool so the process can exit.
    await db.destroy()
  }
}

exportEvents().catch((error: unknown) => {
  // catch parameters are `unknown` under strict mode; narrow before reading
  // .message so a non-Error rejection still prints something useful.
  console.error('Export failed:', error instanceof Error ? error.message : error)
  process.exit(1)
})