-- Transactional outbox table: an integration event row is inserted in the
-- SAME transaction as the business write, then published asynchronously by
-- a relay process with retries.
CREATE TABLE IF NOT EXISTS outbox_events (
  id BIGSERIAL PRIMARY KEY,
  event_name TEXT NOT NULL,                -- e.g. 'order.placed'
  dedupe_key TEXT NOT NULL UNIQUE,         -- deterministic key so retried publishes de-duplicate
  payload JSONB NOT NULL,                  -- minimal integration event, not an internal state dump
  status TEXT NOT NULL DEFAULT 'pending',  -- 'pending' until the relay marks it published
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  published_at TIMESTAMPTZ                 -- NULL until successfully published
);

-- The relay repeatedly scans for unpublished rows; without an index that scan
-- degrades as the table grows. A partial index keeps it small and fast:
-- only rows still awaiting publication are indexed, in arrival order.
CREATE INDEX IF NOT EXISTS outbox_events_pending_idx
  ON outbox_events (created_at)
  WHERE status = 'pending';
import { withClient } from '../db/pool';
/**
 * Creates an order and records an `order.placed` outbox event in the SAME
 * database transaction (transactional outbox pattern), so the business write
 * and the integration event commit or roll back together.
 *
 * @param input - customerId and the order total in cents.
 * @returns the new order's id (as returned by the `RETURNING id` clause;
 *          exact runtime type depends on the `orders.id` column — NOTE(review):
 *          if it is BIGINT, node-postgres returns it as a string).
 * @throws rethrows any query error after attempting to roll back.
 */
export async function createOrderWithOutbox(input: { customerId: string; totalCents: number }) {
  return withClient(async (client) => {
    await client.query('BEGIN');
    try {
      const order = await client.query(
        'INSERT INTO orders(customer_id, total_cents) VALUES($1, $2) RETURNING id',
        [input.customerId, input.totalCents]
      );
      const orderId = order.rows[0].id;
      // Deterministic key: a retried publish (or a replayed command) produces
      // the same key, so the UNIQUE constraint / consumers can de-duplicate.
      const dedupeKey = `order:${orderId}:placed`;
      await client.query(
        'INSERT INTO outbox_events(event_name, dedupe_key, payload) VALUES($1, $2, $3)',
        // node-postgres serializes plain objects to JSON for JSONB parameters.
        ['order.placed', dedupeKey, { orderId, ...input }]
      );
      await client.query('COMMIT');
      return { id: orderId };
    } catch (e: unknown) {
      // ROLLBACK can itself fail (e.g. the connection died — often the very
      // reason the transaction failed). Never let that secondary failure
      // mask the original error `e`.
      try {
        await client.query('ROLLBACK');
      } catch {
        // intentionally swallowed: the original error is the one that matters
      }
      throw e;
    }
  });
}
The moment you split ‘write to DB’ and ‘publish to a queue’ into two independent operations, you create a place to lose data. Publish first, and a DB failure means consumers act on something that never happened. Write first, and a publish failure means the system becomes inconsistent. My default is the outbox pattern: insert an outbox row in the same BEGIN/COMMIT transaction as the business write, then publish asynchronously with retries plus a deterministic dedupe_key. It’s boring, but it’s correct. I also keep outbox payloads minimal — they are integration events, not an internal state dump — which reduces coupling and makes replays safe.