src/logger/emitter/batch-sink.ts

93.96% Statements 109/116
95% Branches 38/40
92.3% Functions 12/13
93.96% Lines 109/116

import { debounce } from '../../utils/async/debounce.ts';
import type { AsyncFinalizer } from '../implementation/finalizer.ts';
import type { LogEntry } from '../log-entry.ts';
import { asLogEntry } from '../log-entry.ts';
import type { LogSink } from './sink.ts';
import { asLogSink } from './sink.ts';
 
/**
 * Options for configuring the batch sink wrapper.
 */
export type BatchSinkOptions = {
  /**
   * Maximum number of log entries to buffer before flushing.
   *
   * @default 100
   */
  readonly maxBufferSize?: number;
 
  /**
   * Maximum time in milliseconds to wait before flushing the buffer.
   *
   * @default 1000 (1s)
   */
  readonly flushDelayMs?: number;
 
  /**
   * By default, when the `process` object is available, the batch sink registers handlers that flush the buffer on
   * process exit (`exit`, `SIGINT`, and `SIGTERM`). Set to `true` to skip registering these handlers.
   *
   * @default false
   */
  readonly skipFlushOnExit?: boolean;
};
 
/**
 * Creates a batching wrapper around any log sink.
 *
 * The batch sink accumulates log entries in memory and forwards them to the wrapped sink in batches, improving
 * performance for high-volume logging scenarios.
 *
 * @example Basic usage with file sink
 *
 * ```ts
 * import { emitter } from 'emitnlog/logger';
 * import { fileSink } from 'emitnlog/logger/node';
 *
 * const batchedFile = emitter.batchSink(fileSink('/logs/app.log'), { maxBufferSize: 100, flushDelayMs: 1000 });
 * const logger = emitter.createLogger('info', batchedFile);
 * ```
 *
 * @example Memory sink with batching
 *
 * ```ts
 * import { emitter } from 'emitnlog/logger';
 *
 * const memory = emitter.memorySink();
 * const batched = emitter.batchSink(memory, { maxBufferSize: 50, flushDelayMs: 2000 });
 * ```
 *
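 * @example Skipping the automatic exit flush
 *
 * A minimal sketch of opting out of the process-exit handlers and closing the sink explicitly; the shutdown step
 * shown here is hypothetical and stands in for whatever hook the host application already uses.
 *
 * ```ts
 * import { emitter } from 'emitnlog/logger';
 *
 * const batched = emitter.batchSink(emitter.memorySink(), { flushDelayMs: 500, skipFlushOnExit: true });
 *
 * // Later, in the application's own shutdown path:
 * await batched.close();
 * ```
 *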
 * @param logSink The log sink to wrap with batching
 * @param options Configuration options for batching behavior
 * @returns A log sink that batches log entries
 */
export const batchSink = (logSink: LogSink, options?: BatchSinkOptions): AsyncFinalizer<LogSink> => {
  const maxBufferSize = options?.maxBufferSize ?? 100;
  const flushDelayMs = options?.flushDelayMs ?? 1000;
  const skipFlushOnExit = options?.skipFlushOnExit ?? false;
 
  if (flushDelayMs === 0) {
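    // A zero delay disables batching entirely: every entry is forwarded straight to the wrapped sink.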
    return asLogSink((level, message, args) => logSink.sink(level, message, args), {
      flush: async () => logSink.flush?.(),
      close: async () => logSink.close?.(),
    });
  }
 
  let buffer: LogEntry[] = [];
  let isClosing = false;
 
  const useTimeBasedFlushing = flushDelayMs < Number.MAX_SAFE_INTEGER;
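  // When time-based flushing is disabled (batchSizeSink passes Number.MAX_SAFE_INTEGER), flush purely by size.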
  if (!useTimeBasedFlushing) {
    return asLogSink(
      (level, message, args) => {
        if (buffer.length >= maxBufferSize - 1) {
          // Drain the buffered entries first so the wrapped sink receives them in the original order.
          for (const entry of buffer) {
            logSink.sink(entry.level, entry.message, entry.args);
          }
          buffer = [];
          logSink.sink(level, message, args);
        } else {
          buffer.push(asLogEntry(level, message, args));
        }
      },
      {
        flush: async () => {
          for (const entry of buffer) {
            logSink.sink(entry.level, entry.message, entry.args);
          }
          buffer = [];
          await logSink.flush?.();
        },
        close: async () => {
          for (const entry of buffer) {
            logSink.sink(entry.level, entry.message, entry.args);
          }
          buffer = [];
          await logSink.close?.();
        },
      },
    );
  }
 
  const flushBuffer = (force = false): void => {
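    // Drain the buffered entries into the wrapped sink; `force` bypasses the isClosing guard so close() and the
    // process-exit handlers can flush the final entries.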
    if (!buffer.length) {
      return;
    }
 
    if (isClosing && !force) {
      return;
    }
 
    const entries = buffer;
    buffer = [];
 
    for (const entry of entries) {
      logSink.sink(entry.level, entry.message, entry.args);
    }
  };
 
  // Use debounce for time-based flushing with accumulator
  const debouncedFlush = debounce(() => flushBuffer(), { delay: flushDelayMs });
 
  let exitHandler: (() => void) | undefined;
 
  /* eslint-disable no-undef */
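  // When `process` is available (and unless skipFlushOnExit is set), flush pending entries on process exit.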
  if (!skipFlushOnExit && typeof process !== 'undefined' && typeof process.on === 'function') {
    exitHandler = (): void => {
      isClosing = true;
      flushBuffer(true);
    };
 
    process.on('exit', exitHandler);
    process.on('SIGINT', exitHandler);
    process.on('SIGTERM', exitHandler);
  }
  /* eslint-enable no-undef */
 
  const batchedSink = {
    sink: (level, message, args): void => {
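      // Once the sink is closing, bypass the buffer and forward entries straight to the wrapped sink.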
      if (isClosing) {
        logSink.sink(level, message, args);
        return;
      }
 
      buffer.push(asLogEntry(level, message, args));
 
      // Flush immediately if buffer is full
      if (buffer.length >= maxBufferSize) {
        flushBuffer();
      } else {
        // Schedule a flush after the delay using debounce
        void debouncedFlush();
      }
    },
 
    async flush(): Promise<void> {
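      // Cancel any pending debounced flush, drain the buffer synchronously, then delegate to the wrapped sink.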
      debouncedFlush.cancel(true);
      flushBuffer();
 
      await logSink.flush?.();
    },
 
    async close(): Promise<void> {
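      // close() is idempotent: only the first call drains the buffer, removes the exit handlers, and closes the
      // wrapped sink.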
      if (isClosing) {
        return;
      }
 
      isClosing = true;
      debouncedFlush.cancel(true);
      flushBuffer(true);
 
      /* eslint-disable no-undef */
      if (exitHandler && typeof process !== 'undefined' && typeof process.removeListener === 'function') {
        process.removeListener('exit', exitHandler);
        process.removeListener('SIGINT', exitHandler);
        process.removeListener('SIGTERM', exitHandler);
      }
      /* eslint-enable no-undef */
 
      await logSink.close?.();
    },
  } as const satisfies LogSink;
 
  return batchedSink;
};
 
/**
 * Creates a batch sink with size-based flushing only (no time delay).
 *
 * @example
 *
 * ```ts
 * import { emitter } from 'emitnlog/logger';
 *
 * const batched = emitter.batchSizeSink(
 *   emitter.memorySink(),
 *   100, // Flush every 100 entries
 * );
 * ```
 */
export const batchSizeSink = (logSink: LogSink, maxBufferSize: number): AsyncFinalizer<LogSink> =>
  batchSink(logSink, {
    maxBufferSize,
    flushDelayMs: Number.MAX_SAFE_INTEGER, // Effectively disable time-based flushing
  });
 
/**
 * Creates a batch sink with time-based flushing only (no size limit).
 *
 * @example
 *
 * ```ts
 * import { emitter } from 'emitnlog/logger';
 *
 * const batched = emitter.batchTimeSink(
 *   emitter.memorySink(),
 *   5000, // Flush every 5 seconds
 * );
 * ```
 */
export const batchTimeSink = (logSink: LogSink, flushDelayMs: number): AsyncFinalizer<LogSink> =>
  batchSink(logSink, {
    maxBufferSize: Number.MAX_SAFE_INTEGER, // Effectively disable size-based flushing
    flushDelayMs,
  });