diff --git a/changelogs/fragments/6733.yml b/changelogs/fragments/6733.yml
new file mode 100644
index 000000000000..2021bce6c9dc
--- /dev/null
+++ b/changelogs/fragments/6733.yml
@@ -0,0 +1,2 @@
+fix:
+- [OSD Availability] Prevent OSD process crashes when disk is full ([#6733](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6733))
\ No newline at end of file
diff --git a/config/opensearch_dashboards.yml b/config/opensearch_dashboards.yml
index f94234cffe1b..0844db34c36c 100644
--- a/config/opensearch_dashboards.yml
+++ b/config/opensearch_dashboards.yml
@@ -135,6 +135,11 @@
 # Enables you to specify a file where OpenSearch Dashboards stores log output.
 #logging.dest: stdout
 
+# This configuration option controls how the logging stream handles errors. It is disabled by default.
+# When set to true, an 'ENOSPC' (disk full) error will not cause the OpenSearch Dashboards process to crash.
+# Otherwise, the original behavior is maintained.
+#logging.ignoreEnospcError: false
+
 # Set the value of this setting to true to suppress all logging output.
 #logging.silent: false
 
diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/bin/opensearch-dashboards-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/bin/opensearch-dashboards-docker
index c9cf5d1213c0..3f17d5e7cc60 100755
--- a/src/dev/build/tasks/os_packages/docker_generator/resources/bin/opensearch-dashboards-docker
+++ b/src/dev/build/tasks/os_packages/docker_generator/resources/bin/opensearch-dashboards-docker
@@ -58,6 +58,7 @@ opensearch_dashboards_vars=(
     opensearchDashboards.defaultAppId
     opensearchDashboards.index
     logging.dest
+    logging.ignoreEnospcError
     logging.json
     logging.quiet
     logging.rotate.enabled
diff --git a/src/legacy/server/config/schema.js b/src/legacy/server/config/schema.js
index f109c6058662..4c8d5c2bce6c 100644
--- a/src/legacy/server/config/schema.js
+++ b/src/legacy/server/config/schema.js
@@ -109,6 +109,7 @@ export default () =>
         }),
         events: Joi.any().default({}),
         dest: Joi.string().default('stdout'),
+        ignoreEnospcError: Joi.boolean().default(false),
         filter: Joi.any().default({}),
         json: Joi.boolean().when('dest', {
           is: 'stdout',
diff --git a/src/legacy/server/logging/configuration.js b/src/legacy/server/logging/configuration.js
index 93103b3e5067..e942af2b9352 100644
--- a/src/legacy/server/logging/configuration.js
+++ b/src/legacy/server/logging/configuration.js
@@ -64,6 +64,7 @@ export default function loggingConfiguration(config) {
     json: config.get('logging.json'),
     dest: config.get('logging.dest'),
     timezone: config.get('logging.timezone'),
+    ignoreEnospcError: config.get('logging.ignoreEnospcError'),
 
     // I'm adding the default here because if you add another filter
     // using the commandline it will remove authorization. I want users
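
Reviewer note on the config plumbing above: the Joi key default means `logging.ignoreEnospcError` resolves to `false` unless an operator opts in, and `loggingConfiguration()` simply forwards whatever `config.get('logging.ignoreEnospcError')` returns into the logging options. A minimal sketch of that default resolution, assuming a standalone `joi` install; the `loggingSchema` and `logging` names below are illustrative only and not part of this change:

```js
// Sketch only: how the schema default above behaves when nothing is set in
// opensearch_dashboards.yml. Not the legacy config service, just plain joi.
const Joi = require('joi');

const loggingSchema = Joi.object({
  dest: Joi.string().default('stdout'),
  ignoreEnospcError: Joi.boolean().default(false),
});

// Joi.attempt applies the key defaults and throws on validation errors.
const logging = Joi.attempt({}, loggingSchema);
console.log(logging); // { dest: 'stdout', ignoreEnospcError: false } -> original crash-on-ENOSPC behavior
```
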
diff --git a/src/legacy/server/logging/log_reporter.js b/src/legacy/server/logging/log_reporter.js
index 228c83129802..b8e39304a7b6 100644
--- a/src/legacy/server/logging/log_reporter.js
+++ b/src/legacy/server/logging/log_reporter.js
@@ -30,6 +30,7 @@ import { Squeeze } from '@hapi/good-squeeze';
 
 import { createWriteStream as writeStr } from 'fs';
+import { pipeline } from 'stream';
 
 import LogFormatJson from './log_format_json';
 import LogFormatString from './log_format_string';
 
@@ -51,18 +52,33 @@ export function getLoggerStream({ events, config }) {
   let dest;
   if (config.dest === 'stdout') {
     dest = process.stdout;
+    logInterceptor.pipe(squeeze).pipe(format).pipe(dest);
   } else {
     dest = writeStr(config.dest, {
       flags: 'a',
       encoding: 'utf8',
     });
 
-    logInterceptor.on('end', () => {
-      dest.end();
-    });
+    if (config.ignoreEnospcError) {
+      pipeline(logInterceptor, squeeze, format, dest, onFinished);
+    } else {
+      logInterceptor.on('end', () => {
+        dest.end();
+      });
+      logInterceptor.pipe(squeeze).pipe(format).pipe(dest);
+    }
   }
 
-  logInterceptor.pipe(squeeze).pipe(format).pipe(dest);
-
   return logInterceptor;
 }
+
+export function onFinished(error) {
+  if (error) {
+    if (error.code === 'ENOSPC') {
+      // eslint-disable-next-line no-console
+      console.error('Error in logging pipeline:', error.stack);
+    } else {
+      throw error;
+    }
+  }
+}
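
Reviewer note on the `getLoggerStream` change above: `stream.pipeline()` is used on the `ignoreEnospcError` path because it forwards a destination write error (such as ENOSPC on the log file) to a single callback instead of leaving it as an unhandled `'error'` event. A self-contained sketch of that behavior; `source` and `fullDisk` are stand-ins invented for the demo, not the real log streams:

```js
// Sketch only: simulate a full disk and show that pipeline() routes the ENOSPC
// error into its callback, which mirrors what onFinished() does above.
const { pipeline, Readable, Writable } = require('stream');

const source = Readable.from(['one log line\n']);

const fullDisk = new Writable({
  write(chunk, encoding, callback) {
    const err = new Error('no space left on device');
    err.code = 'ENOSPC'; // what fs write streams report when the disk is full
    callback(err);
  },
});

pipeline(source, fullDisk, (error) => {
  if (error && error.code === 'ENOSPC') {
    // Swallowed, so the process keeps running -- same decision as onFinished().
    console.error('Error in logging pipeline:', error.stack);
  } else if (error) {
    throw error;
  }
});
```
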
diff --git a/src/legacy/server/logging/log_reporter.test.js b/src/legacy/server/logging/log_reporter.test.js
new file mode 100644
index 000000000000..babe5b7e6858
--- /dev/null
+++ b/src/legacy/server/logging/log_reporter.test.js
@@ -0,0 +1,148 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import os from 'os';
+import path from 'path';
+import fs from 'fs';
+import stripAnsi from 'strip-ansi';
+import { getLoggerStream, onFinished } from './log_reporter';
+
+const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+describe('getLoggerStream', () => {
+  it('should log to stdout when the json config is set to false', async () => {
+    const lines = [];
+    const origWrite = process.stdout.write;
+    process.stdout.write = (buffer) => {
+      lines.push(stripAnsi(buffer.toString()).trim());
+      return true;
+    };
+
+    const loggerStream = getLoggerStream({
+      config: {
+        json: false,
+        dest: 'stdout',
+        filter: {},
+      },
+      events: { log: '*' },
+    });
+
+    loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
+
+    await sleep(500);
+
+    process.stdout.write = origWrite;
+    expect(lines.length).toBe(1);
+    expect(lines[0]).toMatch(/^log \[[^\]]*\] \[foo\] test data$/);
+  });
+
+  it('should log to stdout when the json config is set to true', async () => {
+    const lines = [];
+    const origWrite = process.stdout.write;
+    process.stdout.write = (buffer) => {
+      lines.push(JSON.parse(buffer.toString().trim()));
+      return true;
+    };
+
+    const loggerStream = getLoggerStream({
+      config: {
+        json: true,
+        dest: 'stdout',
+        filter: {},
+      },
+      events: { log: '*' },
+    });
+
+    loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
+
+    await sleep(500);
+
+    process.stdout.write = origWrite;
+    expect(lines.length).toBe(1);
+    expect(lines[0]).toMatchObject({
+      type: 'log',
+      tags: ['foo'],
+      message: 'test data',
+    });
+  });
+
+  it('should log to custom file when the json config is set to false', async () => {
+    const dir = os.tmpdir();
+    const logfile = `dest-${Date.now()}.log`;
+    const dest = path.join(dir, logfile);
+
+    const loggerStream = getLoggerStream({
+      config: {
+        json: false,
+        dest,
+        filter: {},
+      },
+      events: { log: '*' },
+    });
+
+    loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
+
+    await sleep(500);
+
+    const lines = stripAnsi(fs.readFileSync(dest, { encoding: 'utf8' }))
+      .trim()
+      .split(os.EOL);
+    expect(lines.length).toBe(1);
+    expect(lines[0]).toMatch(/^log \[[^\]]*\] \[foo\] test data$/);
+  });
+
+  it('should log to custom file when the json config is set to true and ignoreEnospcError', async () => {
+    const dir = os.tmpdir();
+    const logfile = `dest-${Date.now()}.log`;
+    const dest = path.join(dir, logfile);
+
+    const loggerStream = getLoggerStream({
+      config: {
+        json: true,
+        dest,
+        ignoreEnospcError: true,
+        filter: {},
+      },
+      events: { log: '*' },
+    });
+
+    loggerStream.end({ event: 'log', tags: ['foo'], data: 'test data' });
+
+    await sleep(500);
+
+    const lines = fs
+      .readFileSync(dest, { encoding: 'utf8' })
+      .trim()
+      .split(os.EOL)
+      .map((data) => JSON.parse(data));
+    expect(lines.length).toBe(1);
+    expect(lines[0]).toMatchObject({
+      type: 'log',
+      tags: ['foo'],
+      message: 'test data',
+    });
+  });
+
+  it('should handle ENOSPC error when disk full', () => {
+    const error = { code: 'ENOSPC', stack: 'Error stack trace' };
+    const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
+
+    expect(() => {
+      onFinished(error);
+    }).not.toThrow();
+
+    expect(consoleErrorSpy).toHaveBeenCalledWith('Error in logging pipeline:', 'Error stack trace');
+
+    consoleErrorSpy.mockRestore();
+  });
+
+  it('should throw error for non-ENOSPC error', () => {
+    const error = { message: 'non-ENOSPC error', code: 'OTHER', stack: 'Error stack trace' };
+
+    expect(() => {
+      onFinished(error);
+    }).toThrowError('non-ENOSPC error');
+  });
+});
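
For context on what the ENOSPC unit tests above guard against: with the old plain `.pipe()` chain, a write error on the destination surfaces as an `'error'` event with no listener, which Node.js escalates to an uncaught exception and the process exits. A hedged repro sketch; `fullDisk` is again a fake writable invented for the demo, and the `uncaughtException` handler exists only so the snippet itself survives:

```js
// Sketch only: the pre-fix failure mode. Piping into a destination that fails
// with ENOSPC emits an unhandled 'error' event and crashes the process.
const { Readable, Writable } = require('stream');

const source = Readable.from(['one log line\n']);

const fullDisk = new Writable({
  write(chunk, encoding, callback) {
    const err = new Error('no space left on device');
    err.code = 'ENOSPC';
    callback(err); // emitted as 'error' on fullDisk, which has no listener
  },
});

// Only here so this demo process does not actually die.
process.on('uncaughtException', (err) => {
  console.error('process would have crashed with:', err.code); // ENOSPC
});

source.pipe(fullDisk);
```
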