From 9f0c4f84f98e55c4f8d7784b1e61f977c140c02f Mon Sep 17 00:00:00 2001 From: Matt Spurlin Date: Fri, 29 Dec 2023 15:47:08 -0500 Subject: [PATCH 1/2] Update Blob Storage Forwarder to use HTTP and run tests in CI --- .github/workflows/azure_tests.yml | 18 + azure/blobs_logs_monitoring/index.js | 700 ++++++++++++++---- azure/package.json | 5 +- ...st.js => activity_logs_monitoring.test.js} | 71 +- azure/test/blobs_logs_monitoring.test.js | 617 +++++++++++++++ 5 files changed, 1239 insertions(+), 172 deletions(-) create mode 100644 .github/workflows/azure_tests.yml rename azure/test/{client.test.js => activity_logs_monitoring.test.js} (91%) create mode 100644 azure/test/blobs_logs_monitoring.test.js diff --git a/.github/workflows/azure_tests.yml b/.github/workflows/azure_tests.yml new file mode 100644 index 000000000..bf18704ce --- /dev/null +++ b/.github/workflows/azure_tests.yml @@ -0,0 +1,18 @@ +name: Azure unit tests + +on: [pull_request] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: '18.x' + - run: npm install + working-directory: azure + - run: npm run test + working-directory: azure + \ No newline at end of file diff --git a/azure/blobs_logs_monitoring/index.js b/azure/blobs_logs_monitoring/index.js index f24ff0137..40500e0cd 100644 --- a/azure/blobs_logs_monitoring/index.js +++ b/azure/blobs_logs_monitoring/index.js @@ -2,191 +2,613 @@ // under the Apache License Version 2.0. // This product includes software developed at Datadog (https://www.datadoghq.com/). // Copyright 2021 Datadog, Inc. +var https = require('https'); -var tls = require('tls'); - -const VERSION = '0.2.0'; +const VERSION = '1.0.0'; const STRING = 'string'; // example: 'some message' const STRING_ARRAY = 'string-array'; // example: ['one message', 'two message', ...] 
const JSON_OBJECT = 'json-object'; // example: {"key": "value"} -const JSON_RECORDS = 'json-records'; // example: [{"records": [{}, {}, ...]}, {"records": [{}, {}, ...]}, ...] -const JSON_ARRAY = 'json-array'; // example: [{"key": "value"}, {"key": "value"}, ...] +const JSON_ARRAY = 'json-array'; // example: [{"key": "value"}, {"key": "value"}, ...] or [{"records": [{}, {}, ...]}, {"records": [{}, {}, ...]}, ...] +const BUFFER_ARRAY = 'buffer-array'; // example: [, ] +const JSON_STRING = 'json-string'; // example: '{"key": "value"}' +const JSON_STRING_ARRAY = 'json-string-array'; // example: ['{"records": [{}, {}]}'] or ['{"key": "value"}'] const INVALID = 'invalid'; +const JSON_TYPE = 'json'; +const STRING_TYPE = 'string'; const DD_API_KEY = process.env.DD_API_KEY || ''; const DD_SITE = process.env.DD_SITE || 'datadoghq.com'; -const DD_URL = process.env.DD_URL || 'functions-intake.logs.' + DD_SITE; -const DD_PORT = process.env.DD_PORT || DD_SITE === 'datadoghq.eu' ? 443 : 10516; +const DD_HTTP_URL = process.env.DD_URL || 'http-intake.logs.' + DD_SITE; +const DD_HTTP_PORT = process.env.DD_PORT || 443; const DD_TAGS = process.env.DD_TAGS || ''; // Replace '' by your comma-separated list of tags const DD_SERVICE = process.env.DD_SERVICE || 'azure'; const DD_SOURCE = process.env.DD_SOURCE || 'azure'; const DD_SOURCE_CATEGORY = process.env.DD_SOURCE_CATEGORY || 'azure'; -module.exports = function(context, blobContent) { - if (!DD_API_KEY || DD_API_KEY === '') { - context.log.error( - 'You must configure your API key before starting this function (see ## Parameters section)' - ); - return; +/* +To scrub PII from your logs, uncomment the applicable configs below. 
If you'd like to scrub more than just +emails and IP addresses, add your own config to this map in the format +NAME: {pattern: , replacement: } +*/ +const SCRUBBER_RULE_CONFIGS = { + // REDACT_IP: { + // pattern: /[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/, + // replacement: 'xxx.xxx.xxx.xxx' + // }, + // REDACT_EMAIL: { + // pattern: /[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+/, + // replacement: 'xxxxx@xxxxx.com' + // } +}; + +/* +To split array-type fields in your logs into individual logs, you can add sections to the map below. An example of +a potential use case with azure.datafactory is there to show the format: +{ + source_type: + paths: [list of [list of fields in the log payload to iterate through to find the one to split]], + keep_original_log: bool, if you'd like to preserve the original log in addition to the split ones or not, + preserve_fields: bool, whether or not to keep the original log fields in the new split logs +} +You can also set the DD_LOG_SPLITTING_CONFIG env var with a JSON string in this format. 
+*/ +const DD_LOG_SPLITTING_CONFIG = { + // 'azure.datafactory': { + // paths: [['properties', 'Output', 'value']], + // keep_original_log: true, + // preserve_fields: true + // } +}; + +function getLogSplittingConfig() { + try { + return JSON.parse(process.env.DD_LOG_SPLITTING_CONFIG); + } catch { + return DD_LOG_SPLITTING_CONFIG; } +} + +class ScrubberRule { + constructor(name, pattern, replacement) { + this.name = name; + this.replacement = replacement; + this.regexp = RegExp(pattern, 'g'); + } +} + - var socket = getSocket(context); - var sender = tagger => record => { - record = tagger(record, context); - if (!send(socket, record)) { - // Retry once - socket = getSocket(context); - send(socket, record); +class Batcher { + constructor(context, maxItemSizeBytes, maxBatchSizeBytes, maxItemsCount) { + this.maxItemSizeBytes = maxItemSizeBytes; + this.maxBatchSizeBytes = maxBatchSizeBytes; + this.maxItemsCount = maxItemsCount; + } + + batch(items) { + var batches = []; + var batch = []; + var sizeBytes = 0; + var sizeCount = 0; + for (var i = 0; i < items.length; i++) { + var item = items[i]; + var itemSizeBytes = this.getSizeInBytes(item); + if ( + sizeCount > 0 && + (sizeCount >= this.maxItemsCount || + sizeBytes + itemSizeBytes > this.maxBatchSizeBytes) + ) { + batches.push(batch); + batch = []; + sizeBytes = 0; + sizeCount = 0; + } + // all items exceeding maxItemSizeBytes are dropped here + if (itemSizeBytes <= this.maxItemSizeBytes) { + batch.push(item); + sizeBytes += itemSizeBytes; + sizeCount += 1; + } } - }; - var logs; - if (typeof blobContent === 'string') { - logs = blobContent.trim().split('\n'); - } else if (Buffer.isBuffer(blobContent)) { - logs = blobContent - .toString('utf8') - .trim() - .split('\n'); - } else { - logs = JSON.stringify(blobContent) - .trim() - .split('\n'); + if (sizeCount > 0) { + batches.push(batch); + } + return batches; } - logs.forEach(log => { - handleLogs(sender, log, context); - }); + getSizeInBytes(string) { + if (typeof 
string !== 'string') { + string = JSON.stringify(string); + } + return Buffer.byteLength(string, 'utf8'); + } +} - socket.end(); - context.done(); -}; +class HTTPClient { + constructor(context) { + this.context = context; + this.httpOptions = { + hostname: DD_HTTP_URL, + port: DD_HTTP_PORT, + path: '/v1/input', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'DD-API-KEY': DD_API_KEY + } + }; + this.scrubber = new Scrubber(this.context, SCRUBBER_RULE_CONFIGS); + this.batcher = new Batcher( + this.context, + 256 * 1000, + 4 * 1000 * 1000, + 400 + ); + } -function getSocket(context) { - var socket = tls.connect({ port: DD_PORT, host: DD_URL }); - socket.on('error', err => { - context.log.error(err.toString()); - socket.end(); - }); + async sendAll(records) { + var batches = this.batcher.batch(records); + var promises = []; + for (var i = 0; i < batches.length; i++) { + promises.push(this.sendWithRetry(batches[i])); + } + return await Promise.all( + promises.map(p => p.catch(e => context.log.error(e))) + ); + } - return socket; -} + sendWithRetry(record) { + return new Promise((resolve, reject) => { + return this.send(record) + .then(res => { + resolve(true); + }) + .catch(err => { + this.send(record) + .then(res => { + resolve(true); + }) + .catch(err => { + reject( + `unable to send request after 2 tries, err: ${err}` + ); + }); + }); + }); + } -function send(socket, record) { - return socket.write(DD_API_KEY + ' ' + JSON.stringify(record) + '\n'); + send(record) { + return new Promise((resolve, reject) => { + const req = https + .request(this.httpOptions, resp => { + if (resp.statusCode < 200 || resp.statusCode > 299) { + reject(`invalid status code ${resp.statusCode}`); + } else { + resolve(true); + } + }) + .on('error', error => { + reject(error); + }); + req.write(this.scrubber.scrub(JSON.stringify(record))); + req.end(); + }); + } } -function handleLogs(sender, logs, context) { - var logsType = getLogFormat(logs); - switch (logsType) { - 
case STRING: +class Scrubber { + constructor(context, configs) { + var rules = []; + for (const [name, settings] of Object.entries(configs)) { try { - logs = JSON.parse(logs); - if (logs.records !== undefined) { - logs.records.forEach(sender(addTagsToJsonLog)); - } else { - sender(addTagsToJsonLog)(logs); - } + rules.push( + new ScrubberRule( + name, + settings['pattern'], + settings['replacement'] + ) + ); } catch { - sender(addTagsToStringLog)(logs); + context.log.error( + `Regexp for rule ${name} pattern ${settings['pattern']} is malformed, skipping. Please update the pattern for this rule to be applied.` + ); } - break; - case JSON_OBJECT: - sender(addTagsToJsonLog)(logs); - break; - case STRING_ARRAY: - logs.forEach(message => { + } + this.rules = rules; + } + + scrub(record) { + if (!this.rules) { + return record; + } + this.rules.forEach(rule => { + record = record.replace(rule.regexp, rule.replacement); + }); + return record; + } +} + +class BlobStorageLogHandler { + constructor(context) { + this.context = context; + this.logSplittingConfig = getLogSplittingConfig(); + this.records = []; + } + + findSplitRecords(record, fields) { + var tempRecord = record; + for (const fieldName in fields) { + // loop through the fields to find the one we want to split + if ( + tempRecord[fieldName] === undefined || + tempRecord[fieldName] === null + ) { + // if the field is null or undefined, return + return null; + } else { + // there is some value for the field try { - message = JSON.parse(message); - if (message.records !== undefined) { - message.records.forEach(sender(addTagsToJsonLog)); - } else { - sender(addTagsToJsonLog)(message); - } + // if for some reason we can't index into it, return null + tempRecord = tempRecord[fieldName]; } catch { - sender(addTagsToStringLog)(message); + return null; } - }); - break; - case JSON_RECORDS: - logs.forEach(message => { - message.records.forEach(sender(addTagsToJsonLog)); - }); - break; - case JSON_ARRAY: - 
logs.forEach(sender(addTagsToJsonLog)); - break; - case INVALID: - context.log.error('Log format is invalid: ', logs); - break; - default: - context.log.error('Log format is invalid: ', logs); - break; + } + } + return tempRecord; } -} -function getLogFormat(logs) { - if (typeof logs === 'string') { - return STRING; + formatLog(messageType, record) { + if (messageType == JSON_TYPE) { + var originalRecord = this.addTagsToJsonLog(record); + var source = originalRecord['ddsource']; + var config = this.logSplittingConfig[source]; + if (config !== undefined) { + var splitFieldFound = false; + + for (var i = 0; i < config.paths.length; i++) { + var fields = config.paths[i]; + var recordsToSplit = this.findSplitRecords(record, fields); + if ( + recordsToSplit === null || + !(recordsToSplit instanceof Array) + ) { + // if we were unable find the field or if the field isn't an array, skip it + continue; + } + splitFieldFound = true; + + for (var j = 0; j < recordsToSplit.length; j++) { + var splitRecord = recordsToSplit[j]; + if (typeof splitRecord === 'string') { + try { + splitRecord = JSON.parse(splitRecord); + } catch (err) {} + } + var formattedSplitRecord = {}; + var temp = formattedSplitRecord; + // re-create the same nested attributes with only the split log + for (var k = 0; k < fields.length; k++) { + if (k === fields.length - 1) { + // if it is the last field, add the split record + temp[fields[k]] = splitRecord; + } else { + temp[fields[k]] = {}; + temp = temp[fields[k]]; + } + } + formattedSplitRecord = { + parsed_arrays: formattedSplitRecord + }; + + if (config.preserve_fields) { + var newRecord = { ...originalRecord }; + } else { + var newRecord = { + ddsource: source, + ddsourcecategory: + originalRecord['ddsourcecategory'], + service: originalRecord['service'], + ddtags: originalRecord['ddtags'] + }; + if (originalRecord['time'] !== undefined) { + newRecord['time'] = originalRecord['time']; + } + } + Object.assign(newRecord, formattedSplitRecord); + 
this.records.push(newRecord); + } + } + if (config.keep_original_log || splitFieldFound !== true) { + // keep the original log if it is set in the config + // if it is false in the config, we should still write the log when we don't split + this.records.push(originalRecord); + } + } else { + this.records.push(originalRecord); + } + } else { + record = this.addTagsToStringLog(record); + this.records.push(record); + } } - if (!Array.isArray(logs) && typeof logs === 'object' && logs !== null) { - return JSON_OBJECT; + + handleLogs(logs) { + var logsType = this.getLogFormat(logs); + switch (logsType) { + case STRING: + this.formatLog(STRING_TYPE, logs); + break; + case JSON_STRING: + logs = JSON.parse(logs); + this.formatLog(JSON_TYPE, logs); + break; + case JSON_OBJECT: + this.formatLog(JSON_TYPE, logs); + break; + case STRING_ARRAY: + logs.forEach(log => this.formatLog(STRING_TYPE, log)); + break; + case JSON_ARRAY: + this.handleJSONArrayLogs(logs, JSON_ARRAY); + break; + case BUFFER_ARRAY: + this.handleJSONArrayLogs(logs, BUFFER_ARRAY); + break; + case JSON_STRING_ARRAY: + this.handleJSONArrayLogs(logs, JSON_STRING_ARRAY); + break; + case INVALID: + this.context.log.error('Log format is invalid: ', logs); + break; + default: + this.context.log.error('Log format is invalid: ', logs); + break; + } + return this.records; } - if (!Array.isArray(logs)) { - return INVALID; + + handleJSONArrayLogs(logs, logsType) { + for (var i = 0; i < logs.length; i++) { + var message = logs[i]; + if (logsType == JSON_STRING_ARRAY) { + try { + message = JSON.parse(message); + } catch (err) { + this.context.log.warn( + 'log is malformed json, sending as string' + ); + this.formatLog(STRING_TYPE, message); + continue; + } + } + // If the message is a buffer object, the data type has been set to binary. 
+ if (logsType == BUFFER_ARRAY) { + try { + message = JSON.parse(message.toString()); + } catch (err) { + this.context.log.warn( + 'log is malformed json, sending as string' + ); + this.formatLog(STRING_TYPE, message.toString()); + continue; + } + } + if (message.records != undefined) { + message.records.forEach(message => + this.formatLog(JSON_TYPE, message) + ); + } else { + this.formatLog(JSON_TYPE, message); + } + } } - if (typeof logs[0] === 'object') { - if (logs[0].records !== undefined) { - return JSON_RECORDS; - } else { + + getLogFormat(logs) { + if (typeof logs === 'string') { + if (this.isJsonString(logs)) { + return JSON_STRING; + } + return STRING; + } + if (!Array.isArray(logs) && typeof logs === 'object' && logs !== null) { + return JSON_OBJECT; + } + if (!Array.isArray(logs)) { + return INVALID; + } + if (Buffer.isBuffer(logs[0])) { + return BUFFER_ARRAY; + } + if (typeof logs[0] === 'object') { return JSON_ARRAY; } + if (typeof logs[0] === 'string') { + if (this.isJsonString(logs[0])) { + return JSON_STRING_ARRAY; + } else { + return STRING_ARRAY; + } + } + return INVALID; } - if (typeof logs[0] === 'string') { - return STRING_ARRAY; + + isJsonString(record) { + try { + JSON.parse(record); + return true; + } catch (err) { + return false; + } } - return INVALID; -} -function addTagsToJsonLog(record, context) { - metadata = extractResourceId(record); - record['ddsource'] = metadata.source || DD_SOURCE; - record['ddsourcecategory'] = DD_SOURCE_CATEGORY; - record['service'] = DD_SERVICE; - record['ddtags'] = metadata.tags - .concat([ - DD_TAGS, - 'forwardername:' + context.executionContext.functionName - ]) - .filter(Boolean) - .join(','); - return record; -} + createDDTags(tags) { + const forwarderNameTag = 'forwardername:' + this.context.executionContext.functionName; + const fowarderVersionTag = 'forwarderversion:' + VERSION; + var ddTags = tags.concat([DD_TAGS, forwarderNameTag, fowarderVersionTag]); + return ddTags.filter(Boolean).join(','); + } 
-function addTagsToStringLog(stringLog, context) { - jsonLog = { message: stringLog }; - return addTagsToJsonLog(jsonLog, context); -} + addTagsToJsonLog(record) { + var [metadata, record] = this.extractMetadataFromLog(record); + record['ddsource'] = metadata.source || DD_SOURCE; + record['ddsourcecategory'] = DD_SOURCE_CATEGORY; + record['service'] = metadata.service || DD_SERVICE; + record['ddtags'] = this.createDDTags(metadata.tags); + return record; + } -function extractResourceId(record) { - metadata = { tags: [], source: '' }; - if ( - record.resourceId === undefined || - typeof record.resourceId !== 'string' || - !record.resourceId.toLowerCase().startsWith('/subscriptions/') - ) { - return metadata; + addTagsToStringLog(stringLog) { + var jsonLog = { message: stringLog }; + return this.addTagsToJsonLog(jsonLog); } - var resourceId = record.resourceId.toLowerCase().split('/'); - if (resourceId.length > 2) { - metadata.tags.push('subscription_id:' + resourceId[2]); + + createResourceIdArray(resourceId) { + // Convert a valid resource ID to an array, handling beginning/ending slashes + var resourceIdArray = resourceId.toLowerCase().split('/'); + if (resourceIdArray[0] === '') { + resourceIdArray = resourceIdArray.slice(1); + } + if (resourceIdArray[resourceIdArray.length - 1] === '') { + resourceIdArray.pop(); + } + return resourceIdArray; + } + + isSource(resourceIdPart) { + // Determine if a section of a resource ID counts as a "source," in our case it means it starts with 'microsoft.' 
+ return resourceIdPart.startsWith('microsoft.'); + } + + formatSourceType(sourceType) { + return sourceType.replace('microsoft.', 'azure.'); } - if (resourceId.length > 4) { - metadata.tags.push('resource_group:' + resourceId[4]); + + getResourceId(record) { + // Most logs have resourceId, but some logs have ResourceId instead + var id = record.resourceId || record.ResourceId; + if (typeof id !== 'string') { + return null; + } + return id; + } + + extractMetadataFromLog(record) { + return [this.extractMetadataFromStandardLog(record), record]; + } + + extractMetadataFromStandardLog(record) { + var metadata = { tags: [], source: '', service: '' }; + var resourceId = this.getResourceId(record); + if (resourceId === null || resourceId === '') { + return metadata; + } + resourceId = this.createResourceIdArray(resourceId); + + if (resourceId[0] === 'subscriptions') { + if (resourceId.length > 1) { + metadata.tags.push('subscription_id:' + resourceId[1]); + if (resourceId.length == 2) { + metadata.source = 'azure.subscription'; + return metadata; + } + } + if (resourceId.length > 3) { + if ( + resourceId[2] === 'providers' && + this.isSource(resourceId[3]) + ) { + // handle provider-only resource IDs + metadata.source = this.formatSourceType(resourceId[3]); + } else { + metadata.tags.push('resource_group:' + resourceId[3]); + if (resourceId.length == 4) { + metadata.source = 'azure.resourcegroup'; + return metadata; + } + } + } + if (resourceId.length > 5 && this.isSource(resourceId[5])) { + metadata.source = this.formatSourceType(resourceId[5]); + } + } else if (resourceId[0] === 'tenants') { + if (resourceId.length > 3 && resourceId[3]) { + metadata.tags.push('tenant:' + resourceId[1]); + metadata.source = this.formatSourceType(resourceId[3]).replace( + 'aadiam', + 'activedirectory' + ); + } + } + return metadata; } - if (resourceId.length > 6 && resourceId[6]) { - metadata.source = resourceId[6].replace('microsoft.', 'azure.'); + + removeWhitespaceFromKeys(obj) { + // 
remove whitespace from the keys of an object and capitalizes the letter that follows + var newObj = {}; + for (const [key, value] of Object.entries(obj)) { + // regex looks for word boundaries and captures the alpha character that follows + const new_key = key.replace(/\b\w/g, c=> c.toUpperCase()).replaceAll(' ', ''); + newObj[new_key] = value; + } + return newObj; } - return metadata; } + +module.exports = async function(context, blobContent) { + if (!DD_API_KEY || DD_API_KEY === '') { + context.log.error( + 'You must configure your API key before starting this function (see ## Parameters section)' + ); + return; + } + + var logs; + if (typeof blobContent === 'string') { + logs = blobContent.trim().split('\n'); + } else if (Buffer.isBuffer(blobContent)) { + logs = blobContent + .toString('utf8') + .trim() + .split('\n'); + } else { + logs = JSON.stringify(blobContent) + .trim() + .split('\n'); + } + + try { + var handler = new BlobStorageLogHandler(context); + var parsedLogs = handler.handleLogs(logs); + } catch (err) { + context.log.error('Error raised when parsing logs: ', err); + throw err; + } + var results = await new HTTPClient(context).sendAll(parsedLogs); + + if (results.every(v => v === true) !== true) { + context.log.error( + 'Some messages were unable to be sent. See other logs for details.' 
+ ); + } + + context.done(); +}; + +module.exports.forTests = { + BlobStorageLogHandler, + Scrubber, + ScrubberRule, + Batcher, + constants: { + STRING, + STRING_ARRAY, + JSON_OBJECT, + JSON_ARRAY, + BUFFER_ARRAY, + JSON_STRING, + JSON_STRING_ARRAY, + INVALID + } +}; \ No newline at end of file diff --git a/azure/package.json b/azure/package.json index ed774670e..1554d040a 100644 --- a/azure/package.json +++ b/azure/package.json @@ -20,11 +20,10 @@ "url": "https://github.com/DataDog/datadog-serverless-functions/issues" }, "homepage": "https://github.com/DataDog/datadog-serverless-functions#readme", - "dependencies": {}, "devDependencies": { + "lodash": ">=4.17.21", "mocha": "^7.2.0", "prettier": "^1.19.1", - "sinon": "^9.2.4", - "lodash": ">=4.17.21" + "sinon": "^9.2.4" } } diff --git a/azure/test/client.test.js b/azure/test/activity_logs_monitoring.test.js similarity index 91% rename from azure/test/client.test.js rename to azure/test/activity_logs_monitoring.test.js index 80af2ce11..b34efe593 100644 --- a/azure/test/client.test.js +++ b/azure/test/activity_logs_monitoring.test.js @@ -37,7 +37,7 @@ const DEFAULT_TEST_SCRUBBER_RULES = { } }; -describe('Azure Log Monitoring', function() { +describe('Azure Activity Log Monitoring', function() { describe('#getLogFormat', function() { beforeEach(function() { this.forwarder = setUp(); @@ -107,7 +107,7 @@ describe('Azure Log Monitoring', function() { }); }); - describe('#extractMetadataFromResource', function() { + describe('#extractMetadataFromLog', function() { beforeEach(function() { this.forwarder = setUp(); }); @@ -121,11 +121,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: 'azure.compute' + source: 'azure.compute', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should 
parse a valid resource group resource', function() { @@ -138,11 +139,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: 'azure.resourcegroup' + source: 'azure.resourcegroup', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should parse a valid resource group resource ending slash', function() { @@ -155,11 +157,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: 'azure.resourcegroup' + source: 'azure.resourcegroup', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should parse a valid record without provider length 5', function() { @@ -172,11 +175,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: '' + source: '', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should parse a valid subscription type resource', function() { @@ -186,11 +190,12 @@ describe('Azure Log Monitoring', function() { }; expectedMetadata = { tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], - source: 'azure.subscription' + source: 'azure.subscription', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should parse a valid subscription type resource ending slash', function() { @@ -200,11 +205,12 @@ describe('Azure Log Monitoring', function() { }; expectedMetadata = { tags: 
['subscription_id:12345678-1234-abcd-1234-1234567890ab'], - source: 'azure.subscription' + source: 'azure.subscription', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should parse a valid record without provider and resource group length 3', function() { @@ -214,35 +220,36 @@ describe('Azure Log Monitoring', function() { }; expectedMetadata = { tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], - source: '' + source: '', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should not fail on record without resourceId', function() { record = { key: 'value' }; - expectedMetadata = { tags: [], source: '' }; + expectedMetadata = { tags: [], source: '', service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should not fail on string record', function() { record = { key: 'value' }; - expectedMetadata = { tags: [], source: '' }; + expectedMetadata = { tags: [], source: '', service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should not fail on improper resourceId', function() { record = { resourceId: 'foo/bar' }; - expectedMetadata = { tags: [], source: '' }; + expectedMetadata = { tags: [], source: '', service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should not fail with an invalid source', function() { @@ -255,11 +262,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: '' + source: '', 
+ service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should return empty source when not correct source format', function() { @@ -272,11 +280,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: '' + source: '', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should handle when first element of resource id list is not empty', function() { @@ -289,11 +298,12 @@ describe('Azure Log Monitoring', function() { 'subscription_id:12345678-1234-abcd-1234-1234567890ab', 'resource_group:some-resource-group' ], - source: '' + source: '', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); it('should correctly parse provider-only resource ids', function() { @@ -303,11 +313,12 @@ describe('Azure Log Monitoring', function() { }; expectedMetadata = { tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], - source: 'azure.recoveryservices' + source: 'azure.recoveryservices', + service: '' }; assert.deepEqual( expectedMetadata, - this.forwarder.extractMetadataFromResource(record) + this.forwarder.extractMetadataFromLog(record)[0] ); }); }); diff --git a/azure/test/blobs_logs_monitoring.test.js b/azure/test/blobs_logs_monitoring.test.js new file mode 100644 index 000000000..e561ea6c3 --- /dev/null +++ b/azure/test/blobs_logs_monitoring.test.js @@ -0,0 +1,617 @@ +var assert = require('assert'); +var client = require('../blobs_logs_monitoring').forTests; +var constants = client.constants; +var sinon = require('sinon'); + +function fakeContext() { + // create a fake context object to pass into handleLogs + contextSpy = sinon.spy(); + 
contextSpy.log = sinon.spy(); + contextSpy.log.error = function(x) {}; // do nothing + contextSpy.log.warn = function(x) {}; // do nothing + + return contextSpy; +} + +function setUp() { + var forwarder = new client.BlobStorageLogHandler(fakeContext()); + + forwarder.addTagsToJsonLog = x => { + return Object.assign({ ddsource: 'none' }, x); + }; + forwarder.addTagsToStringLog = x => { + return { ddsource: 'none', message: x }; + }; + + return forwarder; +} + +const DEFAULT_TEST_SCRUBBER_RULES = { + REDACT_IP: { + pattern: /[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/, + replacement: 'xxx.xxx.xxx.xxx' + }, + REDACT_EMAIL: { + pattern: /[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+/, + replacement: 'xxxxx@xxxxx.com' + } +}; + +describe('Azure Activity Log Monitoring', function() { + describe('#getLogFormat', function() { + beforeEach(function() { + this.forwarder = setUp(); + }); + it('should return string', function() { + eventHubMessages = ''; + assert.equal( + constants.STRING, + this.forwarder.getLogFormat(eventHubMessages) + ); + eventHubMessages = 'foobar'; + assert.equal( + constants.STRING, + this.forwarder.getLogFormat(eventHubMessages) + ); + }); + it('should return string array', function() { + eventHubMessages = ['', 'foobar']; + assert.equal( + constants.STRING_ARRAY, + this.forwarder.getLogFormat(eventHubMessages) + ); + }); + it('should return json object', function() { + eventHubMessages = { key: 'value', otherkey: 'othervalue' }; + assert.equal( + constants.JSON_OBJECT, + this.forwarder.getLogFormat(eventHubMessages) + ); + }); + it('should return json array when there are no records', function() { + eventHubMessages = [ + { key: 'value', otherkey: 'othervalue' }, + { key: 'value', otherkey: 'othervalue' } + ]; + assert.equal( + constants.JSON_ARRAY, + this.forwarder.getLogFormat(eventHubMessages) + ); + }); + it('should return invalid', function() { + eventHubMessages = 1; + assert.equal( + constants.INVALID, + 
this.forwarder.getLogFormat(eventHubMessages) + ); + eventHubMessages = () => {}; + assert.equal( + constants.INVALID, + this.forwarder.getLogFormat(eventHubMessages) + ); + eventHubMessages = true; + assert.equal( + constants.INVALID, + this.forwarder.getLogFormat(eventHubMessages) + ); + eventHubMessages = null; + assert.equal( + constants.INVALID, + this.forwarder.getLogFormat(eventHubMessages) + ); + eventHubMessages = undefined; + assert.equal( + constants.INVALID, + this.forwarder.getLogFormat(eventHubMessages) + ); + }); + }); + + describe('#extractMetadataFromLog', function() { + beforeEach(function() { + this.forwarder = setUp(); + }); + it('should parse a valid record', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP/PROVIDERS/MICROSOFT.COMPUTE/VIRTUALMACHINES/SOME-VM' + }; + expectedMetadata = { + tags: [ + 'subscription_id:12345678-1234-abcd-1234-1234567890ab', + 'resource_group:some-resource-group' + ], + source: 'azure.compute', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should parse a valid resource group resource', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP' + }; + expectedMetadata = { + tags: [ + 'subscription_id:12345678-1234-abcd-1234-1234567890ab', + 'resource_group:some-resource-group' + ], + source: 'azure.resourcegroup', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should parse a valid resource group resource ending slash', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP/' + }; + expectedMetadata = { + tags: [ + 'subscription_id:12345678-1234-abcd-1234-1234567890ab', + 'resource_group:some-resource-group' + ], + source: 
'azure.resourcegroup', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should parse a valid record without provider length 5', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP/ffffff' + }; + expectedMetadata = { + tags: [ + 'subscription_id:12345678-1234-abcd-1234-1234567890ab', + 'resource_group:some-resource-group' + ], + source: '', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should parse a valid subscription type resource', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB' + }; + expectedMetadata = { + tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], + source: 'azure.subscription', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should parse a valid subscription type resource ending slash', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/' + }; + expectedMetadata = { + tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], + source: 'azure.subscription', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should parse a valid record without provider and resource group length 3', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/ffffff' + }; + expectedMetadata = { + tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], + source: '', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should not fail on record without resourceId', function() { + record = { key: 'value' }; + expectedMetadata = { tags: [], source: '', service: '' }; 
+ assert.deepEqual(
+ expectedMetadata,
+ this.forwarder.extractMetadataFromLog(record)[0]
+ );
+ });
+ it('should not fail on string record', function() {
+ record = 'some-string-record';
+ expectedMetadata = { tags: [], source: '', service: '' };
+ assert.deepEqual(
+ expectedMetadata,
+ this.forwarder.extractMetadataFromLog(record)[0]
+ );
+ });
+ it('should not fail on improper resourceId', function() {
+ record = { resourceId: 'foo/bar' };
+ expectedMetadata = { tags: [], source: '', service: '' };
+ assert.deepEqual(
+ expectedMetadata,
+ this.forwarder.extractMetadataFromLog(record)[0]
+ );
+ });
+ it('should not fail with an invalid source', function() {
+ record = {
+ resourceId:
+ '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP/PROVIDERS/////'
+ };
+ expectedMetadata = {
+ tags: [
+ 'subscription_id:12345678-1234-abcd-1234-1234567890ab',
+ 'resource_group:some-resource-group'
+ ],
+ source: '',
+ service: ''
+ };
+ assert.deepEqual(
+ expectedMetadata,
+ this.forwarder.extractMetadataFromLog(record)[0]
+ );
+ });
+ it('should return empty source when not correct source format', function() {
+ record = {
+ resourceId:
+ '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP/PROVIDERS/NOTTHESAMEFORMAT/VIRTUALMACHINES/SOME-VM'
+ };
+ expectedMetadata = {
+ tags: [
+ 'subscription_id:12345678-1234-abcd-1234-1234567890ab',
+ 'resource_group:some-resource-group'
+ ],
+ source: '',
+ service: ''
+ };
+ assert.deepEqual(
+ expectedMetadata,
+ this.forwarder.extractMetadataFromLog(record)[0]
+ );
+ });
+ it('should handle when first element of resource id list is not empty', function() {
+ record = {
+ resourceId:
+ 'SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/RESOURCEGROUPS/SOME-RESOURCE-GROUP/PROVIDERS/NOTTHESAMEFORMAT/VIRTUALMACHINES/SOME-VM'
+ };
+ expectedMetadata = {
+ tags: [
+ 'subscription_id:12345678-1234-abcd-1234-1234567890ab',
+ 'resource_group:some-resource-group'
+ ],
+ 
source: '', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + it('should correctly parse provider-only resource ids', function() { + record = { + resourceId: + '/SUBSCRIPTIONS/12345678-1234-ABCD-1234-1234567890AB/PROVIDERS/MICROSOFT.RECOVERYSERVICES/SOMETHING/SOMETHINGELSE' + }; + expectedMetadata = { + tags: ['subscription_id:12345678-1234-abcd-1234-1234567890ab'], + source: 'azure.recoveryservices', + service: '' + }; + assert.deepEqual( + expectedMetadata, + this.forwarder.extractMetadataFromLog(record)[0] + ); + }); + }); + + function testHandleJSONLogs(forwarder, logs, expected) { + actual = forwarder.handleLogs(logs); + assert.deepEqual(actual, expected); + } + + function testHandleStringLogs(forwarder, logs, expected) { + actual = forwarder.handleLogs(logs); + assert.deepEqual(actual, expected); + } + + describe('#handleLogs', function() { + beforeEach(function() { + this.forwarder = setUp(); + }); + + it('should handle string properly', function() { + log = 'hello'; + expected = [{ ddsource: 'none', message: 'hello' }]; + assert.equal(this.forwarder.getLogFormat(log), constants.STRING); + testHandleStringLogs(this.forwarder, log, expected); + }); + + it('should handle json-string properly', function() { + log = '{"hello": "there"}'; + expected = [{ ddsource: 'none', hello: 'there' }]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_STRING + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle json-object properly', function() { + log = { hello: 'there' }; + expected = [{ ddsource: 'none', hello: 'there' }]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_OBJECT + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle string-array properly', function() { + log = ['one message', 'two message']; + expected = [ + { ddsource: 'none', message: 'one message' }, + { ddsource: 'none', message: 
'two message' } + ]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.STRING_ARRAY + ); + testHandleStringLogs(this.forwarder, log, expected); + }); + + it('should handle json-records properly', function() { + log = [{ records: [{ hello: 'there' }, { goodbye: 'now' }] }]; + expected = [ + { ddsource: 'none', hello: 'there' }, + { ddsource: 'none', goodbye: 'now' } + ]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_ARRAY + ); //JSON_RECORDS + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle json-array properly', function() { + log = [{ hello: 'there' }, { goodbye: 'now' }]; + expected = [ + { ddsource: 'none', hello: 'there' }, + { ddsource: 'none', goodbye: 'now' } + ]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_ARRAY + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle buffer array properly', function() { + log = [Buffer.from('{"records": [{ "test": "testing"}]}')]; + expected = [{ ddsource: 'none', test: 'testing' }]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.BUFFER_ARRAY + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle buffer array without records properly', function() { + log = [Buffer.from('{ "test": "example"}')]; + expected = [{ ddsource: 'none', test: 'example' }]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.BUFFER_ARRAY + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle buffer array with malformed string', function() { + log = [Buffer.from('{"time": "xy')]; + expected = [{ ddsource: 'none', message: '{"time": "xy' }]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.BUFFER_ARRAY + ); + testHandleStringLogs(this.forwarder, log, expected); + }); + + it('should handle json-string-array properly records', function() { + log = ['{"records": [{ "time": "xyz"}, {"time": "abc"}]}']; + expected = [ + { ddsource: 
'none', time: 'xyz' }, + { ddsource: 'none', time: 'abc' } + ]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_STRING_ARRAY + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle json-string-array properly no records', function() { + log = ['{"time": "xyz"}']; + expected = [{ ddsource: 'none', time: 'xyz' }]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_STRING_ARRAY + ); + testHandleJSONLogs(this.forwarder, log, expected); + }); + + it('should handle json-string-array with malformed string', function() { + log = ['{"time": "xyz"}', '{"time": "xy']; + expected = [ + { ddsource: 'none', time: 'xyz' }, + { ddsource: 'none', message: '{"time": "xy' } + ]; + assert.equal( + this.forwarder.getLogFormat(log), + constants.JSON_STRING_ARRAY + ); + testHandleStringLogs(this.forwarder, log, expected); + }); + }); + describe('#formatSourceType', function() { + beforeEach(function() { + this.forwarder = setUp(); + }); + it('should replace microsoft with azure', function() { + expected = 'azure.bleh'; + actual = this.forwarder.formatSourceType('microsoft.bleh'); + assert.equal(actual, expected); + }); + }); + describe('#scrubPII', function() { + it('should set up configs correctly', function() { + test_rules = { + REDACT_IP: { + pattern: '[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}', + replacement: 'xxx.xxx.xxx.xxx' + } + }; + scrubber = new client.Scrubber(fakeContext(), test_rules); + rule = scrubber.rules[0]; + assert.equal(rule instanceof client.ScrubberRule, true); + assert.equal(rule.name, 'REDACT_IP'); + assert.equal(rule.regexp instanceof RegExp, true); + assert.equal(rule.replacement, 'xxx.xxx.xxx.xxx'); + }); + it('should scrub email from record', function() { + expected = 'sender_email: xxxxx@xxxxx.com'; + scrubber = new client.Scrubber( + fakeContext(), + DEFAULT_TEST_SCRUBBER_RULES + ); + actual = scrubber.scrub('sender_email: hello@test.com'); + assert.equal(actual, expected); + }); + 
it('should scrub ip address from record', function() { + expected = 'client_ip: xxx.xxx.xxx.xxx'; + scrubber = new client.Scrubber( + fakeContext(), + DEFAULT_TEST_SCRUBBER_RULES + ); + actual = scrubber.scrub('client_ip: 12.123.23.12'); + assert.equal(actual, expected); + }); + it('should scrub ip address and email from record', function() { + expected = 'client_ip: xxx.xxx.xxx.xxx, email: xxxxx@xxxxx.com'; + scrubber = new client.Scrubber( + fakeContext(), + DEFAULT_TEST_SCRUBBER_RULES + ); + actual = scrubber.scrub( + 'client_ip: 12.123.23.12, email: hello@test.com' + ); + assert.equal(actual, expected); + }); + it('should scrub multiple ip address from string', function() { + expected = + 'client_ip: xxx.xxx.xxx.xxx, client_ip2: xxx.xxx.xxx.xxx'; + scrubber = new client.Scrubber( + fakeContext(), + DEFAULT_TEST_SCRUBBER_RULES + ); + actual = scrubber.scrub( + 'client_ip: 12.123.23.12, client_ip2: 122.123.213.112' + ); + assert.equal(actual, expected); + }); + it('should scrub multiple ip address and email from string', function() { + expected = + 'client_ip: xxx.xxx.xxx.xxx, client_ip2: xxx.xxx.xxx.xxx email: xxxxx@xxxxx.com email2: xxxxx@xxxxx.com'; + scrubber = new client.Scrubber( + fakeContext(), + DEFAULT_TEST_SCRUBBER_RULES + ); + actual = scrubber.scrub( + 'client_ip: 12.123.23.12, client_ip2: 122.123.213.112 email: hello@test.com email2: hello2@test.com' + ); + assert.equal(actual, expected); + }); + it('should handle malformed regexp correctly', function() { + // we don't want to break if we have a malformed regex, just want to skip it until the user fixes it + test_rules = { + REDACT_SOMETHING: { + pattern: '[2-', + replacement: 'xxx.xxx.xxx.xxx' + } + }; + scrubber = new client.Scrubber(fakeContext(), test_rules); + assert.equal(scrubber.rules.length, 0); + }); + it('should not scrub when there are no rules defined', function() { + // if there are no rules, then the log should be the same before and after + test_rules = {}; + expected = + 'client_ip: 
12.123.23.12, client_ip2: 122.123.213.112 email: hello@test.com email2: hello2@test.com'; + scrubber = new client.Scrubber(fakeContext(), test_rules); + actual = scrubber.scrub( + 'client_ip: 12.123.23.12, client_ip2: 122.123.213.112 email: hello@test.com email2: hello2@test.com' + ); + assert.equal(actual, expected); + }); + }); +}); + +describe('Batching', function() { + describe('#batch', function() { + it('should return two batches because of size', function() { + batcher = new client.Batcher(15, 15, 1); + logs = [{ hi: 'bye' }, 'bleh']; + actual = batcher.batch(logs); + expected = [[{ hi: 'bye' }], ['bleh']]; + assert.deepEqual(actual, expected); + }); + it('should return two batches because of batch size bytes', function() { + batcher = new client.Batcher(5, 12, 10); + logs = [{ hi: 'bye' }, 'bleh']; + actual = batcher.batch(logs); + expected = [[{ hi: 'bye' }], ['bleh']]; + assert.deepEqual(actual, expected); + }); + it('should drop message based on message bytes size', function() { + batcher = new client.Batcher(5, 5, 1); + logs = [{ hi: 'bye' }, 'bleh']; + actual = batcher.batch(logs); + expected = [['bleh']]; + assert.deepEqual(actual, expected); + }); + }); + describe('#getSizeInBytes', function() { + it('should return 5 for string', function() { + batcher = new client.Batcher(15, 15, 1); + log = 'aaaaa'; + actual = batcher.getSizeInBytes(log); + expected = 5; + assert.equal(actual, expected); + }); + + it('should return 7 for object', function() { + batcher = new client.Batcher(15, 15, 1); + log = { a: 2 }; + actual = batcher.getSizeInBytes(log); + expected = 7; + assert.equal(actual, expected); + }); + }); +}); From 27373b6ab1f15fe3f8fdce881f223a4230cfc913 Mon Sep 17 00:00:00 2001 From: Matt Spurlin Date: Tue, 2 Jan 2024 20:23:14 -0500 Subject: [PATCH 2/2] remove unused function --- azure/blobs_logs_monitoring/index.js | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/azure/blobs_logs_monitoring/index.js 
b/azure/blobs_logs_monitoring/index.js index 40500e0cd..8cfe1858b 100644 --- a/azure/blobs_logs_monitoring/index.js +++ b/azure/blobs_logs_monitoring/index.js @@ -543,17 +543,6 @@ class BlobStorageLogHandler { } return metadata; } - - removeWhitespaceFromKeys(obj) { - // remove whitespace from the keys of an object and capitalizes the letter that follows - var newObj = {}; - for (const [key, value] of Object.entries(obj)) { - // regex looks for word boundaries and captures the alpha character that follows - const new_key = key.replace(/\b\w/g, c=> c.toUpperCase()).replaceAll(' ', ''); - newObj[new_key] = value; - } - return newObj; - } } module.exports = async function(context, blobContent) {