From 4a233c61b1504d70c8aacf916979ae0216ac57dd Mon Sep 17 00:00:00 2001
From: Simeon Widdis <sawiddis@gmail.com>
Date: Thu, 22 Jun 2023 13:21:46 -0700
Subject: [PATCH] Merge pull request #552 from Swiddis/osints/main

Merge in repository classes from osints/dev

Signed-off-by: Simeon Widdis <sawiddis@amazon.com>
---
 .../repository/__test__/integration.test.ts   | 246 +++++++++++++++
 .../repository/__test__/repository.test.ts    |  85 ++++++
 .../integrations/repository/integration.ts    | 282 ++++++++++++++++++
 .../integrations/repository/repository.ts     |  41 +++
 server/adaptors/integrations/types.ts         |   4 +-
 server/adaptors/integrations/validators.ts    | 119 ++++++++
 .../opensearch_observability_plugin.ts        |  48 ++-
 7 files changed, 798 insertions(+), 27 deletions(-)
 create mode 100644 server/adaptors/integrations/repository/__test__/integration.test.ts
 create mode 100644 server/adaptors/integrations/repository/__test__/repository.test.ts
 create mode 100644 server/adaptors/integrations/repository/integration.ts
 create mode 100644 server/adaptors/integrations/repository/repository.ts
 create mode 100644 server/adaptors/integrations/validators.ts

diff --git a/server/adaptors/integrations/repository/__test__/integration.test.ts b/server/adaptors/integrations/repository/__test__/integration.test.ts
new file mode 100644
index 0000000000..4474fc48ff
--- /dev/null
+++ b/server/adaptors/integrations/repository/__test__/integration.test.ts
@@ -0,0 +1,246 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import * as fs from 'fs/promises';
+import { Integration } from '../integration';
+import { Dirent, Stats } from 'fs';
+import * as path from 'path';
+
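+// Auto-mock 'fs/promises' so individual methods can be stubbed per test with jest.spyOn.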
+jest.mock('fs/promises');
+
+describe('Integration', () => {
+  let integration: Integration;
+  const sampleIntegration: IntegrationTemplate = {
+    name: 'sample',
+    version: '2.0.0',
+    license: 'Apache-2.0',
+    type: '',
+    components: [],
+    assets: {
+      savedObjects: {
+        name: 'sample',
+        version: '1.0.1',
+      },
+    },
+  };
+
+  beforeEach(() => {
+    integration = new Integration('./sample');
+  });
+
+  describe('check', () => {
+    it('should return false if the path is not a directory', async () => {
+      const spy = jest.spyOn(fs, 'stat').mockResolvedValue({ isDirectory: () => false } as Stats);
+
+      const result = await integration.check();
+
+      expect(spy).toHaveBeenCalled();
+      expect(result).toBe(false);
+    });
+
+    it('should return true if the directory exists and getConfig returns a valid template', async () => {
+      jest.spyOn(fs, 'stat').mockResolvedValue({ isDirectory: () => true } as Stats);
+      integration.getConfig = jest.fn().mockResolvedValue(sampleIntegration);
+
+      const result = await integration.check();
+
+      expect(result).toBe(true);
+    });
+
+    it('should return false if the directory exists but getConfig returns null', async () => {
+      jest.spyOn(fs, 'stat').mockResolvedValue({ isDirectory: () => true } as Stats);
+      integration.getConfig = jest.fn().mockResolvedValue(null);
+
+      const result = await integration.check();
+
+      expect(result).toBe(false);
+    });
+  });
+
+  describe('getLatestVersion', () => {
+    it('should return the latest version if there are JSON files matching the integration name', async () => {
+      const files: unknown[] = ['sample-1.0.0.json', 'sample-2.0.0.json'];
+      jest.spyOn(fs, 'readdir').mockResolvedValue(files as Dirent[]);
+
+      const result = await integration.getLatestVersion();
+
+      expect(result).toBe('2.0.0');
+    });
+
+    it('should return null if there are no JSON files matching the integration name', async () => {
+      const files: unknown[] = ['other-1.0.0.json', 'other-2.0.0.json'];
+      jest.spyOn(fs, 'readdir').mockResolvedValue(files as Dirent[]);
+
+      const result = await integration.getLatestVersion();
+
+      expect(result).toBeNull();
+    });
+
+    it('should ignore files without a decimal version', async () => {
+      const files: unknown[] = ['sample-1.0.0.json', 'sample-2.0.two.json', 'sample-three.json'];
+      jest.spyOn(fs, 'readdir').mockResolvedValue(files as Dirent[]);
+
+      const result = await integration.getLatestVersion();
+
+      expect(result).toBe('1.0.0');
+    });
+  });
+
+  describe('getConfig', () => {
+    it('should return the parsed config template if it is valid', async () => {
+      jest.spyOn(fs, 'readFile').mockResolvedValue(JSON.stringify(sampleIntegration));
+
+      const result = await integration.getConfig(sampleIntegration.version);
+
+      expect(result).toEqual(sampleIntegration);
+    });
+
+    it('should return null and log validation errors if the config template is invalid', async () => {
+      const invalidTemplate = { ...sampleIntegration, version: 2 };
+      jest.spyOn(fs, 'readFile').mockResolvedValue(JSON.stringify(invalidTemplate));
+      const logValidationErrorsMock = jest.spyOn(console, 'error');
+
+      const result = await integration.getConfig(sampleIntegration.version);
+
+      expect(result).toBeNull();
+      expect(logValidationErrorsMock).toHaveBeenCalledWith(expect.any(String), expect.any(Array));
+    });
+
+    it('should return null and log syntax errors if the config file has syntax errors', async () => {
+      jest.spyOn(fs, 'readFile').mockResolvedValue('Invalid JSON');
+      const logSyntaxErrorsMock = jest.spyOn(console, 'error');
+
+      const result = await integration.getConfig(sampleIntegration.version);
+
+      expect(result).toBeNull();
+      expect(logSyntaxErrorsMock).toHaveBeenCalledWith(expect.any(String), expect.any(SyntaxError));
+    });
+
+    it('should return null and log errors if the integration config does not exist', async () => {
+      integration.directory = './non-existing-directory';
+      const logErrorsMock = jest.spyOn(console, 'error');
+      jest.spyOn(fs, 'readFile').mockImplementation((..._args) => {
+        // There is no documented way to construct a real "file not found" error in a mock,
+        // but an Error carrying `code: 'ENOENT'` matches what the implementation checks for.
+        const error: any = new Error('ENOENT: File not found');
+        error.code = 'ENOENT';
+        return Promise.reject(error);
+      });
+
+      const result = await integration.getConfig(sampleIntegration.version);
+
+      expect(jest.spyOn(fs, 'readFile')).toHaveBeenCalled();
+      expect(logErrorsMock).toHaveBeenCalledWith(expect.any(String));
+      expect(result).toBeNull();
+    });
+  });
+
+  describe('getAssets', () => {
+    it('should return linked saved object assets when available', async () => {
+      integration.getConfig = jest.fn().mockResolvedValue(sampleIntegration);
+      jest.spyOn(fs, 'readFile').mockResolvedValue('{"name":"asset1"}\n{"name":"asset2"}');
+
+      const result = await integration.getAssets(sampleIntegration.version);
+
+      expect(result.savedObjects).toEqual([{ name: 'asset1' }, { name: 'asset2' }]);
+    });
+
+    it('should reject if the provided version has no config', async () => {
+      integration.getConfig = jest.fn().mockResolvedValue(null);
+
+      await expect(integration.getAssets()).rejects.toThrowError();
+    });
+
+    it('should log an error if the saved object assets are invalid', async () => {
+      const logErrorsMock = jest.spyOn(console, 'error');
+      integration.getConfig = jest.fn().mockResolvedValue(sampleIntegration);
+      jest.spyOn(fs, 'readFile').mockResolvedValue('{"unclosed":');
+
+      const result = await integration.getAssets(sampleIntegration.version);
+
+      expect(logErrorsMock).toHaveBeenCalledWith(expect.any(String), expect.any(Error));
+      expect(result.savedObjects).toBeUndefined();
+    });
+  });
+
+  describe('getSchemas', () => {
+    it('should retrieve mappings and schemas for all components in the config', async () => {
+      const sampleConfig = {
+        components: [
+          { name: 'component1', version: '1.0.0' },
+          { name: 'component2', version: '2.0.0' },
+        ],
+      };
+      integration.getConfig = jest.fn().mockResolvedValue(sampleConfig);
+
+      const mappingFile1 = 'component1-1.0.0.mapping.json';
+      const mappingFile2 = 'component2-2.0.0.mapping.json';
+
+      jest
+        .spyOn(fs, 'readFile')
+        .mockResolvedValueOnce(JSON.stringify({ mapping: 'mapping1' }))
+        .mockResolvedValueOnce(JSON.stringify({ mapping: 'mapping2' }));
+
+      const result = await integration.getSchemas();
+
+      expect(result).toEqual({
+        mappings: {
+          component1: { mapping: 'mapping1' },
+          component2: { mapping: 'mapping2' },
+        },
+      });
+
+      expect(fs.readFile).toHaveBeenCalledWith(
+        path.join(integration.directory, 'schemas', mappingFile1),
+        { encoding: 'utf-8' }
+      );
+      expect(fs.readFile).toHaveBeenCalledWith(
+        path.join(integration.directory, 'schemas', mappingFile2),
+        { encoding: 'utf-8' }
+      );
+    });
+
+    it('should reject with an error if the config is null', async () => {
+      integration.getConfig = jest.fn().mockResolvedValue(null);
+
+      await expect(integration.getSchemas()).rejects.toThrowError(
+        'Attempted to get assets of invalid config'
+      );
+    });
+
+    it('should reject with an error if a mapping file is invalid', async () => {
+      const sampleConfig = {
+        components: [{ name: 'component1', version: '1.0.0' }],
+      };
+      integration.getConfig = jest.fn().mockResolvedValue(sampleConfig);
+      jest.spyOn(fs, 'readFile').mockRejectedValueOnce(new Error('Could not load schema'));
+
+      await expect(integration.getSchemas()).rejects.toThrowError('Could not load schema');
+    });
+  });
+
+  describe('getStatic', () => {
+    it('should return data as a buffer if the static is present', async () => {
+      const readFileMock = jest
+        .spyOn(fs, 'readFile')
+        .mockResolvedValue(Buffer.from('logo data', 'ascii'));
+      expect(await integration.getStatic('/logo.png')).toStrictEqual(
+        Buffer.from('logo data', 'ascii')
+      );
+      expect(readFileMock).toBeCalledWith(path.join('sample', 'static', 'logo.png'));
+    });
+
+    it('should return null and log an error if the static file is not found', async () => {
+      const logErrorsMock = jest.spyOn(console, 'error');
+      jest.spyOn(fs, 'readFile').mockImplementation((..._args) => {
+        const error: any = new Error('ENOENT: File not found');
+        error.code = 'ENOENT';
+        return Promise.reject(error);
+      });
+      expect(await integration.getStatic('/logo.png')).toBeNull();
+      expect(logErrorsMock).toBeCalledWith(expect.any(String));
+    });
+  });
+});
diff --git a/server/adaptors/integrations/repository/__test__/repository.test.ts b/server/adaptors/integrations/repository/__test__/repository.test.ts
new file mode 100644
index 0000000000..913968f495
--- /dev/null
+++ b/server/adaptors/integrations/repository/__test__/repository.test.ts
@@ -0,0 +1,85 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import * as fs from 'fs/promises';
+import { Repository } from '../repository';
+import { Integration } from '../integration';
+import { Dirent, Stats } from 'fs';
+import path from 'path';
+
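+// Auto-mock 'fs/promises' so individual methods can be stubbed per test with jest.spyOn.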
+jest.mock('fs/promises');
+
+describe('Repository', () => {
+  let repository: Repository;
+
+  beforeEach(() => {
+    repository = new Repository('path/to/directory');
+  });
+
+  describe('getIntegrationList', () => {
+    it('should return an array of Integration instances', async () => {
+      // Mock fs.readdir to return a list of folders
+      jest.spyOn(fs, 'readdir').mockResolvedValue((['folder1', 'folder2'] as unknown) as Dirent[]);
+
+      // Mock fs.lstat to return a directory status
+      jest.spyOn(fs, 'lstat').mockResolvedValue({ isDirectory: () => true } as Stats);
+
+      // Mock Integration check method to always return true
+      jest.spyOn(Integration.prototype, 'check').mockResolvedValue(true);
+
+      const integrations = await repository.getIntegrationList();
+
+      expect(integrations).toHaveLength(2);
+      expect(integrations[0]).toBeInstanceOf(Integration);
+      expect(integrations[1]).toBeInstanceOf(Integration);
+    });
+
+    it('should filter out null values from the integration list', async () => {
+      jest.spyOn(fs, 'readdir').mockResolvedValue((['folder1', 'folder2'] as unknown) as Dirent[]);
+
+      // Mock fs.lstat to return a mix of directories and files
+      jest.spyOn(fs, 'lstat').mockImplementation(async (toLstat) => {
+        if (toLstat === path.join('path', 'to', 'directory', 'folder1')) {
+          return { isDirectory: () => true } as Stats;
+        } else {
+          return { isDirectory: () => false } as Stats;
+        }
+      });
+
+      jest.spyOn(Integration.prototype, 'check').mockResolvedValue(true);
+
+      const integrations = await repository.getIntegrationList();
+
+      expect(integrations).toHaveLength(1);
+      expect(integrations[0]).toBeInstanceOf(Integration);
+    });
+
+    it('should handle errors and return an empty array', async () => {
+      jest.spyOn(fs, 'readdir').mockRejectedValue(new Error('Mocked error'));
+
+      const integrations = await repository.getIntegrationList();
+
+      expect(integrations).toEqual([]);
+    });
+  });
+
+  describe('getIntegration', () => {
+    it('should return an Integration instance if it exists and passes the check', async () => {
+      jest.spyOn(Integration.prototype, 'check').mockResolvedValue(true);
+
+      const integration = await repository.getIntegration('integrationName');
+
+      expect(integration).toBeInstanceOf(Integration);
+    });
+
+    it('should return null if the integration does not exist or fails the check', async () => {
+      jest.spyOn(Integration.prototype, 'check').mockResolvedValue(false);
+
+      const integration = await repository.getIntegration('invalidIntegration');
+
+      expect(integration).toBeNull();
+    });
+  });
+});
diff --git a/server/adaptors/integrations/repository/integration.ts b/server/adaptors/integrations/repository/integration.ts
new file mode 100644
index 0000000000..e14d5c069c
--- /dev/null
+++ b/server/adaptors/integrations/repository/integration.ts
@@ -0,0 +1,282 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import * as fs from 'fs/promises';
+import path from 'path';
+import { ValidateFunction } from 'ajv';
+import { templateValidator } from '../validators';
+
+/**
+ * Helper function to compare version numbers.
+ * Assumes that the version numbers are valid, produces undefined behavior otherwise.
+ *
+ * @param a Left-hand version string
+ * @param b Right-hand version string
+ * @returns -1 if a > b, 1 if a < b, 0 otherwise (so sorting puts the newest version first).
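+ *
+ * @example
+ * // Illustrative: compareVersions('2.1.0', '2.0.5') === -1
+ * //               compareVersions('1.0', '1.0.0') === 0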
+ */
+function compareVersions(a: string, b: string): number {
+  const aParts = a.split('.').map((part) => Number.parseInt(part, 10));
+  const bParts = b.split('.').map((part) => Number.parseInt(part, 10));
+
+  for (let i = 0; i < Math.max(aParts.length, bParts.length); i++) {
+    const aValue = i < aParts.length ? aParts[i] : 0;
+    const bValue = i < bParts.length ? bParts[i] : 0;
+
+    if (aValue > bValue) {
+      return -1; // a > b
+    } else if (aValue < bValue) {
+      return 1; // a < b
+    }
+  }
+
+  return 0; // a == b
+}
+
+/**
+ * Helper function to check if the given path is a directory
+ *
+ * @param dirPath The directory to check.
+ * @returns True if the path is a directory.
+ */
+async function isDirectory(dirPath: string): Promise<boolean> {
+  try {
+    const stats = await fs.stat(dirPath);
+    return stats.isDirectory();
+  } catch {
+    return false;
+  }
+}
+
+/**
+ * Helper function to log validation errors.
+ * Relies on the `ajv` package for validation error messages.
+ *
+ * @param integration The name of the component that failed validation.
+ * @param validator A failing ajv validator.
+ */
+function logValidationErrors(integration: string, validator: ValidateFunction<any>) {
+  const errors = validator.errors?.map((e) => e.message);
+  console.error(`Validation errors in ${integration}`, errors);
+}
+
+/**
+ * The Integration class represents the data for Integration Templates.
+ * It is backed by the repository file system.
+ * It includes accessor methods for integration configs, as well as helpers for nested components.
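+ *
+ * Illustrative layout for `new Integration('./sample')`, matching the test fixtures:
+ *   ./sample/sample-2.0.0.json                      <- config, read by getConfig('2.0.0')
+ *   ./sample/assets/sample-1.0.1.ndjson             <- saved objects, read by getAssets()
+ *   ./sample/schemas/component1-1.0.0.mapping.json  <- mappings, read by getSchemas()
+ *   ./sample/static/logo.png                        <- static data, read by getStatic('logo.png')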
+ */
+export class Integration {
+  directory: string;
+  name: string;
+
+  constructor(directory: string) {
+    this.directory = directory;
+    this.name = path.basename(directory);
+  }
+
+  /**
+   * Check the integration for validity.
+   * This is not a deep check, but a quick check to verify that the integration is a valid directory and has a config file.
+   *
+   * @returns true if the integration is valid.
+   */
+  async check(): Promise<boolean> {
+    if (!(await isDirectory(this.directory))) {
+      return false;
+    }
+    return (await this.getConfig()) !== null;
+  }
+
+  /**
+   * Like check(), but thoroughly checks all nested integration dependencies.
+   *
+   * @returns true if the integration is valid.
+   */
+  async deepCheck(): Promise<boolean> {
+    if (!(await this.check())) {
+      console.error('check failed');
+      return false;
+    }
+
+    try {
+      // An integration must have at least one mapping
+      const schemas = await this.getSchemas();
+      if (Object.keys(schemas.mappings).length === 0) {
+        return false;
+      }
+      // An integration must have at least one asset
+      const assets = await this.getAssets();
+      if (Object.keys(assets).length === 0) {
+        return false;
+      }
+    } catch (err: any) {
+      // Any loading errors are considered invalid
+      console.error('Deep check failed for exception', err);
+      return false;
+    }
+
+    return true;
+  }
+
+  /**
+   * Get the latest version of the integration available.
+   * This method relies on the fact that integration configs have their versions in their file names.
+   * Any files that don't match the config naming convention will be ignored.
+   *
+   * @returns A string with the latest version, or null if no versions are available.
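+   *
+   * @example
+   * // Illustrative: for an integration named 'sample', 'sample-1.0.0.json' and
+   * // 'sample-2.0.0.json' yield '2.0.0', while 'sample-three.json' is ignored.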
+   */
+  async getLatestVersion(): Promise<string | null> {
+    const files = await fs.readdir(this.directory);
+    const versions: string[] = [];
+
+    for (const file of files) {
+      if (path.extname(file) === '.json' && file.startsWith(`${this.name}-`)) {
+        const version = file.substring(this.name.length + 1, file.length - 5);
+        if (!version.match(/^\d+(\.\d+)*$/)) {
+          continue;
+        }
+        versions.push(version);
+      }
+    }
+
+    versions.sort((a, b) => compareVersions(a, b));
+
+    return versions.length > 0 ? versions[0] : null;
+  }
+
+  /**
+   * Get the configuration of the current integration.
+   *
+   * @param version The version of the config to retrieve.
+   * @returns The config if a valid config matching the version is present, otherwise null.
+   */
+  async getConfig(version?: string): Promise<IntegrationTemplate | null> {
+    const maybeVersion: string | null = version ? version : await this.getLatestVersion();
+
+    if (maybeVersion === null) {
+      return null;
+    }
+
+    const configFile = `${this.name}-${maybeVersion}.json`;
+    const configPath = path.join(this.directory, configFile);
+
+    try {
+      const config = await fs.readFile(configPath, { encoding: 'utf-8' });
+      const possibleTemplate = JSON.parse(config);
+
+      if (!templateValidator(possibleTemplate)) {
+        logValidationErrors(configFile, templateValidator);
+        return null;
+      }
+
+      return possibleTemplate;
+    } catch (err: any) {
+      if (err instanceof SyntaxError) {
+        console.error(`Syntax errors in ${configFile}`, err);
+        return null;
+      }
+      if (err instanceof Error && (err as { code?: string }).code === 'ENOENT') {
+        console.error(`Attempted to retrieve non-existent config ${configFile}`);
+        return null;
+      }
+      throw new Error('Could not load integration', { cause: err });
+    }
+  }
+
+  /**
+   * Retrieve assets associated with the integration.
+   * This method greedily retrieves all assets.
+   * If the version is invalid, an error is thrown.
+   * If an asset is invalid, it will be skipped.
+   *
+   * @param version The version of the integration to retrieve assets for.
+   * @returns An object containing the different types of assets.
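+   *
+   * @example
+   * // Illustrative: with assets.savedObjects = { name: 'sample', version: '1.0.1' },
+   * // the saved objects are read from '<directory>/assets/sample-1.0.1.ndjson'.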
+   */
+  async getAssets(
+    version?: string
+  ): Promise<{
+    savedObjects?: object[];
+  }> {
+    const config = await this.getConfig(version);
+    if (config === null) {
+      return Promise.reject(new Error('Attempted to get assets of invalid config'));
+    }
+    const result: { savedObjects?: object[] } = {};
+    if (config.assets.savedObjects) {
+      const sobjPath = path.join(
+        this.directory,
+        'assets',
+        `${config.assets.savedObjects.name}-${config.assets.savedObjects.version}.ndjson`
+      );
+      try {
+        const ndjson = await fs.readFile(sobjPath, { encoding: 'utf-8' });
+        const asJson = '[' + ndjson.trim().replace(/\n/g, ',') + ']'; // tolerate a trailing newline
+        const parsed = JSON.parse(asJson);
+        result.savedObjects = parsed;
+      } catch (err: any) {
+        console.error("Failed to load saved object assets, proceeding as if it's absent", err);
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Retrieve schema data associated with the integration.
+   * This method greedily retrieves all mappings and schemas.
+   * It's assumed that a valid version will be provided.
+   * If the version is invalid, an error is thrown.
+   * If a schema is invalid, an error will be thrown.
+   *
+   * @param version The version of the integration to retrieve schemas for.
+   * @returns An object containing the retrieved mappings, keyed by component name.
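+   *
+   * @example
+   * // Illustrative: a component { name: 'component1', version: '1.0.0' } is read from
+   * // '<directory>/schemas/component1-1.0.0.mapping.json'.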
+   */
+  async getSchemas(
+    version?: string
+  ): Promise<{
+    mappings: { [key: string]: any };
+  }> {
+    const config = await this.getConfig(version);
+    if (config === null) {
+      return Promise.reject(new Error('Attempted to get assets of invalid config'));
+    }
+    const result: { mappings: { [key: string]: any } } = {
+      mappings: {},
+    };
+    try {
+      for (const component of config.components) {
+        const schemaFile = `${component.name}-${component.version}.mapping.json`;
+        const rawSchema = await fs.readFile(path.join(this.directory, 'schemas', schemaFile), {
+          encoding: 'utf-8',
+        });
+        const parsedSchema = JSON.parse(rawSchema);
+        result.mappings[component.name] = parsedSchema;
+      }
+    } catch (err: any) {
+      // It's not clear that an invalid schema can be recovered from.
+      // For integrations to function, we need schemas to be valid.
+      console.error('Error loading schema', err);
+      return Promise.reject(new Error('Could not load schema', { cause: err }));
+    }
+    return result;
+  }
+
+  /**
+   * Retrieves the data for a static file associated with the integration.
+   *
+   * @param staticPath The path of the static to retrieve.
+   * @returns A buffer with the static's data if present, otherwise null.
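+   *
+   * @example
+   * // Illustrative: getStatic('logo.png') reads '<directory>/static/logo.png'.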
+   */
+  async getStatic(staticPath: string): Promise<Buffer | null> {
+    const fullStaticPath = path.join(this.directory, 'static', staticPath);
+    try {
+      return await fs.readFile(fullStaticPath);
+    } catch (err: any) {
+      if (err instanceof Error && (err as { code?: string }).code === 'ENOENT') {
+        console.error(`Static not found: ${staticPath}`);
+        return null;
+      }
+      throw err;
+    }
+  }
+}
diff --git a/server/adaptors/integrations/repository/repository.ts b/server/adaptors/integrations/repository/repository.ts
new file mode 100644
index 0000000000..00d241327d
--- /dev/null
+++ b/server/adaptors/integrations/repository/repository.ts
@@ -0,0 +1,41 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import { Integration } from './integration';
+
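+/**
+ * The Repository class provides access to integration templates stored as
+ * subdirectories of a repository directory on the file system.
+ */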
+export class Repository {
+  directory: string;
+
+  constructor(directory: string) {
+    this.directory = directory;
+  }
+
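+  /**
+   * List the valid integrations in the repository directory.
+   * Entries that aren't directories or that fail Integration.check() are skipped;
+   * if the directory can't be read, the error is logged and an empty list is returned.
+   */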
+  async getIntegrationList(): Promise<Integration[]> {
+    try {
+      const folders = await fs.readdir(this.directory);
+      const integrations = Promise.all(
+        folders.map(async (folder) => {
+          const integPath = path.join(this.directory, folder);
+          if (!(await fs.lstat(integPath)).isDirectory()) {
+            return null;
+          }
+          const integ = new Integration(integPath);
+          return (await integ.check()) ? integ : null;
+        })
+      );
+      return (await integrations).filter((x) => x !== null) as Integration[];
+    } catch (error) {
+      console.error(`Error reading integration directories in: ${this.directory}`, error);
+      return [];
+    }
+  }
+
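+  /**
+   * Look up a single integration by name.
+   * Returns null if it doesn't exist or fails its check.
+   */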
+  async getIntegration(name: string): Promise<Integration | null> {
+    const integ = new Integration(path.join(this.directory, name));
+    return (await integ.check()) ? integ : null;
+  }
+}
diff --git a/server/adaptors/integrations/types.ts b/server/adaptors/integrations/types.ts
index d4c2b68294..58293580fb 100644
--- a/server/adaptors/integrations/types.ts
+++ b/server/adaptors/integrations/types.ts
@@ -7,7 +7,6 @@ interface IntegrationTemplate {
   name: string;
   version: string;
   displayName?: string;
-  integrationType: string;
   license: string;
   type: string;
   author?: string;
@@ -26,6 +25,9 @@ interface IntegrationTemplate {
       version: string;
     };
   };
+  sampleData?: {
+    path: string;
+  };
 }
 
 interface StaticAsset {
diff --git a/server/adaptors/integrations/validators.ts b/server/adaptors/integrations/validators.ts
new file mode 100644
index 0000000000..0bc7029b0d
--- /dev/null
+++ b/server/adaptors/integrations/validators.ts
@@ -0,0 +1,119 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import Ajv, { JSONSchemaType } from 'ajv';
+
+const ajv = new Ajv();
+
+const staticAsset: JSONSchemaType<StaticAsset> = {
+  type: 'object',
+  properties: {
+    path: { type: 'string' },
+    annotation: { type: 'string', nullable: true },
+  },
+  required: ['path'],
+  additionalProperties: false,
+};
+
+const templateSchema: JSONSchemaType<IntegrationTemplate> = {
+  type: 'object',
+  properties: {
+    name: { type: 'string' },
+    version: { type: 'string' },
+    displayName: { type: 'string', nullable: true },
+    license: { type: 'string' },
+    type: { type: 'string' },
+    author: { type: 'string', nullable: true },
+    description: { type: 'string', nullable: true },
+    sourceUrl: { type: 'string', nullable: true },
+    statics: {
+      type: 'object',
+      properties: {
+        logo: { ...staticAsset, nullable: true },
+        gallery: { type: 'array', items: staticAsset, nullable: true },
+        darkModeLogo: { ...staticAsset, nullable: true },
+        darkModeGallery: { type: 'array', items: staticAsset, nullable: true },
+      },
+      additionalProperties: false,
+      nullable: true,
+    },
+    components: {
+      type: 'array',
+      items: {
+        type: 'object',
+        properties: {
+          name: { type: 'string' },
+          version: { type: 'string' },
+        },
+        required: ['name', 'version'],
+      },
+    },
+    assets: {
+      type: 'object',
+      properties: {
+        savedObjects: {
+          type: 'object',
+          properties: {
+            name: { type: 'string' },
+            version: { type: 'string' },
+          },
+          required: ['name', 'version'],
+          nullable: true,
+          additionalProperties: false,
+        },
+      },
+      additionalProperties: false,
+    },
+    sampleData: {
+      type: 'object',
+      properties: {
+        path: {
+          type: 'string',
+        },
+      },
+      required: ['path'],
+      additionalProperties: false,
+      nullable: true,
+    },
+  },
+  required: ['name', 'version', 'license', 'type', 'components', 'assets'],
+  additionalProperties: false,
+};
+
+const instanceSchema: JSONSchemaType<IntegrationInstance> = {
+  type: 'object',
+  properties: {
+    name: { type: 'string' },
+    templateName: { type: 'string' },
+    dataSource: {
+      type: 'object',
+      properties: {
+        sourceType: { type: 'string' },
+        dataset: { type: 'string' },
+        namespace: { type: 'string' },
+      },
+      required: ['sourceType', 'dataset', 'namespace'],
+      additionalProperties: false,
+    },
+    creationDate: { type: 'string' },
+    assets: {
+      type: 'array',
+      items: {
+        type: 'object',
+        properties: {
+          assetType: { type: 'string' },
+          assetId: { type: 'string' },
+          isDefaultAsset: { type: 'boolean' },
+          description: { type: 'string' },
+        },
+        required: ['assetType', 'assetId', 'isDefaultAsset', 'description'],
+      },
+    },
+  },
+  required: ['name', 'templateName', 'dataSource', 'creationDate', 'assets'],
+};
+
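+// The compiled validators also act as type guards; for example (illustrative),
+// `if (templateValidator(obj)) { ... }` narrows `obj` to IntegrationTemplate,
+// as used by getConfig() in repository/integration.ts.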
+export const templateValidator = ajv.compile(templateSchema);
+export const instanceValidator = ajv.compile(instanceSchema);
diff --git a/server/adaptors/opensearch_observability_plugin.ts b/server/adaptors/opensearch_observability_plugin.ts
index 2a99187c65..fbdbac72be 100644
--- a/server/adaptors/opensearch_observability_plugin.ts
+++ b/server/adaptors/opensearch_observability_plugin.ts
@@ -3,13 +3,9 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-import { OPENSEARCH_PANELS_API } from "../../common/constants/shared";
+import { OPENSEARCH_PANELS_API } from '../../common/constants/shared';
 
-export function OpenSearchObservabilityPlugin(
-  Client: any,
-  config: any,
-  components: any
-) {
+export function OpenSearchObservabilityPlugin(Client: any, config: any, components: any) {
   const clientAction = components.clientAction.factory;
 
   Client.prototype.observability = components.clientAction.namespaceFactory();
@@ -21,38 +17,38 @@ export function OpenSearchObservabilityPlugin(
       fmt: OPENSEARCH_PANELS_API.OBJECT,
       params: {
         objectId: {
-          type: "string",
+          type: 'string',
         },
         objectIdList: {
-          type: "string",
+          type: 'string',
         },
         objectType: {
-          type: "string",
+          type: 'string',
         },
         sortField: {
-          type: "string",
+          type: 'string',
         },
         sortOrder: {
-          type: "string",
+          type: 'string',
         },
         fromIndex: {
-          type: "number",
+          type: 'number',
         },
         maxItems: {
-          type: "number",
+          type: 'number',
         },
         name: {
-          type: "string",
+          type: 'string',
         },
         lastUpdatedTimeMs: {
-          type: "string",
+          type: 'string',
         },
         createdTimeMs: {
-          type: "string",
+          type: 'string',
         },
       },
     },
-    method: "GET",
+    method: 'GET',
   });
 
   // Get Object by Id
@@ -61,12 +57,12 @@ export function OpenSearchObservabilityPlugin(
       fmt: `${OPENSEARCH_PANELS_API.OBJECT}/<%=objectId%>`,
       req: {
         objectId: {
-          type: "string",
+          type: 'string',
           required: true,
         },
       },
     },
-    method: "GET",
+    method: 'GET',
   });
 
   // Create new Object
@@ -74,7 +70,7 @@ export function OpenSearchObservabilityPlugin(
     url: {
       fmt: OPENSEARCH_PANELS_API.OBJECT,
     },
-    method: "POST",
+    method: 'POST',
     needBody: true,
   });
 
@@ -84,12 +80,12 @@ export function OpenSearchObservabilityPlugin(
       fmt: `${OPENSEARCH_PANELS_API.OBJECT}/<%=objectId%>`,
       req: {
         objectId: {
-          type: "string",
+          type: 'string',
           required: true,
         },
       },
     },
-    method: "PUT",
+    method: 'PUT',
     needBody: true,
   });
 
@@ -99,12 +95,12 @@ export function OpenSearchObservabilityPlugin(
       fmt: `${OPENSEARCH_PANELS_API.OBJECT}/<%=objectId%>`,
       req: {
         objectId: {
-          type: "string",
+          type: 'string',
           required: true,
         },
       },
     },
-    method: "DELETE",
+    method: 'DELETE',
   });
 
   // Delete Object by Id List
@@ -113,11 +109,11 @@ export function OpenSearchObservabilityPlugin(
       fmt: OPENSEARCH_PANELS_API.OBJECT,
       params: {
         objectIdList: {
-          type: "string",
+          type: 'string',
           required: true,
         },
       },
     },
-    method: "DELETE",
+    method: 'DELETE',
   });
 }