Skip to content

Commit

Permalink
Dataset id validation and list status fix (#226)
Browse files Browse the repository at this point in the history
* #OBS-I150: added enums for the list api and added dataset_id validation in create and update live dataset

* #OBS-I150: Live and Retired are the allowed filters for dataset table

* #OBS-I150: removed dataset id check in update

* #OBS-I150: created validation middleware for detecting sql injection

* #OBS-I150: Added dataset_id for filter and created validate sql injection function

* #OBS-I150: Reverted sql injection changes

* #OBS-I150: Added test case and its fixture

* #OBS-I150: changed message for invalid dataset id
  • Loading branch information
yashashkumar authored Aug 7, 2024
1 parent 639a3a3 commit 7bc1cb9
Show file tree
Hide file tree
Showing 7 changed files with 54 additions and 5 deletions.
5 changes: 5 additions & 0 deletions api-service/src/helpers/prometheus/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,8 @@ export const onFailure = (req: any, res: Response) => {
duration && setQueryResponseTime(duration, { entity, id, endpoint: url });
incrementFailedApiCalls({ entity, id, endpoint: url });
}

// Returns true only when the dataset id contains the characters the
// INVALID_DATASET_ID error message promises to accept: alphabets, numbers,
// ".", "-" and "_". The previous denylist regex contradicted that message —
// it still allowed spaces, "/", quotes, "=", "`" and other characters it
// did not explicitly enumerate.
export const validateDatasetId = (datasetId: string) => {
    // Guard first: callers pass req.body?.dataset_id, which may be undefined,
    // and RegExp.test coerces undefined to the string "undefined" (which
    // would wrongly pass). An empty id is also invalid.
    if (!datasetId) return false;
    const pattern = /^[A-Za-z0-9._-]+$/;
    return pattern.test(datasetId);
}
9 changes: 7 additions & 2 deletions api-service/src/resources/Constants.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,11 @@
"status": 404,
"code": "NOT_FOUND"
},
"INVALID_DATASET_ID": {
"message": "The dataset id should exclude any special characters, permitting only alphabets, numbers, \".\", \"-\", and \"_\".",
"status": 400,
"code": "BAD_REQUEST"
},
"DATASET_ID_NOT_FOUND": {
"message": "Dataset Id not found, Failed to ingest",
"status": 404,
Expand Down Expand Up @@ -107,5 +112,5 @@
"status": 500,
"message": "Something went wrong while executing the query. Please try again later.",
"code": "INTERNAL_SERVER_ERROR"
}
}
}
}
24 changes: 22 additions & 2 deletions api-service/src/resources/schemas/DatasetListReq.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,28 @@
"type": "object",
"properties": {
"filters": {
"type": "object"
"type": "object",
"properties": {
"status": {
"type": "array",
"items": {
"type": "string",
"enum": [
"Live",
"Retired"
]
}
},
"dataset_id": {
"type": "string"
}
},
"required": [
"status"
]
}
},
"required": ["filters"]
"required": [
"filters"
]
}
4 changes: 4 additions & 0 deletions api-service/src/services/DatasetService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ import { DbUtil } from "../helpers/DbUtil";
import { refreshDatasetConfigs } from "../helpers/DatasetConfigs";
import { ErrorResponseHandler } from "../helpers/ErrorResponseHandler";
import { DatasetStatus, IConnector } from "../models/DatasetModels";
import { validateDatasetId } from "../helpers/prometheus/helpers";
import constants from "../resources/Constants.json";

const telemetryObject = { id: null, type: "dataset", ver: "1.0.0" };

Expand All @@ -24,6 +26,8 @@ export class DatasetService {
public save = async (req: Request, res: Response, next: NextFunction) => {
try {
const dataset = new Datasets(req.body)
const isValid = validateDatasetId(req.body?.dataset_id);
if(!isValid) throw constants.INVALID_DATASET_ID;
const payload: any = dataset.setValues()
updateTelemetryAuditEvent({ request: req, object: { ...telemetryObject, id: _.get(payload, 'dataset_id') } });
await this.dbUtil.save(req, res, next, payload)
Expand Down
1 change: 0 additions & 1 deletion api-service/src/services/ValidationService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import { IValidator, QValidator } from "../models/DatasetModels";
import { ValidationStatus } from "../models/ValidationModels";
import { QueryValidator } from "../validators/QueryValidator";
import { RequestsValidator } from "../validators/RequestsValidator";

export class ValidationService {

private request: QValidator;
Expand Down
15 changes: 15 additions & 0 deletions api-service/src/test/DatasetTestService.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,21 @@ describe("Dataset create API", () => {
done();
});
});
// Verifies the create API rejects a payload whose dataset_id contains
// special characters: HTTP 400, FAILURE status, and the save api_id echoed.
it("Invalid dataset_id provided", (done) => {
    chai.request(app)
        .post(config.apiDatasetSaveEndPoint)
        .send(TestDataset.DATASET_ID_VALIDATION_SCHEMA)
        .end((error, response) => {
            response.should.have.status(httpStatus.BAD_REQUEST);
            response.body.should.be.a("object");
            response.body.responseCode.should.be.eq(httpStatus["400_NAME"]);
            response.body.should.have.property("result");
            response.body.id.should.be.eq(routesConfig.config.dataset.save.api_id);
            response.body.params.status.should.be.eq(constants.STATUS.FAILURE);
            done();
        });
});
it("should not insert record when given invalid schema", (done) => {
chai
.request(app)
Expand Down
1 change: 1 addition & 0 deletions api-service/src/test/Fixtures.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ class TestDataset {
public static VALID_SCHEMA_MASTER_DATASET = { "type": "master-dataset", "dataset_id": "3f8b2ba7-9c74-4d7f-8b38-2b0d460b999c", "name": "telemetry-raw", "data_schema": { "type": "object", "properties": { "eid": { "type": "string" }, "ver": { "type": "string" }, "syncts": { "type": "integer" }, "ets": { "type": "integer" }, "mid": { "type": "string" }, "actor": { "type": "object", "properties": { "id": { "type": "string" }, "type": { "type": "string" } }, "required": [ "id", "type" ] }, "edata": { "type": "object", "properties": { "type": { "type": "string" } }, "required": [ "type" ] }, "@timestamp": { "type": "string" }, "context": { "type": "object", "properties": { "pdata": { "type": "object", "properties": { "ver": { "type": "string" }, "id": { "type": "string" }, "pid": { "type": "string" } }, "required": [ "ver", "id", "pid" ] }, "did": { "type": "string" }, "env": { "type": "string" }, "channel": { "type": "string" } }, "required": [ "pdata", "did", "env", "channel" ] }, "@version": { "type": "string" }, "object": { "type": "object", "properties": { "id": { "type": "string" }, "type": { "type": "string" } }, "required": [ "id", "type" ] } }, "required": [ "eid", "ver", "syncts", "ets", "mid", "actor", "edata", "@timestamp", "context", "@version", "object" ] }, "tags": [], "router_config": { "topic": "router.topic" }, "status": DatasetStatus.Live, "published_date": "2023-03-14T04:46:33.459Z" }
public static VALID_UPDATE_SCHEMA = { "type": "master-dataset", "dataset_id": "observations", "id": "observations", "name": "telemetry-raw", "data_schema": { "type": "object", "properties": { "eid": { "type": "string" }, "ver": { "type": "string" }, "syncts": { "type": "integer" }, "ets": { "type": "integer" }, "mid": { "type": "string" }, "actor": { "type": "object", "properties": { "id": { "type": "string" }, "type": { "type": "string" } }, "required": [ "id", "type" ] }, "edata": { "type": "object", "properties": { "type": { "type": "string" } }, "required": [ "type" ] }, "@timestamp": { "type": "string" }, "context": { "type": "object", "properties": { "pdata": { "type": "object", "properties": { "ver": { "type": "string" }, "id": { "type": "string" }, "pid": { "type": "string" } }, "required": [ "ver", "id", "pid" ] }, "did": { "type": "string" }, "env": { "type": "string" }, "channel": { "type": "string" } }, "required": [ "pdata", "did", "env", "channel" ] }, "@version": { "type": "string" }, "object": { "type": "object", "properties": { "id": { "type": "string" }, "type": { "type": "string" } }, "required": [ "id", "type" ] } }, "required": [ "eid", "ver", "syncts", "ets", "mid", "actor", "edata", "@timestamp", "context", "@version", "object" ] }, "router_config": { "topic": "router.topic" }, "tags": [], "status": DatasetStatus.Retired, "published_date": "2023-03-14T04:46:33.459Z" }
public static INVALID_SCHEMA = { "dataset_id": "observations", "type": "dataset", "name": "observations", "router_config": { "topic": "" }, "data_schema": "string", "dataset_config": { "entry_topic": "local.ingest", "redis_db_host": "localhost", "redis_db_port": 6379 }, "status": DatasetStatus.Live, "published_date": "2023-03-24 12:19:32.091544" }
public static DATASET_ID_VALIDATION_SCHEMA = { "dataset_id": "observations*@(*", "type": "dataset", "name": "observations", "router_config": { "topic": "" }, "data_schema": "string", "dataset_config": { "entry_topic": "local.ingest", "redis_db_host": "localhost", "redis_db_port": 6379 }, "status": DatasetStatus.Live, "published_date": "2023-03-24 12:19:32.091544" }
public static MISSING_REQUIRED_FIELDS_CREATE = { "type": "dataset", "dataset_id": "observations", "name": "telemetry-raw", "data_schema": { "type": "object", "properties": { "eid": { "type": "string" }, "ver": { "type": "string" }, "syncts": { "type": "integer" }, "ets": { "type": "integer" }, "mid": { "type": "string" }, "actor": { "type": "object", "properties": { "id": { "type": "string" }, "type": { "type": "string" } }, "required": [ "id", "type" ] }, "edata": { "type": "object", "properties": { "type": { "type": "string" } }, "required": [ "type" ] }, "@timestamp": { "type": "string" }, "context": { "type": "object", "properties": { "pdata": { "type": "object", "properties": { "ver": { "type": "string" }, "id": { "type": "string" }, "pid": { "type": "string" } }, "required": [ "ver", "id", "pid" ] }, "did": { "type": "string" }, "env": { "type": "string" }, "channel": { "type": "string" } }, "required": [ "pdata", "did", "env", "channel" ] }, "@version": { "type": "string" }, "object": { "type": "object", "properties": { "id": { "type": "string" }, "type": { "type": "string" } }, "required": [ "id", "type" ] } }, "required": [ "eid", "ver", "syncts", "ets", "mid", "actor", "edata", "@timestamp", "context", "@version", "object" ] }, "status": DatasetStatus.Live, "published_date": "2023-03-14T04:46:33.459Z" };
public static SAMPLE_ID = "observations";
public static VALID_LIST_REQUEST_ACTIVE_STATUS = { "filters": { "status": [ DatasetStatus.Live ] } };
Expand Down

0 comments on commit 7bc1cb9

Please sign in to comment.