YAR-14287: Convert to TypeScript (MaterializeInc#66)
* Convert to TypeScript

* Allow integration tests on PRs

* Update avro name hook

* Fix CSR prefix bug
bobbyiliev authored and Andre Rosa committed Feb 12, 2024
1 parent 4627aaf commit 319b7f0
Showing 32 changed files with 3,062 additions and 1,321 deletions.
22 changes: 8 additions & 14 deletions .github/workflows/integration.yaml
@@ -1,8 +1,8 @@
name: Integration Tests
on:
pull_request:
paths:
- materialize/**
branches:
- main

jobs:
integration:
@@ -17,7 +17,7 @@ jobs:
run: docker exec datagen datagen --version

- name: Produce to Kafka from SQL Schema in JSON Format
run: docker exec datagen datagen -s /tests/schema.sql -f json -n 3 --record-size 100 -d
run: docker exec datagen datagen -s /tests/schema2.sql -f json -n 3 --record-size 100 -d

- name: Produce to Kafka from JSON Schema in JSON Format
run: docker exec datagen datagen -s /tests/schema.json -f json -n 3 --record-size 100 -d
@@ -26,25 +26,19 @@ jobs:
run: docker exec datagen datagen -s /tests/schema.avsc -f json -n 3 --record-size 100 -d

- name: Produce to Kafka from SQL Schema in Avro Format
run: docker exec datagen datagen -s /tests/schema.sql -f avro -n 3 --record-size 100 -d -w 100
run: docker exec datagen datagen -s /tests/schema2.sql -f avro -n 3 --record-size 100 -d -w 100

- name: Produce to Kafka from JSON Schema in Avro Format
run: docker exec datagen datagen -s /tests/schema.json -f avro -n 3 --record-size 100 -d -w 100
run: docker exec datagen datagen -s /tests/schema.json -f avro -n 3 --record-size 100 -d -w 100 -p json

- name: Produce to Kafka from Avro Schema in Avro Format
run: docker exec datagen datagen -s /tests/schema.avsc -f avro -n 3 --record-size 100 -d -w 100

- name: Topic prefix with json
run: docker exec datagen datagen -s /tests/schema.sql -f json -n 3 --record-size 100 -d --prefix test

- name: Topic prefix with avro
run: docker exec datagen datagen -s /tests/schema.sql -f avro -n 3 --record-size 100 -d --prefix test
run: docker exec datagen datagen -s /tests/schema.avsc -f avro -n 3 --record-size 100 -d -w 100 -p sql

- name: Clean Kafka topic
run: docker exec datagen datagen -s /tests/schema.sql -f json -d --clean
run: docker exec datagen datagen -s /tests/schema2.sql -f json -d --clean

- name: Clean Kafka topic with prefix
run: docker exec datagen datagen -s /tests/schema.sql -f json -d --clean --prefix test
run: docker exec datagen datagen -s /tests/schema2.sql -f json -d --clean --prefix test

- name: Clean Kafka topic and schema registry
run: docker exec datagen datagen -s /tests/schema.avsc -f avro -d --clean
3 changes: 3 additions & 0 deletions .github/workflows/tests.yaml
@@ -20,5 +20,8 @@ jobs:
# Install dependencies
- run: npm install

# Build
- run: npm run build

# Run tests
- run: npm test
2 changes: 2 additions & 0 deletions .gitignore
@@ -69,3 +69,5 @@ assets/img/.DS_Store

# VSCode related files #
# .vscode

dist/
4 changes: 4 additions & 0 deletions .npmignore
@@ -0,0 +1,4 @@
src
tsconfig.json
tslint.json
.prettierrc
17 changes: 11 additions & 6 deletions Dockerfile
@@ -1,21 +1,26 @@
# Use the latest LTS version of Node.js as the base image
FROM node:lts-alpine
FROM node:lts-slim as builder

# Set the working directory in the container
WORKDIR /app

# Copy the package.json and package-lock.json files to the container
COPY package*.json ./
COPY . .

# Install the application dependencies
RUN npm install
RUN npm install && npm run build

FROM node:lts-slim

# Copy the application source code to the container
COPY ./datagen.js ./
COPY ./src ./src
WORKDIR /app

COPY package*.json ./
RUN npm install --only=production
COPY --from=builder /app/dist ./dist
COPY ./tests ./tests

RUN npm link
RUN npm link --only=production

# Set the command to run the application when the container starts
ENTRYPOINT [ "datagen" ]
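
Note: reassembled from the added lines in the hunk above, the new two-stage build should look roughly like the sketch below — the builder stage installs everything and compiles the TypeScript sources, the runtime stage installs only production dependencies and copies in the compiled `dist/` output. This is a sketch; the committed file may differ in comments or ordering.

```dockerfile
# Builder stage: install all dependencies and compile TypeScript to dist/
FROM node:lts-slim as builder
WORKDIR /app
COPY . .
RUN npm install && npm run build

# Runtime stage: production dependencies only, plus the compiled output
FROM node:lts-slim
WORKDIR /app
COPY package*.json ./
RUN npm install --only=production
COPY --from=builder /app/dist ./dist
COPY ./tests ./tests
RUN npm link --only=production

# Set the command to run the application when the container starts
ENTRYPOINT [ "datagen" ]
```
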
1 change: 1 addition & 0 deletions README.md
Expand Up @@ -30,6 +30,7 @@ docker pull materialize/datagen
git clone https://github.com/MaterializeInc/datagen.git
cd datagen
npm install
npm run build
npm link
```

30 changes: 15 additions & 15 deletions datagen.js → datagen.ts
@@ -6,16 +6,16 @@
*
* @author Bobby Iliev <https://github.com/bobbyiliev>
*/
const end = require('./src/utils/end');
const alert = require('cli-alerts');
import end from './src/utils/end.js';
import alert from 'cli-alerts';

const { parseSqlSchema } = require('./src/schemas/parseSqlSchema');
const { parseAvroSchema } = require('./src/schemas/parseAvroSchema');
const { parseJsonSchema } = require('./src/schemas/parseJsonSchema');
const cleanKafka = require('./src/kafka/cleanKafka');
const dataGenerator = require('./src/dataGenerator');
const fs = require('fs');
const { program, Option } = require('commander');
import { parseSqlSchema } from './src/schemas/parseSqlSchema.js';
import { parseAvroSchema } from './src/schemas/parseAvroSchema.js';
import parseJsonSchema from './src/schemas/parseJsonSchema.js';
import cleanKafka from './src/kafka/cleanKafka.js';
import dataGenerator from './src/dataGenerator.js';
import fs from 'fs';
import { program, Option } from 'commander';

program.name('datagen').description('Fake Data Generator').version('0.1.3');

@@ -59,12 +59,12 @@ global.clean = options.clean;
global.dryRun = options.dryRun;
global.prefix = options.prefix;

if (debug) {
if (global.debug) {
console.log(options);
}

if (!wait) {
wait = 0;
if (!global.wait) {
global.wait = 0;
}

(async () => {
@@ -104,12 +104,12 @@ if (!wait) {
process.exit();
}

if (clean) {
if (global.clean) {
let topics = []
for (table of parsedSchema){
for (let table of parsedSchema) {
topics.push(table._meta.topic)
}
await cleanKafka(options.format,topics)
await cleanKafka(options.format, topics)
process.exit(0);
}

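
Note: the converted CLI stores its parsed options on `global` (`global.debug`, `global.wait`, `global.clean`, `global.dryRun`, `global.prefix`), and the hunks above switch the bare reads (`debug`, `wait`, `clean`) to `global.*`. For that to type-check, those properties have to be declared somewhere. The sketch below shows one common way to do it with a `declare global` augmentation — a hypothetical illustration, not necessarily how this repository declares them, and the property types are assumptions based on how the values are used.

```typescript
// Hypothetical sketch (e.g. a globals.d.ts alongside the sources): declare the
// ad-hoc properties that datagen.ts attaches to `global` so the TypeScript
// compiler accepts reads like `global.debug` and `global.wait`.
declare global {
    // `var` (not let/const) is required for the names to land on globalThis.
    var debug: boolean;
    var wait: number;               // defaulted to 0 when the flag is absent
    var clean: boolean;
    var dryRun: boolean;
    var prefix: string | undefined; // topic prefix, may be unset
}

// An empty export keeps this file a module, which is what makes the
// `declare global` augmentation take effect.
export {};
```
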
5 changes: 3 additions & 2 deletions docker-compose.yaml
@@ -28,17 +28,18 @@ services:
- 8081:8081
depends_on:
- kafka
- zookeeper
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: kafka:9092
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_LISTENERS: http://schema-registry:8081,http://localhost:8081
healthcheck: {test: curl -f localhost:8081, interval: 1s, start_period: 120s}

datagen:
build: .
container_name: datagen
depends_on:
- kafka
- schema-registry
schema-registry: {condition: service_healthy}
environment:
SCHEMA_REGISTRY_URL: http://schema-registry:8081
KAFKA_BROKERS: kafka:9092
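
Note: this hunk drops the zookeeper dependency, gives schema-registry a healthcheck, and moves datagen's dependency on schema-registry to the long (mapping) form of `depends_on` with `condition: service_healthy`, so the container only starts once the registry answers on port 8081. Since Compose does not allow mixing the list and mapping forms inside one `depends_on`, the final service definition presumably looks something like the sketch below; the `kafka` condition is an assumption (`service_started` is the mapping-form default), and the committed file may differ.

```yaml
# Sketch of the assumed final shape of the datagen service. The
# schema-registry condition comes straight from the diff; the kafka
# condition is a guess.
datagen:
  build: .
  container_name: datagen
  depends_on:
    kafka:
      condition: service_started
    schema-registry:
      condition: service_healthy
  environment:
    SCHEMA_REGISTRY_URL: http://schema-registry:8081
    KAFKA_BROKERS: kafka:9092
```
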