diff --git a/.gitignore b/.gitignore index f9fe9ae7..6731bf4f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,9 @@ logs/* db/* !db/.gitkeep csv/ +report-files/ logs package-lock.json workers/loc.api/queue/temp/*.csv +workers/loc.api/queue/temp/*.pdf workers/loc.api/queue/temp/*.zip diff --git a/config/service.report.json.example b/config/service.report.json.example index 3040c805..95f897f3 100644 --- a/config/service.report.json.example +++ b/config/service.report.json.example @@ -13,7 +13,7 @@ "signedUrlExpireTime": 28800 }, "isSpamRestrictionMode": true, - "isAddedUniqueEndingToCsvName": false, + "isAddedUniqueEndingToReportFileName": false, "isСompress": true, "isLoggerDisabled": false } diff --git a/test/2-api-filter.spec.js b/test/2-api-filter.spec.js index c49afe9b..282d938b 100644 --- a/test/2-api-filter.spec.js +++ b/test/2-api-filter.spec.js @@ -17,7 +17,7 @@ const { createMockRESTv2SrvWithDate } = require('./helpers/helpers.mock-rest-v2') const { - testMethodOfGettingCsv + testMethodOfGettingReportFile } = require('./helpers/helpers.tests') process.env.NODE_CONFIG_DIR = path.join(__dirname, 'config') @@ -578,7 +578,7 @@ describe('API filter', () => { { args: { ...baseArgs, - method: 'getLedgersCsv', + method: 'getLedgersFile', params: { ...baseParams, symbol: ['BTC'], @@ -591,7 +591,7 @@ describe('API filter', () => { { args: { ...baseArgs, - method: 'getLedgersCsv', + method: 'getLedgersFile', params: { ...baseParams, symbol: ['BTC'], @@ -604,7 +604,7 @@ describe('API filter', () => { { args: { ...baseArgs, - method: 'getLedgersCsv', + method: 'getLedgersFile', params: { ...baseParams, symbol: ['BTC'], @@ -617,7 +617,7 @@ describe('API filter', () => { { args: { ...baseArgs, - method: 'getTradesCsv', + method: 'getTradesFile', params: { ...baseParams, symbol: ['tBTCUSD', 'tETHUSD'], @@ -645,7 +645,7 @@ describe('API filter', () => { } }) - it('it should be successfully performed by the getLedgersCsv method', async function () { + it('it should be successfully performed by the getLedgersFile method', async function () { this.timeout(20000) const procPromise = queueToPromise(processorQueue) @@ -656,7 +656,7 @@ describe('API filter', () => { .type('json') .send({ auth, - method: 'getLedgersCsv', + method: 'getLedgersFile', params: { symbol: ['BTC'], end, @@ -673,10 +673,10 @@ describe('API filter', () => { .expect('Content-Type', /json/) .expect(200) - await testMethodOfGettingCsv(procPromise, aggrPromise, res) + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) }) - it('it should be successfully performed by the getMultipleCsv method', async function () { + it('it should be successfully performed by the getMultipleFile method', async function () { this.timeout(20000) const procPromise = queueToPromise(processorQueue) @@ -687,13 +687,13 @@ describe('API filter', () => { .type('json') .send({ auth, - method: 'getMultipleCsv', + method: 'getMultipleFile', params: { email, language: 'ru', multiExport: [ { - method: 'getTradesCsv', + method: 'getTradesFile', symbol: ['tBTCUSD', 'tETHUSD'], end, start, @@ -707,7 +707,7 @@ describe('API filter', () => { } }, { - method: 'getTickersHistoryCsv', + method: 'getTickersHistoryFile', symbol: 'BTC', end, start, @@ -723,6 +723,6 @@ describe('API filter', () => { .expect('Content-Type', /json/) .expect(200) - await testMethodOfGettingCsv(procPromise, aggrPromise, res) + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) }) }) diff --git a/test/3-report-signature.spec.js b/test/3-report-signature.spec.js 
index 048c635f..605181da 100644 --- a/test/3-report-signature.spec.js +++ b/test/3-report-signature.spec.js @@ -17,7 +17,7 @@ const { createMockRESTv2SrvWithDate } = require('./helpers/helpers.mock-rest-v2') const { - testMethodOfGettingCsv + testMethodOfGettingReportFile } = require('./helpers/helpers.tests') const signature = `-----BEGIN PGP SIGNATURE----- @@ -79,7 +79,7 @@ describe('Signature', () => { } catch (err) { } }) - it('it should be successfully performed by the getMultipleCsv method', async function () { + it('it should be successfully performed by the getMultipleFile method', async function () { this.timeout(60000) const procPromise = queueToPromise(processorQueue) @@ -90,13 +90,13 @@ describe('Signature', () => { .type('json') .send({ auth, - method: 'getMultipleCsv', + method: 'getMultipleFile', params: { email, isSignatureRequired: true, multiExport: [ { - method: 'getTradesCsv', + method: 'getTradesFile', symbol: ['tBTCUSD', 'tETHUSD'], end, start, @@ -104,7 +104,7 @@ describe('Signature', () => { timezone: 'America/Los_Angeles' }, { - method: 'getTickersHistoryCsv', + method: 'getTickersHistoryFile', symbol: 'BTC', end, start, @@ -117,10 +117,10 @@ describe('Signature', () => { .expect('Content-Type', /json/) .expect(200) - await testMethodOfGettingCsv(procPromise, aggrPromise, res) + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) }) - it('it should be successfully performed by the getLedgersCsv method', async function () { + it('it should be successfully performed by the getLedgersFile method', async function () { this.timeout(60000) const procPromise = queueToPromise(processorQueue) @@ -131,7 +131,7 @@ describe('Signature', () => { .type('json') .send({ auth, - method: 'getLedgersCsv', + method: 'getLedgersFile', params: { symbol: ['BTC'], end, @@ -146,7 +146,7 @@ describe('Signature', () => { .expect('Content-Type', /json/) .expect(200) - await testMethodOfGettingCsv(procPromise, aggrPromise, res) + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) }) it('it should be successfully performed by the verifyDigitalSignature method', async function () { diff --git a/test/4-queue-base.spec.js b/test/4-queue-base.spec.js index ce40e28f..9f832cf0 100644 --- a/test/4-queue-base.spec.js +++ b/test/4-queue-base.spec.js @@ -1,7 +1,6 @@ 'use strict' const path = require('path') -const { assert } = require('chai') const request = require('supertest') const { @@ -10,25 +9,20 @@ const { } = require('./helpers/helpers.boot') const { rmDB, - rmAllFiles, - queueToPromise, - queueToPromiseMulti + rmAllFiles } = require('./helpers/helpers.core') const { createMockRESTv2SrvWithDate } = require('./helpers/helpers.mock-rest-v2') const { - testMethodOfGettingCsv, - testProcQueue -} = require('./helpers/helpers.tests') + reportFileGenerations +} = require('./test-cases') process.env.NODE_CONFIG_DIR = path.join(__dirname, 'config') const { app } = require('bfx-report-express') const agent = request.agent(app) let wrkReportServiceApi = null -let processorQueue = null -let aggregatorQueue = null let mockRESTv2Srv = null const basePath = '/api' @@ -45,6 +39,18 @@ const auth = { const mockDataAmount = 10000 describe('Queue', () => { + const params = { + mockDataAmount, + processorQueue: null, + aggregatorQueue: null, + basePath, + auth, + email, + date, + end, + start + } + before(async function () { this.timeout(20000) @@ -55,12 +61,12 @@ describe('Queue', () => { const env = await startEnvironment() wrkReportServiceApi = env.wrksReportServiceApi[0] - 
processorQueue = wrkReportServiceApi.lokue_processor.q - aggregatorQueue = wrkReportServiceApi.lokue_aggregator.q + params.processorQueue = wrkReportServiceApi.lokue_processor.q + params.aggregatorQueue = wrkReportServiceApi.lokue_aggregator.q }) after(async function () { - this.timeout(5000) + this.timeout(20000) await stopEnvironment() await rmDB(dbDirPath) @@ -71,849 +77,12 @@ describe('Queue', () => { } catch (err) { } }) - it('it should be successfully performed by the getMultipleCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getMultipleCsv', - params: { - email, - language: 'ru', - multiExport: [ - { - method: 'getTradesCsv', - symbol: ['tBTCUSD', 'tETHUSD'], - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles' - }, - { - method: 'getTickersHistoryCsv', - symbol: 'BTC', - end, - start, - limit: 1000 - } - ] - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should not be successfully performed by the getMultipleCsv method', async function () { - this.timeout(60000) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getMultipleCsv', - params: { - email, - multiExport: [ - { - symbol: ['tBTCUSD', 'tETHUSD'], - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles' - } - ] - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(400) - - assert.isObject(res.body) - assert.isObject(res.body.error) - assert.propertyVal(res.body.error, 'code', 400) - assert.propertyVal(res.body.error, 'message', 'Args params is not valid') - assert.propertyVal(res.body, 'id', 5) - }) - - it('it should be successfully performed by the getTickersHistoryCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getTickersHistoryCsv', - params: { - symbol: 'BTC', - end, - start, - limit: 1000, - email, - language: 'zh-TW' - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getPositionsHistoryCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getPositionsHistoryCsv', - params: { - symbol: 'tBTCUSD', - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getActivePositionsCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getActivePositionsCsv', - params: { - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, 
res) - }) - - it('it should be successfully performed by the getPositionsAuditCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getPositionsAuditCsv', - params: { - id: [12345], - symbol: 'tBTCUSD', - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getWalletsCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getWalletsCsv', - params: { - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getFundingOfferHistoryCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getFundingOfferHistoryCsv', - params: { - symbol: 'fUSD', - end, - start, - limit: 1000, - email, - milliseconds: true - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getFundingLoanHistoryCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getFundingLoanHistoryCsv', - params: { - symbol: 'fUSD', - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getFundingCreditHistoryCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getFundingCreditHistoryCsv', - params: { - symbol: 'fUSD', - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getLedgersCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getLedgersCsv', - params: { - symbol: ['BTC'], - end, - start, - limit: 1000, - timezone: -3, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getPayInvoiceListCsv method', async function () { - this.timeout(60000) - - const procPromise 
= queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getPayInvoiceListCsv', - params: { - end, - start, - limit: 100, - timezone: -3, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getTradesCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getTradesCsv', - params: { - symbol: ['tBTCUSD', 'tETHUSD'], - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getFundingTradesCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getFundingTradesCsv', - params: { - symbol: ['fBTC'], - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getStatusMessagesCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getStatusMessagesCsv', - params: { - symbol: ['tBTCF0:USTF0'], - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getPublicTradesCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getPublicTradesCsv', - params: { - symbol: 'tBTCUSD', - end, - start: (new Date()).setDate(date.getDate() - 27), - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getCandlesCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getCandlesCsv', - params: { - symbol: 'tBTCUSD', - end, - start: (new Date()).setDate(date.getDate() - 27), - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should not be successfully performed by the getPublicTradesCsv method, 
time frame more then a month', async function () { - this.timeout(60000) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getPublicTradesCsv', - params: { - symbol: 'tBTCUSD', - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(422) - - assert.isObject(res.body) - assert.isObject(res.body.error) - assert.propertyVal(res.body.error, 'code', 422) - assert.propertyVal(res.body.error, 'message', 'For public trades export please select a time frame smaller than a month') - assert.propertyVal(res.body, 'id', 5) - }) - - it('it should not be successfully performed by the getPublicTradesCsv method, with symbol array', async function () { - this.timeout(60000) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getPublicTradesCsv', - params: { - symbol: ['tBTCUSD', 'tETHUSD'], - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(400) - - assert.isObject(res.body) - assert.isObject(res.body.error) - assert.propertyVal(res.body.error, 'code', 400) - assert.propertyVal(res.body.error, 'message', 'Args params is not valid') - assert.propertyVal(res.body, 'id', 5) + describe('CSV generation', () => { + reportFileGenerations(agent, params) }) - it('it should be successfully performed by the getOrderTradesCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getOrderTradesCsv', - params: { - id: 12345, - symbol: 'tBTCUSD', - end, - start, - limit: 1000, - timezone: 'America/Los_Angeles', - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getOrdersCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getOrdersCsv', - params: { - symbol: 'tBTCUSD', - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getActiveOrdersCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getActiveOrdersCsv', - params: { - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getMovementsCsv method', async function () { - this.timeout(3 * 60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getMovementsCsv', - params: { - symbol: 'BTC', - end, - start, - limit: 10000, - email - }, - id: 5 - }) - 
.expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getMovementsCsv method, where amount > 0', async function () { - this.timeout(3 * 60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getMovementsCsv', - params: { - symbol: 'BTC', - end, - start, - limit: 10000, - email, - isDeposits: true - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getMovementsCsv method, where amount < 0', async function () { - this.timeout(3 * 60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getMovementsCsv', - params: { - symbol: 'BTC', - end, - start, - limit: 10000, - email, - isWithdrawals: true - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getLoginsCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getLoginsCsv', - params: { - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getChangeLogsCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getChangeLogsCsv', - params: { - end, - start, - limit: 1000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should be successfully performed by the getWeightedAveragesReportCsv method', async function () { - this.timeout(60000) - - const procPromise = queueToPromise(processorQueue) - const aggrPromise = queueToPromise(aggregatorQueue) - - const _start = end - ((end - start) / (mockDataAmount / 2000)) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getWeightedAveragesReportCsv', - params: { - symbol: 'tBTCUSD', - end, - start: _start, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - await testMethodOfGettingCsv(procPromise, aggrPromise, res) - }) - - it('it should not be successfully auth by the getLedgersCsv method', async function () { - this.timeout(60000) - - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - method: 'getLedgersCsv', - params: { - symbol: 'BTC', - end, - start, - limit: 10000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(401) - - assert.isObject(res.body) - assert.propertyVal(res.body, 'id', 5) - assert.isObject(res.body.error) - assert.containsAllKeys(res.body.error, [ - 'code', - 
'message' - ]) - }) - - it('it should be successfully performed by the getLedgersCsv method, with multiple users', async function () { - this.timeout(5 * 60000) - - const count = 10 - const procPromise = queueToPromiseMulti( - processorQueue, - count, - testProcQueue - ) - const aggrPromise = queueToPromiseMulti(aggregatorQueue, count) - - for (let i = 0; i < count; i += 1) { - const res = await agent - .post(`${basePath}/json-rpc`) - .type('json') - .send({ - auth, - method: 'getLedgersCsv', - params: { - symbol: 'BTC', - end, - start, - limit: 10000, - email - }, - id: 5 - }) - .expect('Content-Type', /json/) - .expect(200) - - assert.isObject(res.body) - assert.propertyVal(res.body, 'id', 5) - assert.isObject(res.body.result) - assert.isOk(res.body.result.isSendEmail || res.body.result.isSaveLocaly) - } - - await procPromise - await aggrPromise + describe('PDF generation', () => { + params.isPDFRequired = true + reportFileGenerations(agent, params) }) }) diff --git a/test/5-queue-load.spec.js b/test/5-queue-load.spec.js index d8e8edf5..f4f82d3d 100644 --- a/test/5-queue-load.spec.js +++ b/test/5-queue-load.spec.js @@ -65,7 +65,7 @@ describe('Queue load', () => { }) after(async function () { - this.timeout(10000) + this.timeout(20000) await stopEnvironment() await rmDB(dbDirPath) @@ -76,7 +76,7 @@ describe('Queue load', () => { } catch (err) { } }) - it('it should be successfully performed by the getLedgersCsv method, with 100 users', async function () { + it('it should be successfully performed by the getLedgersFile method, with 100 users', async function () { this.timeout(10 * 60000) const count = 100 @@ -98,7 +98,7 @@ describe('Queue load', () => { .type('json') .send({ auth, - method: 'getLedgersCsv', + method: 'getLedgersFile', params: { symbol: 'BTC', end, diff --git a/test/helpers/helpers.tests.js b/test/helpers/helpers.tests.js index 964b89bf..a9baab05 100644 --- a/test/helpers/helpers.tests.js +++ b/test/helpers/helpers.tests.js @@ -41,7 +41,7 @@ const testProcQueue = (procRes, opts) => { assert.isBoolean(procRes.isUnauth) } -const testMethodOfGettingCsv = async ( +const testMethodOfGettingReportFile = async ( procPromise, aggrPromise, res, @@ -83,6 +83,6 @@ const testMethodOfGettingCsv = async ( } module.exports = { - testMethodOfGettingCsv, + testMethodOfGettingReportFile, testProcQueue } diff --git a/test/helpers/helpers.worker.js b/test/helpers/helpers.worker.js index 132c7074..398d853a 100644 --- a/test/helpers/helpers.worker.js +++ b/test/helpers/helpers.worker.js @@ -22,6 +22,7 @@ const startHelpers = ( { name: 's3', port: 13371 }, { name: 'sendgrid', port: 1310 }, { name: 'gpg', port: 1320 }, + { name: 'pdf', port: 1330 }, { name: 'testcalls', port: 1300 } ] ) => { diff --git a/test/simulate/mocks-spies/pdf.js b/test/simulate/mocks-spies/pdf.js new file mode 100644 index 00000000..2999ce1e --- /dev/null +++ b/test/simulate/mocks-spies/pdf.js @@ -0,0 +1,46 @@ +'use strict' + +const workerArgs = ['rest:ext:pdf'] + +const getMockBuffer = (template) => Buffer.from(template, 'utf8') + +function addFunctions (ExtApi) { + ExtApi.prototype.createPDFBuffer = async function (space, args, cb) { + const { + template, + format, + orientation + } = args + + if (!template) return cb(new Error('ERR_API_NO_TEMPLATE')) + if (!format) return cb(new Error('ERR_API_NO_FORMAT')) + if (!orientation) return cb(new Error('ERR_API_NO_ORIENTATION')) + + const pdfBuffer = getMockBuffer(template) + + const grcBfx = this.ctx.grc_bfx + const call = { + worker: 'ext.pdf', + on: 'createPDFBuffer', + 
params: { args }, + res: { pdfBuffer }, + timestamp: Date.now() + } + + grcBfx.req( + 'rest:ext:testcalls', + 'addCall', + [call], + { timeout: 10000 }, + (err, data) => { + if (err) cb(new Error('ext.pdf:createPDFBuffer:testcalls')) + else return cb(null, pdfBuffer) + } + ) + } +} + +module.exports = { + addFunctions, + workerArgs +} diff --git a/test/test-cases/index.js b/test/test-cases/index.js new file mode 100644 index 00000000..21eb07c8 --- /dev/null +++ b/test/test-cases/index.js @@ -0,0 +1,9 @@ +'use strict' + +const reportFileGenerations = require( + './report-file-generations' +) + +module.exports = { + reportFileGenerations +} diff --git a/test/test-cases/report-file-generations.js b/test/test-cases/report-file-generations.js new file mode 100644 index 00000000..81b7b467 --- /dev/null +++ b/test/test-cases/report-file-generations.js @@ -0,0 +1,909 @@ +'use strict' + +const { assert } = require('chai') + +const { + queueToPromise, + queueToPromiseMulti +} = require('../helpers/helpers.core') +const { + testMethodOfGettingReportFile, + testProcQueue +} = require('../helpers/helpers.tests') + +module.exports = ( + agent, + params = {} +) => { + const { + mockDataAmount, + basePath, + auth, + email, + date, + end, + start, + isPDFRequired + } = params + + it('it should be successfully performed by the getMultipleFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getMultipleFile', + params: { + email, + language: 'ru', + isPDFRequired, + multiExport: [ + { + method: 'getTradesFile', + symbol: ['tBTCUSD', 'tETHUSD'], + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles' + }, + { + method: 'getTickersHistoryFile', + symbol: 'BTC', + end, + start, + limit: 1000 + } + ] + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should not be successfully performed by the getMultipleFile method', async function () { + this.timeout(60000) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getMultipleFile', + params: { + email, + isPDFRequired, + multiExport: [ + { + symbol: ['tBTCUSD', 'tETHUSD'], + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles' + } + ] + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(400) + + assert.isObject(res.body) + assert.isObject(res.body.error) + assert.propertyVal(res.body.error, 'code', 400) + assert.propertyVal(res.body.error, 'message', 'Args params is not valid') + assert.propertyVal(res.body, 'id', 5) + }) + + it('it should be successfully performed by the getTickersHistoryFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getTickersHistoryFile', + params: { + isPDFRequired, + symbol: 'BTC', + end, + start, + limit: 1000, + email, + language: 'zh-TW' + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getPositionsHistoryFile method', async function () { + this.timeout(60000) + 
+ const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getPositionsHistoryFile', + params: { + isPDFRequired, + symbol: 'tBTCUSD', + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getActivePositionsFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getActivePositionsFile', + params: { + isPDFRequired, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getPositionsAuditFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getPositionsAuditFile', + params: { + isPDFRequired, + id: [12345], + symbol: 'tBTCUSD', + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getWalletsFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getWalletsFile', + params: { + isPDFRequired, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getFundingOfferHistoryFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getFundingOfferHistoryFile', + params: { + isPDFRequired, + symbol: 'fUSD', + end, + start, + limit: 1000, + email, + milliseconds: true + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getFundingLoanHistoryFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getFundingLoanHistoryFile', + params: { + isPDFRequired, + symbol: 'fUSD', + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getFundingCreditHistoryFile method', async 
function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getFundingCreditHistoryFile', + params: { + isPDFRequired, + symbol: 'fUSD', + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getLedgersFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getLedgersFile', + params: { + isPDFRequired, + symbol: ['BTC'], + end, + start, + limit: 1000, + timezone: -3, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getPayInvoiceListFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getPayInvoiceListFile', + params: { + isPDFRequired, + end, + start, + limit: 100, + timezone: -3, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getTradesFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getTradesFile', + params: { + isPDFRequired, + symbol: ['tBTCUSD', 'tETHUSD'], + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getFundingTradesFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getFundingTradesFile', + params: { + isPDFRequired, + symbol: ['fBTC'], + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getStatusMessagesFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getStatusMessagesFile', + params: { + isPDFRequired, + symbol: ['tBTCF0:USTF0'], + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + 
.expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getPublicTradesFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getPublicTradesFile', + params: { + isPDFRequired, + symbol: 'tBTCUSD', + end, + start: (new Date()).setDate(date.getDate() - 27), + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getCandlesFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getCandlesFile', + params: { + isPDFRequired, + symbol: 'tBTCUSD', + end, + start: (new Date()).setDate(date.getDate() - 27), + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should not be successfully performed by the getPublicTradesFile method, time frame more then a month', async function () { + this.timeout(60000) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getPublicTradesFile', + params: { + isPDFRequired, + symbol: 'tBTCUSD', + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(422) + + assert.isObject(res.body) + assert.isObject(res.body.error) + assert.propertyVal(res.body.error, 'code', 422) + assert.propertyVal(res.body.error, 'message', 'For public trades export please select a time frame smaller than a month') + assert.propertyVal(res.body, 'id', 5) + }) + + it('it should not be successfully performed by the getPublicTradesFile method, with symbol array', async function () { + this.timeout(60000) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getPublicTradesFile', + params: { + isPDFRequired, + symbol: ['tBTCUSD', 'tETHUSD'], + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(400) + + assert.isObject(res.body) + assert.isObject(res.body.error) + assert.propertyVal(res.body.error, 'code', 400) + assert.propertyVal(res.body.error, 'message', 'Args params is not valid') + assert.propertyVal(res.body, 'id', 5) + }) + + it('it should be successfully performed by the getOrderTradesFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getOrderTradesFile', + params: { + isPDFRequired, + id: 12345, + symbol: 'tBTCUSD', + end, + start, + limit: 1000, + timezone: 'America/Los_Angeles', + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, 
res) + }) + + it('it should be successfully performed by the getOrdersFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getOrdersFile', + params: { + isPDFRequired, + symbol: 'tBTCUSD', + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getActiveOrdersFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getActiveOrdersFile', + params: { + isPDFRequired, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getMovementsFile method', async function () { + this.timeout(3 * 60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getMovementsFile', + params: { + isPDFRequired, + symbol: 'BTC', + end, + start, + limit: 10000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getMovementsFile method, where amount > 0', async function () { + this.timeout(3 * 60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getMovementsFile', + params: { + isPDFRequired, + symbol: 'BTC', + end, + start, + limit: 10000, + email, + isDeposits: true + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getMovementsFile method, where amount < 0', async function () { + this.timeout(3 * 60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getMovementsFile', + params: { + isPDFRequired, + symbol: 'BTC', + end, + start, + limit: 10000, + email, + isWithdrawals: true + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getLoginsFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getLoginsFile', + params: { + isPDFRequired, + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await 
testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getChangeLogsFile method', async function () { + this.timeout(60000) + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getChangeLogsFile', + params: { + isPDFRequired, + end, + start, + limit: 1000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should be successfully performed by the getWeightedAveragesReportFile method', async function () { + this.timeout(60000) + + // TODO: Wait for implementation of pdf template in the next release + if (isPDFRequired) { + this.skip() + } + + const procPromise = queueToPromise(params.processorQueue) + const aggrPromise = queueToPromise(params.aggregatorQueue) + + const _start = end - ((end - start) / (mockDataAmount / 2000)) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getWeightedAveragesReportFile', + params: { + isPDFRequired, + symbol: 'tBTCUSD', + end, + start: _start, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + await testMethodOfGettingReportFile(procPromise, aggrPromise, res) + }) + + it('it should not be successfully auth by the getLedgersFile method', async function () { + this.timeout(60000) + + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + method: 'getLedgersFile', + params: { + isPDFRequired, + symbol: 'BTC', + end, + start, + limit: 10000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(401) + + assert.isObject(res.body) + assert.propertyVal(res.body, 'id', 5) + assert.isObject(res.body.error) + assert.containsAllKeys(res.body.error, [ + 'code', + 'message' + ]) + }) + + it('it should be successfully performed by the getLedgersFile method, with multiple users', async function () { + this.timeout(5 * 60000) + + const count = 10 + const procPromise = queueToPromiseMulti( + params.processorQueue, + count, + testProcQueue + ) + const aggrPromise = queueToPromiseMulti(params.aggregatorQueue, count) + + for (let i = 0; i < count; i += 1) { + const res = await agent + .post(`${basePath}/json-rpc`) + .type('json') + .send({ + auth, + method: 'getLedgersFile', + params: { + isPDFRequired, + symbol: 'BTC', + end, + start, + limit: 10000, + email + }, + id: 5 + }) + .expect('Content-Type', /json/) + .expect(200) + + assert.isObject(res.body) + assert.propertyVal(res.body, 'id', 5) + assert.isObject(res.body.result) + assert.isOk(res.body.result.isSendEmail || res.body.result.isSaveLocaly) + } + + await procPromise + await aggrPromise + }) +} diff --git a/workers/api.service.report.wrk.js b/workers/api.service.report.wrk.js index b133e6ad..1003859d 100644 --- a/workers/api.service.report.wrk.js +++ b/workers/api.service.report.wrk.js @@ -8,9 +8,9 @@ const argv = require('yargs') type: 'number', default: 1 }) - .option('csvFolder', { + .option('reportFolder', { type: 'string', - default: 'csv' + default: 'report-files' }) .option('tempFolder', { type: 'string', @@ -31,7 +31,7 @@ const argv = require('yargs') type: 'boolean', default: false }) - .option('remoteCsvUrn', { + .option('remoteReportUrn', { type: 'string', default: '' }) @@ -70,9 +70,18 @@ class WrkReportServiceApi extends WrkApi { 
loadDIConfig (cont = container) { const conf = this.conf[this.group] + /** + * @deprecated isAddedUniqueEndingToCsvName + * Keep for back compatibility + */ + conf.isAddedUniqueEndingToReportFileName = ( + conf.isAddedUniqueEndingToReportFileName ?? + conf.isAddedUniqueEndingToCsvName + ) + this.container = cont - diConfig(conf) + diConfig(conf, this.ctx.root) } loadCoreDeps (...args) { @@ -101,7 +110,7 @@ class WrkReportServiceApi extends WrkApi { names = [ 'isSpamRestrictionMode', 'isLoggerDisabled', - 'remoteCsvUrn' + 'remoteReportUrn' ] ) { const conf = this.conf[this.group] diff --git a/workers/loc.api/di/app.deps.js b/workers/loc.api/di/app.deps.js index 00255759..c66db1d3 100644 --- a/workers/loc.api/di/app.deps.js +++ b/workers/loc.api/di/app.deps.js @@ -23,13 +23,14 @@ const aggregator = require('../queue/aggregator') const writeDataToStream = require('../queue/write-data-to-stream') const uploadToS3 = require('../queue/upload-to-s3') const sendMail = require('../queue/send-mail') -const generateCsv = require('../generate-csv') -const CsvJobData = require('../generate-csv/csv.job.data') +const generateReportFile = require('../generate-report-file') +const PdfWriter = require('../generate-report-file/pdf-writer') +const ReportFileJobData = require('../generate-report-file/report.file.job.data') const Interrupter = require('../interrupter') const AbstractWSEventEmitter = require('../abstract.ws.event.emitter') const { weightedAveragesReportCsvWriter -} = require('../generate-csv/csv-writer') +} = require('../generate-report-file/csv-writer') const WeightedAveragesReport = require('../weighted.averages.report') const BfxApiRouter = require('../bfx.api.router') @@ -49,7 +50,7 @@ module.exports = ({ ['_getREST', TYPES.GetREST], ['_grcBfxReq', TYPES.GrcBfxReq], ['_prepareApiResponse', TYPES.PrepareApiResponse], - ['_generateCsv', TYPES.GenerateCsv], + ['_generateReportFile', TYPES.GenerateReportFile], ['_hasGrcService', TYPES.HasGrcService], ['_weightedAveragesReport', TYPES.WeightedAveragesReport] ]) @@ -121,16 +122,19 @@ module.exports = ({ bind(TYPES.GrcSlackFac).toConstantValue( grcSlackFac ) - bind(TYPES.CsvJobData) - .to(CsvJobData) + bind(TYPES.PdfWriter) + .to(PdfWriter) .inSingletonScope() - bind(TYPES.GenerateCsv) + bind(TYPES.ReportFileJobData) + .to(ReportFileJobData) + .inSingletonScope() + bind(TYPES.GenerateReportFile) .toDynamicValue(() => bindDepsToFn( - generateCsv, + generateReportFile, [ TYPES.ProcessorQueue, TYPES.HasGrcService, - TYPES.CsvJobData, + TYPES.ReportFileJobData, TYPES.RService, TYPES.RootPath, TYPES.CONF @@ -163,18 +167,18 @@ module.exports = ({ [TYPES.GrcBfxReq] ) ) - bind(TYPES.Processor).toConstantValue( - bindDepsToFn( + bind(TYPES.Processor) + .toDynamicValue(() => bindDepsToFn( processor, [ TYPES.CONF, TYPES.RootPath, TYPES.ProcessorQueue, TYPES.AggregatorQueue, - TYPES.WriteDataToStream + TYPES.WriteDataToStream, + TYPES.PdfWriter ] - ) - ) + )) bind(TYPES.Aggregator).toConstantValue( bindDepsToFn( aggregator, diff --git a/workers/loc.api/di/di.config.js b/workers/loc.api/di/di.config.js index e588e236..dfeb06da 100644 --- a/workers/loc.api/di/di.config.js +++ b/workers/loc.api/di/di.config.js @@ -3,7 +3,8 @@ const TYPES = require('./types') const container = require('./index') -module.exports = (conf) => { +module.exports = (conf, rootFolderPath) => { container.bind(TYPES.CONF).toConstantValue(conf) + container.bind(TYPES.ROOT_FOLDER_PATH).toConstantValue(rootFolderPath) container.bind(TYPES.Container).toConstantValue(container) } diff --git 
a/workers/loc.api/di/types.js b/workers/loc.api/di/types.js index cf3c2575..693bb2f6 100644 --- a/workers/loc.api/di/types.js +++ b/workers/loc.api/di/types.js @@ -2,6 +2,7 @@ module.exports = { CONF: Symbol.for('CONF'), + ROOT_FOLDER_PATH: Symbol.for('ROOT_FOLDER_PATH'), FOREX_SYMBS: Symbol.for('FOREX_SYMBS'), RootPath: Symbol.for('RootPath'), Container: Symbol.for('Container'), @@ -22,17 +23,18 @@ module.exports = { HasGrcService: Symbol.for('HasGrcService'), ProcessorQueue: Symbol.for('ProcessorQueue'), AggregatorQueue: Symbol.for('AggregatorQueue'), - GenerateCsv: Symbol.for('GenerateCsv'), + GenerateReportFile: Symbol.for('GenerateReportFile'), Processor: Symbol.for('Processor'), Aggregator: Symbol.for('Aggregator'), WriteDataToStream: Symbol.for('WriteDataToStream'), UploadToS3: Symbol.for('UploadToS3'), SendMail: Symbol.for('SendMail'), - CsvJobData: Symbol.for('CsvJobData'), + ReportFileJobData: Symbol.for('ReportFileJobData'), Interrupter: Symbol.for('Interrupter'), AbstractWSEventEmitter: Symbol.for('AbstractWSEventEmitter'), GetDataFromApi: Symbol.for('GetDataFromApi'), WeightedAveragesReport: Symbol.for('WeightedAveragesReport'), WeightedAveragesReportCsvWriter: Symbol.for('WeightedAveragesReportCsvWriter'), - BfxApiRouter: Symbol.for('BfxApiRouter') + BfxApiRouter: Symbol.for('BfxApiRouter'), + PdfWriter: Symbol.for('PdfWriter') } diff --git a/workers/loc.api/errors/index.js b/workers/loc.api/errors/index.js index 05e42753..d12fed4e 100644 --- a/workers/loc.api/errors/index.js +++ b/workers/loc.api/errors/index.js @@ -68,11 +68,11 @@ class FindMethodError extends BaseError { } } -class FindMethodToGetCsvFileError extends BadRequestError { - constructor (message = 'ERR_METHOD_TO_GET_CSV_FILE_NOT_FOUND') { +class FindMethodToGetReportFileError extends BadRequestError { + constructor (message = 'ERR_METHOD_TO_GET_REPORT_FILE_NOT_FOUND') { super(message) - this.statusMessage = 'Method to get csv file not found' + this.statusMessage = 'Method to get report file not found' } } @@ -164,6 +164,12 @@ class GrcSlackAvailabilityError extends BaseError { } } +class GrcPDFAvailabilityError extends BaseError { + constructor (message = 'ERR_GRC_PDF_IS_NOT_AVAILABLE') { + super(message) + } +} + class WeightedAveragesTimeframeError extends UnprocessableEntityError { constructor (message = 'ERR_TIME_FRAME_MORE_THAN_TWO_YEARS') { super(message) @@ -180,7 +186,7 @@ module.exports = { UnprocessableEntityError, FindMethodError, - FindMethodToGetCsvFileError, + FindMethodToGetReportFileError, ArgsParamsError, GrenacheServiceConfigArgsError, EmailSendingError, @@ -193,6 +199,7 @@ module.exports = { ArgsParamsFilterError, LedgerPaymentFilteringParamsError, GrcSlackAvailabilityError, + GrcPDFAvailabilityError, ImplementationError, WeightedAveragesTimeframeError } diff --git a/workers/loc.api/generate-csv/csv-writer/helpers/index.js b/workers/loc.api/generate-report-file/csv-writer/helpers/index.js similarity index 100% rename from workers/loc.api/generate-csv/csv-writer/helpers/index.js rename to workers/loc.api/generate-report-file/csv-writer/helpers/index.js diff --git a/workers/loc.api/generate-csv/csv-writer/index.js b/workers/loc.api/generate-report-file/csv-writer/index.js similarity index 100% rename from workers/loc.api/generate-csv/csv-writer/index.js rename to workers/loc.api/generate-report-file/csv-writer/index.js diff --git a/workers/loc.api/generate-csv/csv-writer/weighted-averages-report-csv-writer.js 
b/workers/loc.api/generate-report-file/csv-writer/weighted-averages-report-csv-writer.js similarity index 100% rename from workers/loc.api/generate-csv/csv-writer/weighted-averages-report-csv-writer.js rename to workers/loc.api/generate-report-file/csv-writer/weighted-averages-report-csv-writer.js diff --git a/workers/loc.api/generate-csv/index.js b/workers/loc.api/generate-report-file/index.js similarity index 65% rename from workers/loc.api/generate-csv/index.js rename to workers/loc.api/generate-report-file/index.js index 492536f8..b64e9d01 100644 --- a/workers/loc.api/generate-csv/index.js +++ b/workers/loc.api/generate-report-file/index.js @@ -6,13 +6,14 @@ const { normalizeFilterParams } = require('../helpers') const { - EmailSendingError + EmailSendingError, + GrcPDFAvailabilityError } = require('../errors') -const getLocalCsvFolderPaths = require( - '../queue/helpers/get-local-csv-folder-paths' +const getLocalReportFolderPaths = require( + '../queue/helpers/get-local-report-folder-paths' ) -const _getCsvStoreStatus = async ({ +const _getReportFileStoreStatus = async ({ hasGrcService, args, rootPath, @@ -26,27 +27,39 @@ const _getCsvStoreStatus = async ({ typeof email !== 'string' ) { const { - localCsvFolderPath - } = getLocalCsvFolderPaths(rootPath) - const remoteCsvUrn = ( + localReportFolderPath + } = getLocalReportFolderPaths(rootPath) + const remoteReportUrn = ( token && typeof token === 'string' && - conf?.remoteCsvUrn && - typeof conf?.remoteCsvUrn === 'string' + conf?.remoteReportUrn && + typeof conf?.remoteReportUrn === 'string' ) - ? `${conf?.remoteCsvUrn}?token=${token}` + ? `${conf?.remoteReportUrn}?token=${token}` : null return { isSaveLocaly: true, - localCsvFolderPath, - remoteCsvUrn + localReportFolderPath, + remoteReportUrn, + + /** + * @deprecated fields + */ + localCsvFolderPath: localReportFolderPath, + remoteCsvUrn: remoteReportUrn } } if (!await hasGrcService.hasS3AndSendgrid()) { throw new EmailSendingError() } + if ( + args?.params?.isPDFRequired && + !await hasGrcService.hasPDFService() + ) { + throw new GrcPDFAvailabilityError() + } return { isSendEmail: true } } @@ -54,21 +67,21 @@ const _getCsvStoreStatus = async ({ const _filterModelNameMap = Object.values(FILTER_MODELS_NAMES) .reduce((map, name) => { const baseName = `${name[0].toUpperCase()}${name.slice(1)}` - const key = `get${baseName}CsvJobData` + const key = `get${baseName}FileJobData` map.set(key, name) return map }, new Map()) -const _truncateCsvNameEnding = (name) => { +const _truncateFileNameEnding = (name) => { if (!name) { return name } const cleanedName = name .replace(/^get/i, '') - .replace(/csv$/i, '') + .replace(/(file)|(csv)$/i, '') return `${cleanedName[0].toLowerCase()}${cleanedName.slice(1)}` } @@ -77,9 +90,9 @@ const _getFilterModelNamesAndArgs = ( name, reqArgs ) => { - if (name !== 'getMultipleCsvJobData') { + if (name !== 'getMultipleFileJobData') { const filterModelName = _filterModelNameMap.get(name) - const truncatedName = _truncateCsvNameEnding(name) + const truncatedName = _truncateFileNameEnding(name) const args = normalizeFilterParams(truncatedName, reqArgs) return [{ @@ -97,7 +110,7 @@ const _getFilterModelNamesAndArgs = ( return _multiExport.map((params) => { const { method } = { ...params } const name = `${method}JobData` - const truncatedName = _truncateCsvNameEnding(method) + const truncatedName = _truncateFileNameEnding(method) const args = normalizeFilterParams(truncatedName, { params }) const filterModelName = _filterModelNameMap.get(name) @@ -111,7 +124,7 @@ 
const _getFilterModelNamesAndArgs = ( module.exports = ( processorQueue, hasGrcService, - csvJobData, + reportFileJobData, rService, rootPath, conf @@ -121,7 +134,7 @@ module.exports = ( ) => { const user = await rService.verifyUser(null, args) - const status = await _getCsvStoreStatus({ + const status = await _getReportFileStoreStatus({ hasGrcService, args, rootPath, @@ -136,7 +149,7 @@ module.exports = ( checkFilterParams(filterModelName, args) } - const getter = csvJobData[name].bind(csvJobData) + const getter = reportFileJobData[name].bind(reportFileJobData) const jobData = await getter(args, null, user) processorQueue.addJob(jobData) diff --git a/workers/loc.api/generate-report-file/pdf-writer/index.js b/workers/loc.api/generate-report-file/pdf-writer/index.js new file mode 100644 index 00000000..072a697f --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/index.js @@ -0,0 +1,322 @@ +'use strict' + +const { Transform } = require('stream') +const path = require('path') +const fs = require('fs') +const pug = require('pug') +const yaml = require('js-yaml') +const { merge } = require('lib-js-util-base') + +const getCompleteFileName = require('../../queue/helpers/get-complete-file-name') +const getTranslator = require('../../helpers/get-translator') +const { + getDateNotLessMinStart +} = require('../../helpers/date-param.helpers') +const { + GrcPDFAvailabilityError +} = require('../../errors') +const TEMPLATE_FILE_NAMES = require('./template-file-names') + +const placeholderPattern = /\$\{[a-zA-Z0-9]+\}/g +const pathToFonts = path.join(__dirname, 'templates/fonts') +const fontsTemplate = fs + .readFileSync(path.join(pathToFonts, 'fonts.css'), 'utf8') +const base64Fonts = { + interRegular: fs + .readFileSync(path.join(pathToFonts, 'Inter-Regular.ttf'), 'base64') +} + +const { decorateInjectable } = require('../../di/utils') + +const depsTypes = (TYPES) => [ + TYPES.ROOT_FOLDER_PATH, + TYPES.HasGrcService, + TYPES.GrcBfxReq +] +class PdfWriter { + #fonts = this.#renderFontsTemplate(fontsTemplate, base64Fonts) + #translationsArr = [] + #translations = {} + #templatePaths = new Map() + #templates = new Map() + + constructor ( + rootFolderPath, + hasGrcService, + grcBfxReq + ) { + this.rootFolderPath = rootFolderPath + this.hasGrcService = hasGrcService + this.grcBfxReq = grcBfxReq + + this.isElectronjsEnv = false + + this.addTranslations() + this.addTemplates() + this.compileTemplate() + } + + async createPDFStream (opts) { + const pdfWriter = this + + return new Transform({ + writableObjectMode: true, + + construct (cb) { + this.data = [] + cb() + }, + transform (chunk, encoding, cb) { + if (Array.isArray(chunk)) { + this.data.push(...chunk) + cb() + + return + } + + this.data.push(chunk) + cb() + }, + flush (cb) { + pdfWriter.#processPdf( + this.data, + opts + ).then((buffer) => { + this.push(buffer) + this.push(null) + cb() + }).catch((err) => { + cb(err) + }) + } + }) + } + + async #processPdf ( + apiData, + opts + ) { + const template = await this.#renderTemplate( + apiData, + opts + ) + const buffer = await this.createPDFBuffer({ template }) + + return buffer + } + + async createPDFBuffer (args) { + const _args = { + template: 'No data', + format: 'portrait', + orientation: 'Letter', + ...args + } + + if (!await this.hasGrcService.hasPDFService()) { + throw new GrcPDFAvailabilityError() + } + + const bufferData = await this.grcBfxReq({ + service: 'rest:ext:pdf', + action: 'createPDFBuffer', + args: [_args] + }) + const res = Buffer.from(bufferData) + + return res + } + + 
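An illustrative consumption sketch for the createPDFStream Transform defined above: rows are written in object mode, and on flush the collected data is rendered through the pug template and emitted as a single PDF buffer. The pdfWriter instance, the jobData shape and the output path below are assumptions for illustration only; the actual wiring goes through the DI container and the queue processor changes later in this diff, and rendering requires the external 'rest:ext:pdf' Grenache service to be reachable.

'use strict'

const fs = require('fs')
const { pipeline } = require('stream')

// Assumed: `pdfWriter` is a PdfWriter instance resolved from the DI container and
// `jobData` carries `name`, `args.params` and `columnsCsv` the same way the queue
// processor passes them to the writer.
const writePdfReport = async (pdfWriter, jobData, rows, outputPath) => {
  const pdfStream = await pdfWriter.createPDFStream({
    jobData,
    language: jobData?.args?.params?.language ?? 'en'
  })
  const writable = fs.createWriteStream(outputPath)

  // The readable side emits a single Buffer with the whole PDF once `end()` is called
  pipeline(pdfStream, writable, (err) => {
    if (err) console.error(err)
  })

  // Object-mode writes; arrays are spread into the internal data accumulator
  for (const row of rows) {
    pdfStream.write(row)
  }
  pdfStream.end()
}

module.exports = writePdfReport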
#getTemplate (pdfCustomTemplateName, language) { + const templateKey = this.#getTemplateKey( + pdfCustomTemplateName ?? TEMPLATE_FILE_NAMES.MAIN, + language + ) + + if (this.#templates.has(templateKey)) { + return this.#templates.get(templateKey) + } + + return this.#templates.get(this.#getTemplateKey( + TEMPLATE_FILE_NAMES.MAIN, + 'en' + )) + } + + async #renderTemplate ( + apiData, + opts + ) { + const { + jobData, + pdfCustomTemplateName, + language = 'en', + isError + } = opts ?? {} + + const template = this.#getTemplate( + pdfCustomTemplateName, + language + ) + const { + date, + readableBaseName + } = getCompleteFileName( + jobData.name, + jobData.args.params + ) + const reportColumns = jobData?.columnsPdf ?? jobData?.columnsCsv + + const html = template({ + isElectronjsEnv: this.isElectronjsEnv, + apiData, + jobData, + reportColumns, + language, + isError, + reportName: readableBaseName ?? 'Report table', + start: new Date( + getDateNotLessMinStart(jobData?.args?.params?.start) + ), + end: Number.isFinite(jobData?.args?.params?.end) + ? new Date(jobData.args.params.end) + : new Date(), + date: date instanceof Date + ? date + : new Date() + }) + + return html + } + + #getTranslator (language) { + return getTranslator({ + language, + translations: this.#translations + }) + } + + addTranslations (trans = this.loadTranslations()) { + const translationsArr = Array.isArray(trans) + ? trans + : [trans] + + this.#translationsArr.push(...translationsArr) + this.#translations = merge({}, ...this.#translationsArr) + } + + loadTranslations ( + pathToTrans = path.join(__dirname, 'translations.yml') + ) { + const translations = yaml.load( + fs.readFileSync(pathToTrans, 'utf8') + ) + + return translations + } + + addTemplates (params) { + const { + fileNames = TEMPLATE_FILE_NAMES, + templateFolderPath = path.join(__dirname, 'templates') + } = params ?? 
{} + + const _fileNames = this.#getFileNameArray(fileNames) + + for (const fileName of _fileNames) { + if ( + !fileName || + typeof fileName !== 'string' + ) { + continue + } + + this.#templatePaths.set( + fileName, + path.join(templateFolderPath, fileName) + ) + } + } + + #getFileNameArray (fileNames) { + if (Array.isArray(fileNames)) { + return fileNames + } + if ( + fileNames && + typeof fileNames === 'object' + ) { + return Object.values(fileNames) + } + + return [fileNames] + } + + compileTemplate (opts) { + const _opts = { + basedir: this.rootFolderPath, + pretty: true, + ...opts + } + const languages = this.#getAvailableLanguages() + + for (const language of languages) { + const translate = this.#getTranslator(language) + + for (const [templateFileName, templatePath] of this.#templatePaths) { + const fn = pug.compileFile(templatePath, { + ..._opts, + language, + filters: { + translate, + fonts: () => this.#fonts + } + }) + const templateKey = this.#getTemplateKey( + templateFileName, + language + ) + + this.#templates.set(templateKey, fn) + } + } + } + + #getAvailableLanguages () { + const languages = Object.keys(this.#translations) + + if (languages.every((lang) => lang !== 'en')) { + languages.push('en') + } + + return languages + } + + #getTemplateKey (templateFileName, language) { + return `${templateFileName}:${language}` + } + + #renderFontsTemplate ( + template, + params = {} + ) { + const str = template.replace(placeholderPattern, (match) => { + const propName = match.replace('${', '').replace('}', '') + + if ( + !Number.isFinite(params?.[propName]) && + typeof params?.[propName] !== 'string' + ) { + return '' + } + + return params[propName] + }) + + return str + } +} + +decorateInjectable(PdfWriter, depsTypes) + +module.exports = PdfWriter diff --git a/workers/loc.api/generate-report-file/pdf-writer/template-file-names.js b/workers/loc.api/generate-report-file/pdf-writer/template-file-names.js new file mode 100644 index 00000000..df828418 --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/template-file-names.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = { + MAIN: 'main.pug' +} diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/base.pug b/workers/loc.api/generate-report-file/pdf-writer/templates/base.pug new file mode 100644 index 00000000..5c190652 --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/templates/base.pug @@ -0,0 +1,155 @@ +doctype html +html(lang=language) + head + meta(charset='utf-8') + title + :translate(prop='template.title') + Report + style + :fonts + include style.css + + //- Workaround for font size + //- https://github.com/marcbachmann/node-html-pdf/issues/619 + //- https://github.com/marcbachmann/node-html-pdf/issues/525 + //- ratio for rendering: 72dpi (pdf) / 96dpi (browser), 72/96 = 0.75 + if process.platform !== 'win32' && !isElectronjsEnv + style @media print { html { zoom: 0.75; } } + style @media print { .header, .footer { zoom: 1; } } + else + style @media print { html { zoom: 1; } } + + body.body + mixin logo + div(class='logo') + svg(class='logo-svg' viewbox='0 0 165 20' fill='none' xmlns='http://www.w3.org/2000/svg') + path(fill='#1b262d' d='M141.437 19.662a.2.2 0 01-.144.338h-4.386a.198.198 0 01-.145-.063l-5.83-6.166-5.894 6.167a.198.198 0 01-.144.062h-4.387a.2.2 0 01-.144-.338l8.036-8.421-7.764-8.183a.2.2 0 01.145-.338h4.387c.055 0 .108.023.146.063l5.576 5.928 5.598-5.928a.202.202 0 01.146-.063h4.387a.2.2 0 01.145.338l-7.764 8.183 8.036 8.42zM99.856 20a.278.278 0 
01-.278-.278V2.998c0-.153.125-.277.278-.277h16.571c.153 0 .277.124.277.277V5.39a.277.277 0 01-.277.277h-12.83v3.851h7.692c.153 0 .278.125.278.278v2.392a.278.278 0 01-.278.277h-7.692v4.287h12.997c.153 0 .278.124.278.277v2.693a.278.278 0 01-.278.278H99.856zM70.514 20a.278.278 0 01-.277-.278V2.998c0-.153.124-.277.277-.277h3.463c.154 0 .278.124.278.277v16.724a.278.278 0 01-.278.278h-3.463zM50.813 20a.278.278 0 01-.277-.278V2.998c0-.153.124-.277.277-.277h16.37c.153 0 .277.124.277.277v2.459a.278.278 0 01-.277.277H54.554v4.555h6.989c.153 0 .278.124.278.277v2.593a.278.278 0 01-.278.277h-6.989v6.286a.278.278 0 01-.278.278h-3.463zM40.067 5.801v13.921a.278.278 0 01-.277.278h-3.463a.278.278 0 01-.278-.278V5.802h-7.381a.278.278 0 01-.278-.278V2.998c0-.153.124-.277.278-.277h18.814c.153 0 .277.124.277.277v2.526a.278.278 0 01-.277.277h-7.415zM21.873 20a.278.278 0 01-.277-.278V2.998c0-.153.124-.277.277-.277h3.463c.154 0 .278.124.278.277v16.724a.278.278 0 01-.278.278h-3.463z') + path(fill='#1b262d' fill-rule='evenodd' clip-rule='evenodd' d='M0 2.998c0-.153.124-.277.278-.277h14.798a3.609 3.609 0 013.608 3.609v1.153a3.609 3.609 0 01-3.609 3.61h.135a3.61 3.61 0 013.61 3.608v1.69A3.609 3.609 0 0115.21 20H.278A.278.278 0 010 19.722V2.998zm3.949 2.536h9.353c.92 0 1.666.746 1.666 1.666v.787c0 .92-.746 1.665-1.666 1.665H3.949V5.534zm0 6.963h9.453c.92 0 1.666.746 1.666 1.666v1.057c0 .92-.746 1.665-1.666 1.665H3.95v-4.388z') + path(fill='#1b262d' d='M77.031 19.722V2.998c0-.153.124-.277.278-.277h2.363c.068 0 .133.024.184.07l13.33 11.798V2.999c0-.154.124-.278.277-.278h3.062c.153 0 .277.124.277.277v16.724a.278.278 0 01-.277.278h-2.363a.278.278 0 01-.184-.07L80.648 8.1v11.622a.278.278 0 01-.278.278H77.31a.278.278 0 01-.278-.278z') + path(fill='#03ca9b' d='M144.777 13.042c-.175-2.832 1.015-5.98 3.464-8.429 5.326-5.326 16.418-4.58 16.481-4.575-.03.044-8.139 11.8-17.748 12.9-.742.085-1.476.119-2.197.104z') + path(fill='#03ca9b' d='M145.902 16.662c.275.438.599.847.974 1.222 3.288 3.288 9.23 2.677 13.271-1.364 5.342-5.342 4.575-16.482 4.575-16.482-.029.066-5.842 13.095-15.08 15.937-1.254.386-2.511.61-3.74.687z') + + mixin title + div(class='title') + h1(class='title-text') + :translate(prop='template.title') + Report + + mixin statementDetails + - const email = jobData.userInfo.email ?? jobData.userInfo.username ?? '' + - const username = jobData.userInfo.username ?? email.replace(/@.*/, '') ?? '' + + .content.statement-details-content + ul.responsive-table.sm.width-by-content + li.table-header + .col + :translate(prop='template.statementDetails') + Statement Details + li.table-row + .col + :translate(prop='template.statementDate') + Statement date + .col + | #{date.toLocaleString(language, { timeZone: 'UTC' })} + li.table-row + if hasOnlyEndTimestamp + .col + :translate(prop='template.snapshotAt') + Snapshot at + .col + | #{end.toLocaleString(language, { timeZone: 'UTC' })} + else + .col + :translate(prop='template.period') + Period + .col + | #{start.toLocaleString(language, { timeZone: 'UTC' })} - #{end.toLocaleString(language, { timeZone: 'UTC' })} + li.table-row + .col + :translate(prop='template.username') + Username + .col + | #{username} + li.table-row + .col + :translate(prop='template.email') + Email + .col + | #{email} + + mixin commonContentHeader + if block + div(class='common-content-header') + block + + mixin contentTitle + h2(class='content-title uppercase-text'). 
+ #{reportName} + if block + block + + + block header + block pageHeader + div(id='pageHeader' class='header') + +logo + +title + +statementDetails + +commonContentHeader + +contentTitle + block commonHeader + + table.report-container.none-if-phantomjs + thead.report-header + tr + td + .header-space + |   + tbody.report-content + tr + td + div.content + if isError + p + :translate(prop='template.errorMessage') + Your file could not be completed, please try again + else + block content + tfoot.report-footer + tr + td + .footer-space + |   + + div.content.none-if-no-phantomjs + if isError + p + :translate(prop='template.errorMessage') + Your file could not be completed, please try again + else + block content + + block footer + div(id='pageFooter' class='footer') + div.footer-container + span.footer-timestamp + :translate(prop='template.reportGenAt') + Report generated at + | : #{date.toLocaleString(language, { timeZone: 'UTC' })} + + div.copyright + :translate(prop='template.copyright') + Copyright © 2013-2024 iFinex Inc. All rights reserved. + + span.footer-page.none-if-no-phantomjs + :translate(prop='template.page') + Page + | {{page}} + :translate(prop='template.from') + from + | {{pages}} + + script + |const isPphantomJS = /phantomjs/gi.test(navigator.userAgent) + |if (isPphantomJS) document.body.classList.add('phantomjs') + |else document.body.classList.add('no-phantomjs') diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Black.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Black.ttf new file mode 100644 index 00000000..b27822ba Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Black.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Bold.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Bold.ttf new file mode 100644 index 00000000..fe23eeb9 Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Bold.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-ExtraBold.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-ExtraBold.ttf new file mode 100644 index 00000000..874b1b0d Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-ExtraBold.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-ExtraLight.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-ExtraLight.ttf new file mode 100644 index 00000000..c993e822 Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-ExtraLight.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Light.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Light.ttf new file mode 100644 index 00000000..71188f5c Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Light.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Medium.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Medium.ttf new file mode 100644 index 00000000..a01f3777 Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Medium.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Regular.ttf 
b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Regular.ttf new file mode 100644 index 00000000..5e4851f0 Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Regular.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-SemiBold.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-SemiBold.ttf new file mode 100644 index 00000000..ecc7041e Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-SemiBold.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Thin.ttf b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Thin.ttf new file mode 100644 index 00000000..fe77243f Binary files /dev/null and b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/Inter-Thin.ttf differ diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/fonts.css b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/fonts.css new file mode 100644 index 00000000..09c151b0 --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/templates/fonts/fonts.css @@ -0,0 +1,53 @@ +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interThin}) format("truetype"); + font-weight: 100; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interExtraLight}) format("truetype"); + font-weight: 200; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interLight}) format("truetype"); + font-weight: 300; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interRegular}) format("truetype"); + font-weight: 400; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interMedium}) format("truetype"); + font-weight: 500; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interSemiBold}) format("truetype"); + font-weight: 600; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interBold}) format("truetype"); + font-weight: 700; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interExtraBold}) format("truetype"); + font-weight: 800; +} + +@font-face { + font-family: "Inter"; + src: url(data:font/truetype;charset=utf-8;base64,${interBlack}) format("truetype"); + font-weight: 900; +} diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/main.pug b/workers/loc.api/generate-report-file/pdf-writer/templates/main.pug new file mode 100644 index 00000000..48cde845 --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/templates/main.pug @@ -0,0 +1,20 @@ +extends base.pug + +block commonHeader + - const columns = reportColumns ?? Object.keys(apiData?.[0] ?? {}) + + div(class='content') + ul.responsive-table + li.table-header + each columnVal, columnKey in columns + .col #{columnVal} + +block content + - const data = Array.isArray(apiData) ? apiData : [apiData ?? {}] + - const columns = reportColumns ?? Object.keys(data?.[0] ?? 
{}) + + ul.responsive-table + each dataItem, dataIndex in data + li.table-row + each columnVal, columnKey in columns + .col #{dataItem[columnKey]} diff --git a/workers/loc.api/generate-report-file/pdf-writer/templates/style.css b/workers/loc.api/generate-report-file/pdf-writer/templates/style.css new file mode 100644 index 00000000..5336a681 --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/templates/style.css @@ -0,0 +1,352 @@ +html, body, div, span, applet, object, iframe, +h1, h2, h3, h4, h5, h6, p, blockquote, pre, +a, abbr, acronym, address, big, cite, code, +del, dfn, em, img, ins, kbd, q, s, samp, +small, strike, strong, sub, sup, tt, var, +b, u, i, center, +dl, dt, dd, ol, ul, li, +fieldset, form, label, legend, +table, caption, tbody, tfoot, thead, tr, th, td, +article, aside, canvas, details, embed, +figure, figcaption, footer, header, hgroup, +menu, nav, output, ruby, section, summary, +time, mark, audio, video { + margin: 0; + padding: 0; + border: 0; + font-size: 100%; + font: inherit; + vertical-align: baseline; +} + +ol, ul { + list-style: none; +} + +blockquote, q { + quotes: none; +} + +blockquote:before, blockquote:after, +q:before, q:after { + content: ''; + content: none; +} + +html, body { + -moz-print-color-adjust: exact; + -webkit-print-color-adjust: exact; + print-color-adjust: exact; + box-sizing: border-box; +} + + +html { + line-height: 1; +} + +body { + padding: 0 10px; +} + +html, body, .header, .footer { + font-family: "Inter", sans-serif; + font-weight: 400; + font-size: 8px; +} + +.sm { + font-size: 5px; +} + +h1 { + font-size: 16px; +} + +h2 { + font-size: 14px; +} + +h3 { + font-size: 12px; +} + +h4 { + font-size: 11px; +} + +h5 { + font-size: 10px; +} + +h6 { + font-size: 9px; +} + +.footer { + width: 100%; +} + +.header { + position: relative; + width: calc(100% - 20px); + line-height: 40px; + z-index: 9; +} + +.phantomjs .none-if-phantomjs { + display: none; +} + +.no-phantomjs .none-if-no-phantomjs { + display: none; +} + +.no-phantomjs .header { + position: absolute; +} + +.logo { + position: absolute; + width: 200px; + height: 40px; +} + +.logo-svg { + vertical-align: middle; +} + +.title { + vertical-align: middle; + margin: auto; + position: relative; + text-align: center; + height: 40px; +} + +.title-text { + margin: 0; + text-transform: uppercase; +} + +.footer { + position: relative; +} + +.footer-timestamp { + position: absolute; + bottom: 0; + left: 0; +} + +.footer-page { + position: absolute; + bottom: 0; + right: 0; +} + +.common-content-header { + line-height: 1; + position: absolute; + bottom: 0; + width: 100%; +} + +.content { + position: relative; + margin-left: auto; + margin-right: auto; +} + +.body > .content, +.body > .report-container td > .content { + margin-bottom: 20px; +} + +.content-title { + margin-bottom: 10px; + text-align: center; + color: #3e4444; +} + +.responsive-table { + margin-bottom: 20px; +} + +.responsive-table:nth-last-child(1) { + margin-bottom: 0; +} + +.responsive-table .table-header, +.responsive-table .table-row { + display: -moz-flex; + display: -webkit-flex; + display: flex; + padding: 10px 10px; + -webkit-justify-content: space-between; + justify-content: space-between; +} + +.header:first-of-type + .header { + display: none; +} + +@media print { + .responsive-table .table-header, + .responsive-table .table-row { + page-break-inside: avoid; + } + + .header:first-of-type + .header { + display: block; + } + + .header, + .no-phantomjs .header, + .footer { + position: fixed; + } + + .footer 
{ + bottom: 0; + width: calc(100% - 20px); + } +} + +.responsive-table .table-header { + background-color: #95A5A6; + text-transform: uppercase; +} + +.responsive-table .table-row { + background-color: #f7f7f7; +} + +.responsive-table .table-row:nth-of-type(even) { + background-color: #eeeeee; +} + +.responsive-table .col { + -moz-flex-basis: 25%; + -webkit-flex-basis: 25%; + flex-basis: 25%; + text-align: right; + word-break: break-all; + margin-left: 5px; +} + +.responsive-table .col:nth-of-type(1) { + text-align: left; + margin-left: 0; +} + +.left-text { + text-align: left; +} + +.right-text { + text-align: right; +} + +.uppercase-text { + text-transform: uppercase; +} + +.responsive-table.width-by-content { + width: -moz-max-content; + width: -webkit-max-content; + width: max-content; +} + +.responsive-table.width-by-content .col { + -moz-flex-basis: auto; + -webkit-flex-basis: auto; + flex-basis: auto; +} + +.no-margin-bottom, +.responsive-table.no-margin-bottom, +.responsive-table .no-margin-bottom { + margin-bottom: 0; +} + +.no-margin-top, +.responsive-table.no-margin-top, +.responsive-table .no-margin-top:nth-of-type(1), +.responsive-table .no-margin-top { + margin-top: 0; +} + +.table-title { + padding-left: 10px; +} + +.copyright { + font-style: italic; + position: absolute; + top: 0; + left: 0; + right: 0; + text-align: center; +} + +.statement-details-content { + position: absolute; + top: 11px; + right: 0; + line-height: 1; + margin-bottom: 0; +} + +.statement-details-content .responsive-table .table-header:nth-of-type(1), +.statement-details-content .responsive-table .table-row:nth-of-type(1) { + margin-top: 0; +} + +.statement-details-content .table-row, +.statement-details-content .table-header { + padding: 5px 5px; +} + +.report-container { + width: 100%; + border-collapse: collapse; + border-spacing: 0; +} + +/* Uses `mm` for height to support default settings of `html-pdf` lib */ +.header-space, .header { + height: 43.5mm; +} + +.footer-space, .footer { + height: 28mm; +} + +@media screen { + .footer-space { + height: 0; + } +} + +.report-header { + display: -moz-table-header-group; + display: -webkit-table-header-group; + display: table-header-group; + font-size: 0; +} + +.report-footer { + display: -moz-table-footer-group; + display: -webkit-table-footer-group; + display: table-footer-group; +} + +.footer-container { + position: relative; + height: 100%; + border: 10px solid transparent; + border-right-width: 0; + border-left-width: 0; + box-sizing: border-box; +} diff --git a/workers/loc.api/generate-report-file/pdf-writer/translations.yml b/workers/loc.api/generate-report-file/pdf-writer/translations.yml new file mode 100644 index 00000000..8ba0d5b4 --- /dev/null +++ b/workers/loc.api/generate-report-file/pdf-writer/translations.yml @@ -0,0 +1,29 @@ +en: + template: + title: Report + statementDetails: Statement Details + statementDate: Statement date + snapshotAt: Snapshot at + period: Period + username: Username + email: Email + errorMessage: Your file could not be completed, please try again + reportGenAt: Report generated at + copyright: Copyright © 2013-2024 iFinex Inc. All rights reserved. + page: Page + from: from + +ru: + template: + title: Отчет + statementDetails: Детали ведомости + statementDate: Дата ведомости + snapshotAt: Снимок в + period: Период + username: Имя пользователя + email: Эл. 
почта + errorMessage: Ваш файл не может быть завершен, пожалуйста, попробуйте еще раз + reportGenAt: Отчет сгенерирован в + copyright: Авторское право © 2013-2024 iFinex Inc. Все права защищены. + page: Стр. + from: из diff --git a/workers/loc.api/generate-csv/csv.job.data.js b/workers/loc.api/generate-report-file/report.file.job.data.js similarity index 81% rename from workers/loc.api/generate-csv/csv.job.data.js rename to workers/loc.api/generate-report-file/report.file.job.data.js index 963388bb..8037c02f 100644 --- a/workers/loc.api/generate-csv/csv.job.data.js +++ b/workers/loc.api/generate-report-file/report.file.job.data.js @@ -4,13 +4,13 @@ const { decorateInjectable } = require('../di/utils') const { checkParams, - getCsvArgs, + getReportFileArgs, checkTimeLimit, checkJobAndGetUserData, parsePositionsAuditId } = require('../helpers') const { - FindMethodToGetCsvFileError, + FindMethodToGetReportFileError, SymbolsTypeError } = require('../errors') @@ -18,7 +18,7 @@ const depsTypes = (TYPES) => [ TYPES.RService, TYPES.WeightedAveragesReportCsvWriter ] -class CsvJobData { +class ReportFileJobData { constructor ( rService, weightedAveragesReportCsvWriter @@ -27,7 +27,7 @@ class CsvJobData { this.weightedAveragesReportCsvWriter = weightedAveragesReportCsvWriter } - async getTradesCsvJobData ( + async getTradesFileJobData ( args, uId, uInfo @@ -43,13 +43,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'trades') + const reportFileArgs = getReportFileArgs(args, 'trades') const jobData = { userInfo, userId, name: 'getTrades', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsCreate', columnsCsv: { id: '#', @@ -71,7 +71,7 @@ class CsvJobData { return jobData } - async getFundingTradesCsvJobData ( + async getFundingTradesFileJobData ( args, uId, uInfo @@ -87,13 +87,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'fundingTrades') + const reportFileArgs = getReportFileArgs(args, 'fundingTrades') const jobData = { userInfo, userId, name: 'getFundingTrades', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsCreate', columnsCsv: { id: '#', @@ -114,12 +114,12 @@ class CsvJobData { return jobData } - async getTickersHistoryCsvJobData ( + async getTickersHistoryFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForCsv', ['symbol']) + checkParams(args, 'paramsSchemaForFile', ['symbol']) const { userId, @@ -130,7 +130,7 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'tickersHistory') + const reportFileArgs = getReportFileArgs(args, 'tickersHistory') const symb = Array.isArray(args.params.symbol) ? args.params.symbol : [args.params.symbol] @@ -163,7 +163,7 @@ class CsvJobData { userInfo, userId, name: 'getTickersHistory', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: isTrading ? 
tTickerHistColumns : fTickerHistColumns, formatSettings: { @@ -175,12 +175,12 @@ class CsvJobData { return jobData } - async getWalletsCsvJobData ( + async getWalletsFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForWalletsCsv') + checkParams(args, 'paramsSchemaForWalletsFile') const { userId, @@ -213,7 +213,7 @@ class CsvJobData { return jobData } - async getPositionsHistoryCsvJobData ( + async getPositionsHistoryFileJobData ( args, uId, uInfo @@ -229,13 +229,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'positionsHistory') + const reportFileArgs = getReportFileArgs(args, 'positionsHistory') const jobData = { userInfo, userId, name: 'getPositionsHistory', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: { id: '#', @@ -260,12 +260,12 @@ class CsvJobData { return jobData } - async getActivePositionsCsvJobData ( + async getActivePositionsFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForActivePositionsCsv') + checkParams(args, 'paramsSchemaForActivePositionsFile') const { userId, @@ -310,13 +310,13 @@ class CsvJobData { return jobData } - async getPositionsAuditCsvJobData ( + async getPositionsAuditFileJobData ( args, uId, uInfo ) { const _args = parsePositionsAuditId(args) - checkParams(_args, 'paramsSchemaForPositionsAuditCsv', ['id']) + checkParams(_args, 'paramsSchemaForPositionsAuditFile', ['id']) const { userId, @@ -327,13 +327,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(_args, 'positionsAudit') + const reportFileArgs = getReportFileArgs(_args, 'positionsAudit') const jobData = { userInfo, userId, name: 'getPositionsAudit', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: { id: '#', @@ -360,12 +360,12 @@ class CsvJobData { return jobData } - async getPublicTradesCsvJobData ( + async getPublicTradesFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForPublicTradesCsv', ['symbol']) + checkParams(args, 'paramsSchemaForPublicTradesFile', ['symbol']) checkTimeLimit(args) const { @@ -381,7 +381,7 @@ class CsvJobData { const isTradingPair = Array.isArray(params.symbol) ? params.symbol[0].startsWith('t') : params.symbol.startsWith('t') - const csvArgs = getCsvArgs(args, 'publicTrades', { isTradingPair }) + const reportFileArgs = getReportFileArgs(args, 'publicTrades', { isTradingPair }) const columnsCsv = (isTradingPair) ? 
{ id: '#', @@ -403,7 +403,7 @@ class CsvJobData { userInfo, userId, name: 'getPublicTrades', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mts', columnsCsv, formatSettings: { @@ -415,12 +415,12 @@ class CsvJobData { return jobData } - async getStatusMessagesCsvJobData ( + async getStatusMessagesFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForStatusMessagesCsv') + checkParams(args, 'paramsSchemaForStatusMessagesFile') const { userId, @@ -455,12 +455,12 @@ class CsvJobData { return jobData } - async getCandlesCsvJobData ( + async getCandlesFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForCandlesCsv', ['symbol']) + checkParams(args, 'paramsSchemaForCandlesFile', ['symbol']) checkTimeLimit(args) const { @@ -496,7 +496,7 @@ class CsvJobData { return jobData } - async getLedgersCsvJobData ( + async getLedgersFileJobData ( args, uId, uInfo @@ -512,14 +512,18 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'ledgers') + const reportFileArgs = getReportFileArgs(args, 'ledgers') const jobData = { userInfo, userId, name: 'getLedgers', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mts', + /* + * Example how to overwrite column order for pdf + * columnsPdf: {}, + */ columnsCsv: { id: '#', description: 'DESCRIPTION', @@ -537,7 +541,7 @@ class CsvJobData { return jobData } - async getPayInvoiceListCsvJobData ( + async getPayInvoiceListFileJobData ( args, uId, uInfo @@ -553,13 +557,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'payInvoiceList') + const reportFileArgs = getReportFileArgs(args, 'payInvoiceList') const jobData = { userInfo, userId, name: 'getPayInvoiceList', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 't', columnsCsv: { id: '#', @@ -580,12 +584,12 @@ class CsvJobData { return jobData } - async getOrderTradesCsvJobData ( + async getOrderTradesFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForOrderTradesCsv') + checkParams(args, 'paramsSchemaForOrderTradesFile') const { userId, @@ -596,13 +600,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'orderTrades') + const reportFileArgs = getReportFileArgs(args, 'orderTrades') const jobData = { userInfo, userId, name: 'getOrderTrades', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsCreate', columnsCsv: { id: '#', @@ -623,7 +627,7 @@ class CsvJobData { return jobData } - async getOrdersCsvJobData ( + async getOrdersFileJobData ( args, uId, uInfo @@ -639,13 +643,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'orders') + const reportFileArgs = getReportFileArgs(args, 'orders') const jobData = { userInfo, userId, name: 'getOrders', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: { id: '#', @@ -672,7 +676,7 @@ class CsvJobData { return jobData } - async getActiveOrdersCsvJobData ( + async getActiveOrdersFileJobData ( args, uId, uInfo @@ -718,7 +722,7 @@ class CsvJobData { return jobData } - async getMovementsCsvJobData ( + async getMovementsFileJobData ( args, uId, uInfo @@ -734,13 +738,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'movements') + const reportFileArgs = getReportFileArgs(args, 'movements') const jobData = { userInfo, userId, name: 'getMovements', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdated', columnsCsv: { id: '#', @@ -761,7 +765,7 @@ class CsvJobData { return jobData } - async getFundingOfferHistoryCsvJobData ( + async 
getFundingOfferHistoryFileJobData ( args, uId, uInfo @@ -777,13 +781,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'fundingOfferHistory') + const reportFileArgs = getReportFileArgs(args, 'fundingOfferHistory') const jobData = { userInfo, userId, name: 'getFundingOfferHistory', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: { id: '#', @@ -807,7 +811,7 @@ class CsvJobData { return jobData } - async getFundingLoanHistoryCsvJobData ( + async getFundingLoanHistoryFileJobData ( args, uId, uInfo @@ -823,13 +827,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'fundingLoanHistory') + const reportFileArgs = getReportFileArgs(args, 'fundingLoanHistory') const jobData = { userInfo, userId, name: 'getFundingLoanHistory', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: { id: '#', @@ -855,7 +859,7 @@ class CsvJobData { return jobData } - async getFundingCreditHistoryCsvJobData ( + async getFundingCreditHistoryFileJobData ( args, uId, uInfo @@ -871,13 +875,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'fundingCreditHistory') + const reportFileArgs = getReportFileArgs(args, 'fundingCreditHistory') const jobData = { userInfo, userId, name: 'getFundingCreditHistory', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsUpdate', columnsCsv: { id: '#', @@ -904,7 +908,7 @@ class CsvJobData { return jobData } - async getLoginsCsvJobData ( + async getLoginsFileJobData ( args, uId, uInfo @@ -920,13 +924,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'logins') + const reportFileArgs = getReportFileArgs(args, 'logins') const jobData = { userInfo, userId, name: 'getLogins', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'time', columnsCsv: { id: '#', @@ -941,7 +945,7 @@ class CsvJobData { return jobData } - async getChangeLogsCsvJobData ( + async getChangeLogsFileJobData ( args, uId, uInfo @@ -957,13 +961,13 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args, 'changeLogs') + const reportFileArgs = getReportFileArgs(args, 'changeLogs') const jobData = { userInfo, userId, name: 'getChangeLogs', - args: csvArgs, + args: reportFileArgs, propNameForPagination: 'mtsCreate', columnsCsv: { mtsCreate: 'DATE', @@ -979,12 +983,12 @@ class CsvJobData { return jobData } - async getMultipleCsvJobData ( + async getMultipleFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForMultipleCsv', false, true) + checkParams(args, 'paramsSchemaForMultipleFile', false, true) const { userId, @@ -995,18 +999,23 @@ class CsvJobData { uInfo ) - const { language } = { ...args.params } + const { + language, + isPDFRequired + } = args?.params ?? 
{} const jobsData = [] for (const params of args.params.multiExport) { - const getJobDataMethodName = `${params.method}JobData` + // Handle `csv` string for back compatibility + const methodName = params?.method.replace(/csv$/i, 'File') + const getJobDataMethodName = `${methodName}JobData` if ( - getJobDataMethodName === 'getMultipleCsvJobData' || + getJobDataMethodName === 'getMultipleFileJobData' || typeof this[getJobDataMethodName] !== 'function' || - typeof this.rService[params.method] !== 'function' + typeof this.rService[methodName] !== 'function' ) { - throw new FindMethodToGetCsvFileError() + throw new FindMethodToGetReportFileError() } const jobData = await this[getJobDataMethodName]( @@ -1014,7 +1023,12 @@ class CsvJobData { ...args, params: { ...params, - language + ...(language && typeof language === 'string' + ? { language } + : {}), + ...(typeof isPDFRequired === 'boolean' + ? { isPDFRequired } + : {}) } }, userId, @@ -1033,12 +1047,12 @@ class CsvJobData { } } - async getWeightedAveragesReportCsvJobData ( + async getWeightedAveragesReportFileJobData ( args, uId, uInfo ) { - checkParams(args, 'paramsSchemaForWeightedAveragesReportApiCsv', ['symbol']) + checkParams(args, 'paramsSchemaForWeightedAveragesReportFile', ['symbol']) const { userId, @@ -1049,14 +1063,14 @@ class CsvJobData { uInfo ) - const csvArgs = getCsvArgs(args) + const reportFileArgs = getReportFileArgs(args) const jobData = { userInfo, userId, name: 'getWeightedAveragesReport', fileNamesMap: [['getWeightedAveragesReport', 'weighted-averages-report']], - args: csvArgs, + args: reportFileArgs, columnsCsv: { symbol: 'PAIR', buyingWeightedPrice: 'WEIGHTED PRICE', @@ -1081,6 +1095,6 @@ class CsvJobData { } } -decorateInjectable(CsvJobData, depsTypes) +decorateInjectable(ReportFileJobData, depsTypes) -module.exports = CsvJobData +module.exports = ReportFileJobData diff --git a/workers/loc.api/has.grc.service/index.js b/workers/loc.api/has.grc.service/index.js index 1001b42b..65459346 100644 --- a/workers/loc.api/has.grc.service/index.js +++ b/workers/loc.api/has.grc.service/index.js @@ -63,6 +63,14 @@ class HasGrcService { return !!countSlackService } + + async hasPDFService () { + const countPDFServices = await this.lookUpFunction( + 'rest:ext:pdf' + ) + + return !!countPDFServices + } } decorateInjectable(HasGrcService, depsTypes) diff --git a/workers/loc.api/helpers/check-params.js b/workers/loc.api/helpers/check-params.js index 31503916..22de7e8d 100644 --- a/workers/loc.api/helpers/check-params.js +++ b/workers/loc.api/helpers/check-params.js @@ -12,7 +12,7 @@ const { module.exports = ( args, - schemaName = 'paramsSchemaForCsv', + schemaName = 'paramsSchemaForFile', requireFields = [], checkParamsField = false, additionalSchema = {} diff --git a/workers/loc.api/helpers/get-translator.js b/workers/loc.api/helpers/get-translator.js new file mode 100644 index 00000000..80f5e343 --- /dev/null +++ b/workers/loc.api/helpers/get-translator.js @@ -0,0 +1,61 @@ +'use strict' + +const LANGUAGES = require('./languages') + +const getTranslator = (params) => { + const { + language = 'en', + translations, + isNotDefaultTranslatorUsed = false + } = params ?? {} + + const normLang = LANGUAGES?.[language] ?? 'en' + const translatorByDefault = ( + !isNotDefaultTranslatorUsed && + getTranslator({ + language: 'en', + translations, + isNotDefaultTranslatorUsed: true + }) + ) + + return (defVal = '', opts) => { + const prop = typeof opts === 'string' + ? 
opts + : opts?.prop + + if ( + !translations?.[normLang] || + typeof translations[normLang] !== 'object' || + Object.keys(translations[normLang]) === 0 || + typeof prop !== 'string' || + !prop + ) { + return translatorByDefault + ? translatorByDefault(defVal, prop) + : defVal + } + + const res = prop.split('.').reduce((accum, curr) => { + if ( + typeof accum[curr] === 'object' || + typeof accum[curr] === 'string' || + Number.isFinite(accum[curr]) + ) { + return accum[curr] + } + + return accum + }, translations[normLang]) + + if (typeof res === 'object') { + return translatorByDefault + ? translatorByDefault(defVal, prop) + : defVal + } + + return res + } +} + +module.exports = getTranslator diff --git a/workers/loc.api/helpers/index.js b/workers/loc.api/helpers/index.js index 894a52e0..add351b3 100644 --- a/workers/loc.api/helpers/index.js +++ b/workers/loc.api/helpers/index.js @@ -9,7 +9,7 @@ const { const checkParams = require('./check-params') const { getMethodLimit, - getCsvArgs, + getReportFileArgs, getLimitNotMoreThan } = require('./limit-param.helpers') const { @@ -59,6 +59,8 @@ const FILTER_CONDITIONS = require('./filter.conditions') const getDataFromApi = require('./get-data-from-api') const splitSymbolPairs = require('./split-symbol-pairs') const FOREX_SYMBS = require('./forex.symbs') +const getTranslator = require('./get-translator') +const LANGUAGES = require('./languages') module.exports = { getREST, @@ -92,7 +94,7 @@ module.exports = { prepareResponse, prepareApiResponse, prepareSymbolResponse, - getCsvArgs, + getReportFileArgs, getMethodLimit, checkJobAndGetUserData, grcBfxReq, @@ -105,5 +107,7 @@ module.exports = { getDataFromApi, parsePositionsAuditId, splitSymbolPairs, - FOREX_SYMBS + FOREX_SYMBS, + getTranslator, + LANGUAGES } diff --git a/workers/loc.api/queue/send-mail/languages.js b/workers/loc.api/helpers/languages.js similarity index 100% rename from workers/loc.api/queue/send-mail/languages.js rename to workers/loc.api/helpers/languages.js diff --git a/workers/loc.api/helpers/limit-param.helpers.js b/workers/loc.api/helpers/limit-param.helpers.js index e842eb43..2f1a917e 100644 --- a/workers/loc.api/helpers/limit-param.helpers.js +++ b/workers/loc.api/helpers/limit-param.helpers.js @@ -60,8 +60,8 @@ const getMethodLimit = (sendLimit, method, methodsLimits = {}) => { return getLimitNotMoreThan(base, max) } -const getCsvArgs = (args, method, extraParams = {}) => { - const csvArgs = { +const getReportFileArgs = (args, method, extraParams = {}) => { + const reportFileArgs = { ...args, params: { ...args.params, @@ -70,7 +70,7 @@ const getCsvArgs = (args, method, extraParams = {}) => { } } - return csvArgs + return reportFileArgs } const getLimitNotMoreThan = (limit, maxLimit = 25) => { @@ -80,6 +80,6 @@ const getLimitNotMoreThan = (limit, maxLimit = 25) => { module.exports = { getMethodLimit, - getCsvArgs, + getReportFileArgs, getLimitNotMoreThan } diff --git a/workers/loc.api/helpers/schema.js b/workers/loc.api/helpers/schema.js index f2123115..94fd02a9 100644 --- a/workers/loc.api/helpers/schema.js +++ b/workers/loc.api/helpers/schema.js @@ -84,7 +84,7 @@ const paramsSchemaForPayInvoiceList = { } } -const paramsSchemaForPayInvoiceListCsv = { +const paramsSchemaForPayInvoiceListFile = { type: 'object', properties: { ...paramsSchemaForPayInvoiceList.properties, @@ -164,7 +164,7 @@ const paramsSchemaForMovementInfo = { } } -const paramsSchemaForCandlesCsv = { +const paramsSchemaForCandlesFile = { type: 'object', properties: { ...paramsSchemaForCandlesApi.properties, @@ 
-195,7 +195,7 @@ const paramsSchemaForStatusMessagesApi = { } } -const paramsSchemaForStatusMessagesCsv = { +const paramsSchemaForStatusMessagesFile = { type: 'object', properties: { ...paramsSchemaForStatusMessagesApi.properties, @@ -205,7 +205,7 @@ const paramsSchemaForStatusMessagesCsv = { } } -const paramsSchemaForCsv = { +const paramsSchemaForFile = { ...paramsSchemaForApi, properties: { ...paramsSchemaForApi.properties, @@ -215,10 +215,10 @@ const paramsSchemaForCsv = { } } -const paramsSchemaForPublicTradesCsv = { - ...paramsSchemaForCsv, +const paramsSchemaForPublicTradesFile = { + ...paramsSchemaForFile, properties: { - ...paramsSchemaForCsv.properties, + ...paramsSchemaForFile.properties, symbol: _publicTradesSymbol } } @@ -252,10 +252,10 @@ const paramsSchemaForPositionsAudit = { } } -const paramsSchemaForPositionsAuditCsv = { - ...paramsSchemaForCsv, +const paramsSchemaForPositionsAuditFile = { + ...paramsSchemaForFile, properties: { - ...paramsSchemaForCsv.properties, + ...paramsSchemaForFile.properties, ...paramsSchemaForPositionsAudit.properties } } @@ -269,7 +269,7 @@ const paramsSchemaForWallets = { } } -const paramsSchemaForWalletsCsv = { +const paramsSchemaForWalletsFile = { type: 'object', properties: { end: { @@ -281,7 +281,7 @@ const paramsSchemaForWalletsCsv = { } } -const paramsSchemaForActivePositionsCsv = { +const paramsSchemaForActivePositionsFile = { type: 'object', properties: { timezone, @@ -290,10 +290,13 @@ const paramsSchemaForActivePositionsCsv = { } } -const paramsSchemaForMultipleCsv = { +const paramsSchemaForMultipleFile = { type: 'object', required: ['multiExport'], properties: { + isPDFRequired: { + type: 'boolean' + }, email: { type: 'string' }, @@ -346,7 +349,7 @@ const paramsSchemaForOrderTradesApi = { } } -const paramsSchemaForOrderTradesCsv = { +const paramsSchemaForOrderTradesFile = { ...paramsSchemaForOrderTradesApi, properties: { ...paramsSchemaForOrderTradesApi.properties, @@ -356,7 +359,7 @@ const paramsSchemaForOrderTradesCsv = { } } -const paramsSchemaForWeightedAveragesReportApiCsv = { +const paramsSchemaForWeightedAveragesReportFile = { type: 'object', properties: { ...cloneDeep(paramsSchemaForWeightedAveragesReportApi.properties), @@ -368,24 +371,24 @@ const paramsSchemaForWeightedAveragesReportApiCsv = { module.exports = { paramsSchemaForApi, - paramsSchemaForCsv, + paramsSchemaForFile, paramsSchemaForPayInvoiceList, - paramsSchemaForPayInvoiceListCsv, - paramsSchemaForPublicTradesCsv, + paramsSchemaForPayInvoiceListFile, + paramsSchemaForPublicTradesFile, paramsSchemaForPublicTrades, paramsSchemaForPositionsAudit, - paramsSchemaForPositionsAuditCsv, + paramsSchemaForPositionsAuditFile, paramsSchemaForWallets, - paramsSchemaForWalletsCsv, - paramsSchemaForMultipleCsv, - paramsSchemaForActivePositionsCsv, + paramsSchemaForWalletsFile, + paramsSchemaForMultipleFile, + paramsSchemaForActivePositionsFile, paramsSchemaForOrderTradesApi, - paramsSchemaForOrderTradesCsv, + paramsSchemaForOrderTradesFile, paramsSchemaForStatusMessagesApi, - paramsSchemaForStatusMessagesCsv, + paramsSchemaForStatusMessagesFile, paramsSchemaForCandlesApi, - paramsSchemaForCandlesCsv, + paramsSchemaForCandlesFile, paramsSchemaForWeightedAveragesReportApi, - paramsSchemaForWeightedAveragesReportApiCsv, + paramsSchemaForWeightedAveragesReportFile, paramsSchemaForMovementInfo } diff --git a/workers/loc.api/queue/aggregator.js b/workers/loc.api/queue/aggregator.js index 870686fd..46a893f5 100644 --- a/workers/loc.api/queue/aggregator.js +++ 
b/workers/loc.api/queue/aggregator.js @@ -10,7 +10,7 @@ const { } = require('./helpers') module.exports = ( - { isAddedUniqueEndingToCsvName }, + { isAddedUniqueEndingToReportFileName }, rootPath, aggregatorQueue, hasGrcService, @@ -39,7 +39,7 @@ module.exports = ( ) const newFilePaths = [] - const csvFilesMetadata = [] + const reportFilesMetadata = [] if (isEnableToSendEmail) { const s3Data = await uploadToS3( @@ -70,7 +70,7 @@ module.exports = ( await unlink(filePath) - csvFilesMetadata.push({ + reportFilesMetadata.push({ name: _name, filePath: null }) @@ -79,7 +79,7 @@ module.exports = ( job.done() aggregatorQueue.emit('completed', { newFilePaths, - csvFilesMetadata, + reportFilesMetadata, userInfo }) @@ -97,12 +97,12 @@ module.exports = ( _name, { ...subParamsArr[i] }, userInfo.username, - isAddedUniqueEndingToCsvName, + isAddedUniqueEndingToReportFileName, chunkCommonFolders[i] ) newFilePaths.push(newFilePath) - csvFilesMetadata.push({ + reportFilesMetadata.push({ name: _name, filePath: newFilePath }) @@ -111,7 +111,7 @@ module.exports = ( job.done() aggregatorQueue.emit('completed', { newFilePaths, - csvFilesMetadata, + reportFilesMetadata, userInfo }) } catch (err) { diff --git a/workers/loc.api/queue/helpers/get-complete-file-name.js b/workers/loc.api/queue/helpers/get-complete-file-name.js index 1c44805b..a28e2fdd 100644 --- a/workers/loc.api/queue/helpers/get-complete-file-name.js +++ b/workers/loc.api/queue/helpers/get-complete-file-name.js @@ -1,6 +1,9 @@ 'use strict' const { v4: uuidv4 } = require('uuid') +const { capitalize } = require('lib-js-util-base') + +const getReportFileExtName = require('./get-report-file-ext-name') const _fileNamesMap = new Map([ ['getTrades', 'trades'], @@ -92,12 +95,12 @@ module.exports = ( params, { userInfo, - ext = 'csv', isMultiExport, - isAddedUniqueEndingToCsvName, + isAddedUniqueEndingToReportFileName, uniqEnding = '' } = {} ) => { + const ext = getReportFileExtName(params) const { start, end, @@ -122,16 +125,35 @@ module.exports = ( : formattedDateNow const _ext = ext ? `.${ext}` : '' const _userInfo = userInfo ? `${userInfo}_` : '' - const _uniqEnding = isAddedUniqueEndingToCsvName + const _uniqEnding = isAddedUniqueEndingToReportFileName ? _getUniqEnding(uniqEnding) : '' + const mainMetadata = { + userInfo: _userInfo, + baseName, + readableBaseName: capitalize(baseName.replace(/_/g, ' ')), + date, + formattedDateNow, + timestamp, + uniqEnding, + ext + } + if ( isBaseNameInName || isMultiExport ) { - return `${_userInfo}${baseName}_${formattedDateNow}${_uniqEnding}${_ext}` + return { + ...mainMetadata, + + fileName: `${_userInfo}${baseName}_${formattedDateNow}${_uniqEnding}${_ext}` + } } - return `${_userInfo}${baseName}_FROM_${startDate}_TO_${endDate}_ON_${timestamp}${_uniqEnding}${_ext}` + return { + ...mainMetadata, + + fileName: `${_userInfo}${baseName}_FROM_${startDate}_TO_${endDate}_ON_${timestamp}${_uniqEnding}${_ext}` + } } diff --git a/workers/loc.api/queue/helpers/get-local-csv-folder-paths.js b/workers/loc.api/queue/helpers/get-local-csv-folder-paths.js deleted file mode 100644 index 0a05ff7e..00000000 --- a/workers/loc.api/queue/helpers/get-local-csv-folder-paths.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const path = require('path') -const argv = require('yargs').argv - -module.exports = (rootPath) => { - const localCsvFolderPath = path.isAbsolute(argv.csvFolder) - ? argv.csvFolder - : path.join(rootPath, argv.csvFolder) - const tempCsvFolderPath = path.isAbsolute(argv.tempFolder) - ? 
argv.tempFolder - : path.join(rootPath, argv.tempFolder) - - return { - localCsvFolderPath, - tempCsvFolderPath - } -} diff --git a/workers/loc.api/queue/helpers/get-local-report-folder-paths.js b/workers/loc.api/queue/helpers/get-local-report-folder-paths.js new file mode 100644 index 00000000..d79b2fa8 --- /dev/null +++ b/workers/loc.api/queue/helpers/get-local-report-folder-paths.js @@ -0,0 +1,18 @@ +'use strict' + +const path = require('path') +const argv = require('yargs').argv + +module.exports = (rootPath) => { + const localReportFolderPath = path.isAbsolute(argv.reportFolder) + ? argv.reportFolder + : path.join(rootPath, argv.reportFolder) + const tempReportFolderPath = path.isAbsolute(argv.tempFolder) + ? argv.tempFolder + : path.join(rootPath, argv.tempFolder) + + return { + localReportFolderPath, + tempReportFolderPath + } +} diff --git a/workers/loc.api/queue/helpers/get-report-content-type.js b/workers/loc.api/queue/helpers/get-report-content-type.js new file mode 100644 index 00000000..02b88cac --- /dev/null +++ b/workers/loc.api/queue/helpers/get-report-content-type.js @@ -0,0 +1,17 @@ +'use strict' + +module.exports = (params) => { + const { + isСompress, + isPDFRequired + } = params ?? {} + + if (isСompress) { + return 'application/zip' + } + if (isPDFRequired) { + return 'application/pdf' + } + + return 'text/csv' +} diff --git a/workers/loc.api/queue/helpers/get-report-file-ext-name.js b/workers/loc.api/queue/helpers/get-report-file-ext-name.js new file mode 100644 index 00000000..3e1c6604 --- /dev/null +++ b/workers/loc.api/queue/helpers/get-report-file-ext-name.js @@ -0,0 +1,17 @@ +'use strict' + +module.exports = (params) => { + const { + isPDFRequired, + isHTMLRequired + } = params ?? {} + + if (isPDFRequired) { + return 'pdf' + } + if (isHTMLRequired) { + return 'html' + } + + return 'csv' +} diff --git a/workers/loc.api/queue/helpers/index.js b/workers/loc.api/queue/helpers/index.js index 04eac03e..df574553 100644 --- a/workers/loc.api/queue/helpers/index.js +++ b/workers/loc.api/queue/helpers/index.js @@ -6,14 +6,18 @@ const { writableToPromise, createUniqueFileName } = require('./utils') -const getLocalCsvFolderPaths = require( - './get-local-csv-folder-paths' +const getLocalReportFolderPaths = require( + './get-local-report-folder-paths' ) +const getReportFileExtName = require('./get-report-file-ext-name') +const getReportContentType = require('./get-report-content-type') module.exports = { moveFileToLocalStorage, writableToPromise, createUniqueFileName, getCompleteFileName, - getLocalCsvFolderPaths + getLocalReportFolderPaths, + getReportFileExtName, + getReportContentType } diff --git a/workers/loc.api/queue/helpers/utils.js b/workers/loc.api/queue/helpers/utils.js index 76d2149d..9c743e02 100644 --- a/workers/loc.api/queue/helpers/utils.js +++ b/workers/loc.api/queue/helpers/utils.js @@ -14,9 +14,10 @@ const chmod = promisify(fs.chmod) const isElectronjsEnv = argv.isElectronjsEnv const getCompleteFileName = require('./get-complete-file-name') -const getLocalCsvFolderPaths = require( - './get-local-csv-folder-paths' +const getLocalReportFolderPaths = require( + './get-local-report-folder-paths' ) +const getReportFileExtName = require('./get-report-file-ext-name') const _checkAndCreateDir = async (dirPath) => { try { @@ -82,20 +83,20 @@ const moveFileToLocalStorage = async ( name, params, userInfo, - isAddedUniqueEndingToCsvName, + isAddedUniqueEndingToReportFileName, chunkCommonFolder ) => { - const { localCsvFolderPath } = getLocalCsvFolderPaths(rootPath) - 
const fullCsvDirPath = ( + const { localReportFolderPath } = getLocalReportFolderPaths(rootPath) + const fullReportDirPath = ( chunkCommonFolder && typeof chunkCommonFolder === 'string' ) - ? path.join(localCsvFolderPath, chunkCommonFolder) - : localCsvFolderPath + ? path.join(localReportFolderPath, chunkCommonFolder) + : localReportFolderPath - await _checkAndCreateDir(fullCsvDirPath) + await _checkAndCreateDir(fullReportDirPath) - let fileName = getCompleteFileName( + let { fileName } = getCompleteFileName( name, params, { userInfo } @@ -109,7 +110,7 @@ const moveFileToLocalStorage = async ( } else throw err } - const files = await readdir(fullCsvDirPath) + const files = await readdir(fullReportDirPath) let count = 0 while (files.some(file => file === fileName)) { @@ -120,13 +121,13 @@ const moveFileToLocalStorage = async ( params, { userInfo, - isAddedUniqueEndingToCsvName, + isAddedUniqueEndingToReportFileName, uniqEnding: `(${count})` } - ) + ).fileName } - const newFilePath = path.join(fullCsvDirPath, fileName) + const newFilePath = path.join(fullReportDirPath, fileName) await _moveFileAcrossDevice(filePath, newFilePath) if (isElectronjsEnv) { @@ -136,26 +137,27 @@ const moveFileToLocalStorage = async ( return { newFilePath } } -const createUniqueFileName = async (rootPath, count = 0) => { +const createUniqueFileName = async (rootPath, params, count = 0) => { count += 1 if (count > 20) { throw new Error('ERR_CREATE_UNIQUE_FILE_NAME') } - const { tempCsvFolderPath } = getLocalCsvFolderPaths(rootPath) + const { tempReportFolderPath } = getLocalReportFolderPaths(rootPath) - await _checkAndCreateDir(tempCsvFolderPath) + await _checkAndCreateDir(tempReportFolderPath) - const uniqueFileName = `${uuidv4()}.csv` + const ext = getReportFileExtName(params) + const uniqueFileName = `${uuidv4()}.${ext}` - const files = await readdir(tempCsvFolderPath) + const files = await readdir(tempReportFolderPath) if (files.some(file => file === uniqueFileName)) { - return createUniqueFileName(rootPath, count) + return createUniqueFileName(rootPath, params, count) } - return path.join(tempCsvFolderPath, uniqueFileName) + return path.join(tempReportFolderPath, uniqueFileName) } const writableToPromise = stream => { diff --git a/workers/loc.api/queue/processor.js b/workers/loc.api/queue/processor.js index 7e8fdbb1..d739bc49 100644 --- a/workers/loc.api/queue/processor.js +++ b/workers/loc.api/queue/processor.js @@ -18,12 +18,72 @@ const { const { isAuthError } = require('../helpers') +const processReportFile = async (deps, args) => { + const { + data, + filePath + } = args + + const write = data?.isUnauth + ? 
'Your file could not be completed, please try again' + : data + + const writable = fs.createWriteStream(filePath) + const writablePromise = writableToPromise(writable) + + if (data?.args?.params?.isPDFRequired) { + const pdfStream = await deps.pdfWriter + .createPDFStream({ + jobData: data, + pdfCustomTemplateName: data?.pdfCustomTemplateName, + language: data?.args?.params.language, + isError: data?.isUnauth + }) + + pipeline(pdfStream, writable, () => {}) + + await deps.writeDataToStream( + pdfStream, + write + ) + + pdfStream.end() + + return writablePromise + } + if (typeof data?.csvCustomWriter === 'function') { + await data.csvCustomWriter( + writable, + write + ) + + return writablePromise + } + + const stringifier = stringify({ + header: true, + columns: data?.columnsCsv + }) + + pipeline(stringifier, writable, () => {}) + + await deps.writeDataToStream( + stringifier, + write + ) + + stringifier.end() + + return writablePromise +} + module.exports = ( conf, rootPath, processorQueue, aggregatorQueue, - writeDataToStream + writeDataToStream, + pdfWriter ) => { processorQueue.on('completed', (result) => { aggregatorQueue.addJob({ @@ -77,7 +137,8 @@ module.exports = ( data.args.params = { ...data.args.params } const filePath = await createUniqueFileName( - rootPath + rootPath, + data.args.params ) filePaths.push(filePath) @@ -85,10 +146,8 @@ module.exports = ( chunkCommonFolder, args: { params }, name, - fileNamesMap, - columnsCsv, - csvCustomWriter - } = { ...data } + fileNamesMap + } = data ?? {} subParamsArr.push({ ...omit(params, ['name', 'fileNamesMap']), name, @@ -96,35 +155,16 @@ module.exports = ( }) chunkCommonFolders.push(chunkCommonFolder) - const write = isUnauth - ? 'Your file could not be completed, please try again' - : data - - const writable = fs.createWriteStream(filePath) - const writablePromise = writableToPromise(writable) - - if (typeof csvCustomWriter === 'function') { - await csvCustomWriter( - writable, - write - ) - } else { - const stringifier = stringify({ - header: true, - columns: columnsCsv - }) - - pipeline(stringifier, writable, () => {}) - - await writeDataToStream( - stringifier, - write - ) - - stringifier.end() - } - - await writablePromise + await processReportFile( + { + writeDataToStream, + pdfWriter + }, + { + data, + filePath + } + ) } job.done() diff --git a/workers/loc.api/queue/send-mail/index.js b/workers/loc.api/queue/send-mail/index.js index fa1c7630..18e533ba 100644 --- a/workers/loc.api/queue/send-mail/index.js +++ b/workers/loc.api/queue/send-mail/index.js @@ -5,7 +5,8 @@ const fs = require('fs') const pug = require('pug') const yaml = require('js-yaml') -const LANGUAGES = require('./languages') +const getTranslator = require('../../helpers/get-translator') +const LANGUAGES = require('../../helpers/languages') const basePathToViews = path.join(__dirname, 'views') const pathToTrans = path.join( @@ -16,57 +17,6 @@ const translations = yaml.load( fs.readFileSync(pathToTrans, 'utf8') ) -const _getTranslator = ( - language = 'en', - trans = translations, - isNotDefaultTranslatorUsed = false -) => { - const translatorByDefault = ( - !isNotDefaultTranslatorUsed && - _getTranslator('en', trans, true) - ) - - return (defVal = '', opts) => { - const prop = typeof opts === 'string' - ? opts - : ({ ...opts }).prop - - if ( - !trans || - typeof trans !== 'object' || - !trans[language] || - typeof trans[language] !== 'object' || - Object.keys(trans[language]) === 0 || - typeof prop !== 'string' || - !prop - ) { - return translatorByDefault - ? 
translatorByDefault(defVal, prop) - : defVal - } - - const res = prop.split('.').reduce((accum, curr) => { - if ( - typeof accum[curr] === 'object' || - typeof accum[curr] === 'string' || - Number.isFinite(accum[curr]) - ) { - return accum[curr] - } - - return accum - }, trans[language]) - - if (typeof res === 'object') { - return translatorByDefault - ? translatorByDefault(defVal, prop) - : defVal - } - - return res - } -} - module.exports = (grcBfxReq) => { return async ( configs, @@ -82,7 +32,10 @@ module.exports = (grcBfxReq) => { language = 'en' } = { ...data } const normLang = LANGUAGES?.[language] ?? 'en' - const translate = _getTranslator(normLang) + const translate = getTranslator({ + language: normLang, + translations + }) const subject = translate( configs.subject, 'template.subject' @@ -97,7 +50,7 @@ module.exports = (grcBfxReq) => { const button = { url, text: translate( - 'Download CSV', + 'Download Report', 'template.btnText' ) } diff --git a/workers/loc.api/queue/send-mail/translations/email.yml b/workers/loc.api/queue/send-mail/translations/email.yml index 8f6dad5f..3880f74e 100644 --- a/workers/loc.api/queue/send-mail/translations/email.yml +++ b/workers/loc.api/queue/send-mail/translations/email.yml @@ -1,7 +1,7 @@ en: template: subject: Your report is ready - btnText: Download CSV + btnText: Download Report fileName: File name unauth: Your file could not be completed, please try again readyForDownload: The report you request is ready for download @@ -15,7 +15,7 @@ en: ru: template: subject: Ваш отчет готов - btnText: Скачать CSV + btnText: Скачать Отчет fileName: Имя файла unauth: Ваш файл не может быть завершен, пожалуйста, попробуйте еще раз readyForDownload: Запрашиваемый вами отчет готов к загрузке @@ -29,7 +29,7 @@ ru: zh-CN: template: subject: 您的报告已备妥 - btnText: 下载CSV档案 + btnText: 下载报告档案 fileName: 档案名称 unauth: 档案未下载成功,请重试一次 readyForDownload: 您请求的报告已可下载 @@ -43,7 +43,7 @@ zh-CN: zh-TW: template: subject: 您的報告已備妥 - btnText: 下載CSV檔案 + btnText: 下載報告檔案 fileName: 檔案名稱 unauth: 檔案未下載成功,請重試一次 readyForDownload: 您請求的報告已可下載 @@ -57,7 +57,7 @@ zh-TW: es-EM: template: subject: Tu reporte esta listo - btnText: Descargar el CSV + btnText: Descargar el Informe fileName: Nombre del archivo unauth: Tu archivo no se pudo completar, intente de nuevo por favor.
readyForDownload: Tu reporte esta listo para ser descargado @@ -71,7 +71,7 @@ es-EM: tr: template: subject: Raporunuz hazır - btnText: CSV İndir + btnText: Raporu İndir fileName: Dosya adı unauth: Dosyanız tamamlanamadı, lütfen tekrar deneyin readyForDownload: İstediğiniz rapor indirilmeye hazır @@ -85,7 +85,7 @@ tr: pt-BR: template: subject: Seu relatório está pronto - btnText: Baixar CSV + btnText: Baixar Relatório fileName: Nome do arquivo unauth: Não foi possível concluir seu arquivo, tente novamente readyForDownload: O relatório solicitado está pronto para download diff --git a/workers/loc.api/queue/upload-to-s3/index.js b/workers/loc.api/queue/upload-to-s3/index.js index 54a17e1c..7565f0a1 100644 --- a/workers/loc.api/queue/upload-to-s3/index.js +++ b/workers/loc.api/queue/upload-to-s3/index.js @@ -3,7 +3,10 @@ const fs = require('fs') const { Readable } = require('stream') -const { getCompleteFileName } = require('../helpers') +const { + getCompleteFileName, + getReportContentType +} = require('../helpers') const _uploadSignToS3 = async ( isСompress, @@ -54,7 +57,7 @@ const _uploadSignToS3 = async ( module.exports = ( { isСompress, - isAddedUniqueEndingToCsvName + isAddedUniqueEndingToReportFileName }, deflateFac, hasGrcService, @@ -75,14 +78,14 @@ module.exports = ( subParamsArr.length > 1 ) ) - const fileNameWithoutExt = getCompleteFileName( + const { fileName: fileNameWithoutExt } = getCompleteFileName( subParamsArr[0].name, subParamsArr[0], { userInfo: userInfo.username, ext: false, isMultiExport, - isAddedUniqueEndingToCsvName + isAddedUniqueEndingToReportFileName } ) const isUppedPGPService = await hasGrcService.hasGPGService() @@ -97,9 +100,9 @@ module.exports = ( subParamsArr[i], { userInfo: userInfo.username, - isAddedUniqueEndingToCsvName + isAddedUniqueEndingToReportFileName } - ) + ).fileName } } }) @@ -112,13 +115,20 @@ module.exports = ( )) const promises = buffers.map(async (buffer, i) => { + const ext = isСompress ? 'zip' : '' + const singleFileName = isСompress + ? streams[i].data.name.slice(0, -3) + : streams[i].data.name const fileName = isСompress && isMultiExport ? `${fileNameWithoutExt}.zip` - : `${streams[i].data.name.slice(0, -3)}${isСompress ? 'zip' : 'csv'}` + : `${singleFileName}${ext}` const opts = { ...configs, contentDisposition: `attachment; filename="${fileName}"`, - contentType: isСompress ? 'application/zip' : 'text/csv' + contentType: getReportContentType({ + isСompress, + isPDFRequired: subParamsArr[i].isPDFRequired + }) } const hexStrBuff = buffer.toString('hex') diff --git a/workers/loc.api/queue/write-data-to-stream/helpers.js b/workers/loc.api/queue/write-data-to-stream/helpers.js index d30d4749..431b3cf3 100644 --- a/workers/loc.api/queue/write-data-to-stream/helpers.js +++ b/workers/loc.api/queue/write-data-to-stream/helpers.js @@ -83,28 +83,43 @@ const _formatters = { } } -const _dataFormatter = (obj, formatSettings, params) => { +const _dataFormatter = (data, formatSettings, params) => { if ( - typeof obj !== 'object' || + typeof data !== 'object' || typeof formatSettings !== 'object' ) { - return obj + return data } - const res = cloneDeep(obj) - - Object.entries(formatSettings).forEach(([key, val]) => { - try { - if ( - typeof obj[key] !== 'undefined' && - typeof _formatters[val] === 'function' - ) { - res[key] = _formatters[val](obj[key], params) - } - } catch (err) {} - }) + const isArray = Array.isArray(data) + const clonedData = cloneDeep(data) + const objArr = isArray ? 
clonedData : [clonedData] + + for (const obj of objArr) { + for (const [key, val] of Object.entries(formatSettings)) { + try { + if ( + typeof obj[key] !== 'undefined' && + val && + typeof val === 'object' + ) { + obj[key] = _dataFormatter(obj[key], val, params) + + continue + } + if ( + typeof obj[key] !== 'undefined' && + typeof _formatters[val] === 'function' + ) { + obj[key] = _formatters[val](obj[key], params) + + continue + } + } catch (err) {} + } + } - return res + return isArray ? objArr : objArr[0] } const write = ( @@ -114,12 +129,16 @@ const write = ( params, method ) => { - res.forEach((item) => { + const resArr = Array.isArray(res) + ? res + : [res] + + for (const item of resArr) { const _item = dataNormalizer(item, method, params) const res = _dataFormatter(_item, formatSettings, params) stream.write(res) - }) + } } const writeMessageToStream = ( diff --git a/workers/loc.api/queue/write-data-to-stream/index.js b/workers/loc.api/queue/write-data-to-stream/index.js index 098cdc4c..5e128f94 100644 --- a/workers/loc.api/queue/write-data-to-stream/index.js +++ b/workers/loc.api/queue/write-data-to-stream/index.js @@ -49,7 +49,7 @@ module.exports = ( const _res = await getDataFromApi({ getData, args: currIterationArgs, - callerName: 'CSV_WRITER' + callerName: 'REPORT_FILE_WRITER' }) const isGetWalletsMethod = method === 'getWallets' @@ -84,6 +84,23 @@ module.exports = ( serialRequestsCount = 0 + if ( + res && + typeof res === 'object' && + !Array.isArray(res) && + Object.keys(res).length > 0 + ) { + write( + res, + stream, + formatSettings, + { ..._args.params }, + method + ) + processorQueue.emit('progress', 100) + + break + } if ( !res || !Array.isArray(res) || diff --git a/workers/loc.api/service.report.js b/workers/loc.api/service.report.js index bb56abf8..5c1399b2 100644 --- a/workers/loc.api/service.report.js +++ b/workers/loc.api/service.report.js @@ -648,211 +648,326 @@ class ReportService extends Api { }, 'getWeightedAveragesReport', args, cb) } - getMultipleCsv (space, args, cb) { + /** + * @deprecated + */ + getMultipleCsv (...args) { return this.getMultipleFile(...args) } + + getMultipleFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getMultipleCsvJobData', + return this._generateReportFile( + 'getMultipleFileJobData', args ) - }, 'getMultipleCsv', args, cb) + }, 'getMultipleFile', args, cb) } - getTradesCsv (space, args, cb) { + /** + * @deprecated + */ + getTradesCsv (...args) { return this.getTradesFile(...args) } + + getTradesFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getTradesCsvJobData', + return this._generateReportFile( + 'getTradesFileJobData', args ) - }, 'getTradesCsv', args, cb) + }, 'getTradesFile', args, cb) } - getFundingTradesCsv (space, args, cb) { + /** + * @deprecated + */ + getFundingTradesCsv (...args) { return this.getFundingTradesFile(...args) } + + getFundingTradesFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getFundingTradesCsvJobData', + return this._generateReportFile( + 'getFundingTradesFileJobData', args ) - }, 'getFundingTradesCsv', args, cb) + }, 'getFundingTradesFile', args, cb) } - getTickersHistoryCsv (space, args, cb) { + /** + * @deprecated + */ + getTickersHistoryCsv (...args) { return this.getTickersHistoryFile(...args) } + + getTickersHistoryFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getTickersHistoryCsvJobData', + return this._generateReportFile( + 
'getTickersHistoryFileJobData', args ) - }, 'getTickersHistoryCsv', args, cb) + }, 'getTickersHistoryFile', args, cb) } - getWalletsCsv (space, args, cb) { + /** + * @deprecated + */ + getWalletsCsv (...args) { return this.getWalletsFile(...args) } + + getWalletsFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getWalletsCsvJobData', + return this._generateReportFile( + 'getWalletsFileJobData', args ) - }, 'getWalletsCsv', args, cb) + }, 'getWalletsFile', args, cb) } - getPositionsHistoryCsv (space, args, cb) { + /** + * @deprecated + */ + getPositionsHistoryCsv (...args) { return this.getPositionsHistoryFile(...args) } + + getPositionsHistoryFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getPositionsHistoryCsvJobData', + return this._generateReportFile( + 'getPositionsHistoryFileJobData', args ) - }, 'getPositionsHistoryCsv', args, cb) + }, 'getPositionsHistoryFile', args, cb) } - getActivePositionsCsv (space, args, cb) { + /** + * @deprecated + */ + getActivePositionsCsv (...args) { return this.getActivePositionsFile(...args) } + + getActivePositionsFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getActivePositionsCsvJobData', + return this._generateReportFile( + 'getActivePositionsFileJobData', args ) - }, 'getActivePositionsCsv', args, cb) + }, 'getActivePositionsFile', args, cb) } - getPositionsAuditCsv (space, args, cb) { + /** + * @deprecated + */ + getPositionsAuditCsv (...args) { return this.getPositionsAuditFile(...args) } + + getPositionsAuditFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getPositionsAuditCsvJobData', + return this._generateReportFile( + 'getPositionsAuditFileJobData', args ) - }, 'getPositionsAuditCsv', args, cb) + }, 'getPositionsAuditFile', args, cb) } - getPublicTradesCsv (space, args, cb) { + /** + * @deprecated + */ + getPublicTradesCsv (...args) { return this.getPublicTradesFile(...args) } + + getPublicTradesFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getPublicTradesCsvJobData', + return this._generateReportFile( + 'getPublicTradesFileJobData', args ) - }, 'getPublicTradesCsv', args, cb) + }, 'getPublicTradesFile', args, cb) } - getStatusMessagesCsv (space, args, cb) { + /** + * @deprecated + */ + getStatusMessagesCsv (...args) { return this.getStatusMessagesFile(...args) } + + getStatusMessagesFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getStatusMessagesCsvJobData', + return this._generateReportFile( + 'getStatusMessagesFileJobData', args ) - }, 'getStatusMessagesCsv', args, cb) + }, 'getStatusMessagesFile', args, cb) } - getCandlesCsv (space, args, cb) { + /** + * @deprecated + */ + getCandlesCsv (...args) { return this.getCandlesFile(...args) } + + getCandlesFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getCandlesCsvJobData', + return this._generateReportFile( + 'getCandlesFileJobData', args ) - }, 'getCandlesCsv', args, cb) + }, 'getCandlesFile', args, cb) } - getLedgersCsv (space, args, cb) { + /** + * @deprecated + */ + getLedgersCsv (...args) { return this.getLedgersFile(...args) } + + getLedgersFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getLedgersCsvJobData', + return this._generateReportFile( + 'getLedgersFileJobData', args ) - }, 'getLedgersCsv', args, cb) + }, 'getLedgersFile', args, cb) } - getPayInvoiceListCsv (space, 
args, cb) { + /** + * @deprecated + */ + getPayInvoiceListCsv (...args) { return this.getPayInvoiceListFile(...args) } + + getPayInvoiceListFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getPayInvoiceListCsvJobData', + return this._generateReportFile( + 'getPayInvoiceListFileJobData', args ) - }, 'getPayInvoiceListCsv', args, cb) + }, 'getPayInvoiceListFile', args, cb) } - getOrderTradesCsv (space, args, cb) { + /** + * @deprecated + */ + getOrderTradesCsv (...args) { return this.getOrderTradesFile(...args) } + + getOrderTradesFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getOrderTradesCsvJobData', + return this._generateReportFile( + 'getOrderTradesFileJobData', args ) - }, 'getOrderTradesCsv', args, cb) + }, 'getOrderTradesFile', args, cb) } - getOrdersCsv (space, args, cb) { + /** + * @deprecated + */ + getOrdersCsv (...args) { return this.getOrdersFile(...args) } + + getOrdersFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getOrdersCsvJobData', + return this._generateReportFile( + 'getOrdersFileJobData', args ) - }, 'getOrdersCsv', args, cb) + }, 'getOrdersFile', args, cb) } - getActiveOrdersCsv (space, args, cb) { + /** + * @deprecated + */ + getActiveOrdersCsv (...args) { return this.getActiveOrdersFile(...args) } + + getActiveOrdersFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getActiveOrdersCsvJobData', + return this._generateReportFile( + 'getActiveOrdersFileJobData', args ) - }, 'getActiveOrdersCsv', args, cb) + }, 'getActiveOrdersFile', args, cb) } - getMovementsCsv (space, args, cb) { + /** + * @deprecated + */ + getMovementsCsv (...args) { return this.getMovementsFile(...args) } + + getMovementsFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getMovementsCsvJobData', + return this._generateReportFile( + 'getMovementsFileJobData', args ) - }, 'getMovementsCsv', args, cb) + }, 'getMovementsFile', args, cb) } - getFundingOfferHistoryCsv (space, args, cb) { + /** + * @deprecated + */ + getFundingOfferHistoryCsv (...args) { return this.getFundingOfferHistoryFile(...args) } + + getFundingOfferHistoryFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getFundingOfferHistoryCsvJobData', + return this._generateReportFile( + 'getFundingOfferHistoryFileJobData', args ) - }, 'getFundingOfferHistoryCsv', args, cb) + }, 'getFundingOfferHistoryFile', args, cb) } - getFundingLoanHistoryCsv (space, args, cb) { + /** + * @deprecated + */ + getFundingLoanHistoryCsv (...args) { return this.getFundingLoanHistoryFile(...args) } + + getFundingLoanHistoryFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getFundingLoanHistoryCsvJobData', + return this._generateReportFile( + 'getFundingLoanHistoryFileJobData', args ) - }, 'getFundingLoanHistoryCsv', args, cb) + }, 'getFundingLoanHistoryFile', args, cb) } - getFundingCreditHistoryCsv (space, args, cb) { + /** + * @deprecated + */ + getFundingCreditHistoryCsv (...args) { return this.getFundingCreditHistoryFile(...args) } + + getFundingCreditHistoryFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getFundingCreditHistoryCsvJobData', + return this._generateReportFile( + 'getFundingCreditHistoryFileJobData', args ) - }, 'getFundingCreditHistoryCsv', args, cb) + }, 'getFundingCreditHistoryFile', args, cb) } - getLoginsCsv (space, args, cb) { + /** + * 
@deprecated + */ + getLoginsCsv (...args) { return this.getLoginsFile(...args) } + + getLoginsFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getLoginsCsvJobData', + return this._generateReportFile( + 'getLoginsFileJobData', args ) - }, 'getLoginsCsv', args, cb) + }, 'getLoginsFile', args, cb) } - getChangeLogsCsv (space, args, cb) { + /** + * @deprecated + */ + getChangeLogsCsv (...args) { return this.getChangeLogsFile(...args) } + + getChangeLogsFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getChangeLogsCsvJobData', + return this._generateReportFile( + 'getChangeLogsFileJobData', args ) - }, 'getChangeLogsCsv', args, cb) + }, 'getChangeLogsFile', args, cb) } - getWeightedAveragesReportCsv (space, args, cb) { + /** + * @deprecated + */ + getWeightedAveragesReportCsv (...args) { return this.getWeightedAveragesReportFile(...args) } + + getWeightedAveragesReportFile (space, args, cb) { return this._responder(() => { - return this._generateCsv( - 'getWeightedAveragesReportCsvJobData', + return this._generateReportFile( + 'getWeightedAveragesReportFileJobData', args ) - }, 'getWeightedAveragesReportCsv', args, cb) + }, 'getWeightedAveragesReportFile', args, cb) } }