Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Wr/export junction objects #1092

Merged
merged 14 commits into from
Oct 21, 2024
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions messages/exportApi.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,3 +23,7 @@ Processed %s records from query: %s
Query returned more than 200 records. Run the command using the --plan flag instead.
Record Count: %s
Query: %s

# noRecordsReturned

The query for %s returned 0 records.
WillieRuemmele marked this conversation as resolved.
Show resolved Hide resolved
9 changes: 6 additions & 3 deletions src/commands/data/export/tree.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,12 @@ import { orgFlags, prefixValidation } from '../../../flags.js';
import { ExportConfig, runExport } from '../../../export.js';
import type { DataPlanPart, SObjectTreeFileContents } from '../../../types.js';

export type ExportReturnType = DataPlanPart[] | SObjectTreeFileContents;
shetzel marked this conversation as resolved.
Show resolved Hide resolved

Messages.importMessagesDirectoryFromMetaUrl(import.meta.url);
const messages = Messages.loadMessages('@salesforce/plugin-data', 'tree.export');

export default class Export extends SfCommand<DataPlanPart[] | SObjectTreeFileContents> {
export default class Export extends SfCommand<ExportReturnType> {
public static readonly summary = messages.getMessage('summary');
public static readonly description = messages.getMessage('description');
public static readonly examples = messages.getMessages('examples');
Expand All @@ -24,6 +26,7 @@ export default class Export extends SfCommand<DataPlanPart[] | SObjectTreeFileCo
public static readonly flags = {
...orgFlags,
query: Flags.string({
multiple: true,
char: 'q',
summary: messages.getMessage('flags.query.summary'),
required: true,
Expand All @@ -45,7 +48,7 @@ export default class Export extends SfCommand<DataPlanPart[] | SObjectTreeFileCo
}),
};

public async run(): Promise<DataPlanPart[] | SObjectTreeFileContents> {
public async run(): Promise<ExportReturnType> {
const { flags } = await this.parse(Export);
if (flags.plan) {
this.warn(messages.getMessage('PlanJsonWarning'));
Expand All @@ -54,7 +57,7 @@ export default class Export extends SfCommand<DataPlanPart[] | SObjectTreeFileCo
outputDir: flags['output-dir'],
plan: flags.plan,
prefix: flags.prefix,
query: flags.query,
queries: flags.query,
conn: flags['target-org'].getConnection(flags['api-version']),
ux: new Ux({ jsonEnabled: this.jsonEnabled() }),
};
Expand Down
144 changes: 96 additions & 48 deletions src/export.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,29 +8,30 @@
import path from 'node:path';
import fs from 'node:fs';

import { Logger, Messages, SfError, Lifecycle, Connection } from '@salesforce/core';
import { Connection, Lifecycle, Logger, Messages, SfError } from '@salesforce/core';
import type { DescribeSObjectResult, QueryResult } from '@jsforce/jsforce-node';
import { Ux } from '@salesforce/sf-plugins-core';
import { ensure } from '@salesforce/ts-types';
import {
BasicRecord,
DataPlanPart,
hasNonEmptyNestedRecords,
GenericEntry,
hasNestedRecords,
hasNestedRecordsFilter,
hasNonEmptyNestedRecords,
SObjectTreeFileContents,
SObjectTreeInput,
GenericEntry,
hasNestedRecords,
} from './types.js';
import { hasUnresolvedRefs } from './api/data/tree/functions.js';
import { ExportReturnType } from './commands/data/export/tree.js';

Messages.importMessagesDirectoryFromMetaUrl(import.meta.url);
const messages = Messages.loadMessages('@salesforce/plugin-data', 'exportApi');

const DATA_PLAN_FILENAME_PART = 'plan.json';

export type ExportConfig = {
query: string;
queries: string[];
outputDir?: string;
plan?: boolean;
prefix?: string;
Expand All @@ -44,33 +45,50 @@ export type RefFromIdByType = Map<string, Map<string, string>>;
/** only used internally, but a more useful structure than the original */
type PlanFile = Omit<DataPlanPart, 'files'> & { contents: SObjectTreeFileContents; file: string; dir: string };

export const runExport = async (configInput: ExportConfig): Promise<DataPlanPart[] | SObjectTreeFileContents> => {
const { outputDir, plan, query, conn, prefix, ux } = validate(configInput);
const logger = Logger.childFromRoot('runExport');
const { records: recordsFromQuery } = await queryRecords(conn)(query);
const describe = await cacheAllMetadata(conn)(recordsFromQuery);

export const runExport = async (configInput: ExportConfig): Promise<ExportReturnType> => {
const { outputDir, plan, queries, conn, prefix, ux } = validate(configInput);
const refFromIdByType: RefFromIdByType = new Map();
const flatRecords = recordsFromQuery.flatMap(flattenNestedRecords);
flatRecords.map(buildRefMap(refFromIdByType)); // get a complete map of ID<->Ref

logger.debug(messages.getMessage('dataExportRecordCount', [flatRecords.length, query]));
const logger = Logger.childFromRoot('runExport');

const objectRecordMap = new Map<string, BasicRecord[]>();
if (outputDir) {
await fs.promises.mkdir(outputDir, { recursive: true });
}

await Promise.all(
queries.map(async (query) => {
const { records: recordsFromQuery } = await queryRecords(conn)(query);
await cacheAllMetadata(conn)(recordsFromQuery);

objectRecordMap.set(
recordsFromQuery.at(0)?.attributes.type ??
// try to match the object being queried
new RegExp(/from (\w+)/gim).exec(query)?.at(1) ??
'',
recordsFromQuery
);

// get a complete map of ID<->Ref
const flatRecords = recordsFromQuery.flatMap(flattenNestedRecords).map(buildRefMap(refFromIdByType));

logger.debug(messages.getMessage('dataExportRecordCount', [flatRecords.length, query]));
})
);

const allQueryValues = Array.from(objectRecordMap.values()).flat();
const describe = await cacheAllMetadata(conn)(allQueryValues);

if (plan) {
const planMap = reduceByType(
recordsFromQuery
allQueryValues
.flatMap(flattenWithChildRelationships(describe)())
.map(addReferenceIdToAttributes(refFromIdByType))
.map(removeChildren)
.map(replaceParentReferences(describe)(refFromIdByType))
.map(removeNonPlanProperties)
);

const planFiles = [...planMap.entries()].map(
const contentFiles = [...planMap.entries()].map(
([sobject, records]): PlanFile => ({
sobject,
contents: { records },
Expand All @@ -80,25 +98,48 @@ export const runExport = async (configInput: ExportConfig): Promise<DataPlanPart
dir: outputDir ?? '',
})
);
const output = planFiles.map(planFileToDataPartPlan);
const planName = getPrefixedFileName([...describe.keys(), DATA_PLAN_FILENAME_PART].join('-'), prefix);

const planContent = contentFiles.map(planFileToDataPartPlan);

const planName =
queries.length > 1
? DATA_PLAN_FILENAME_PART
: getPrefixedFileName([...describe.keys(), DATA_PLAN_FILENAME_PART].join('-'), prefix);

await Promise.all([
...planFiles.map(writePlanDataFile(ux)),
fs.promises.writeFile(path.join(outputDir ?? '', planName), JSON.stringify(output, null, 4)),
...contentFiles.map(writePlanDataFile(ux)),
fs.promises.writeFile(path.join(outputDir ?? '', planName), JSON.stringify(planContent, null, 4)),
]);
return output;
return planContent;
} else {
if (flatRecords.length > 200) {
// use lifecycle so warnings show up in stdout and in the json
await Lifecycle.getInstance().emitWarning(
messages.getMessage('dataExportRecordCountWarning', [flatRecords.length, query])
);
}
const contents = { records: processRecordsForNonPlan(describe)(refFromIdByType)(recordsFromQuery) };
const filename = path.join(outputDir ?? '', getPrefixedFileName(`${[...describe.keys()].join('-')}.json`, prefix));
ux.log(`wrote ${flatRecords.length} records to ${filename}`);
fs.writeFileSync(filename, JSON.stringify(contents, null, 4));
return contents;
const records: SObjectTreeInput[] = [];
objectRecordMap.forEach((basicRecords, query) => {
if (basicRecords.length > 200) {
// use lifecycle so warnings show up in stdout and in the json
void Lifecycle.getInstance().emitWarning(
messages.getMessage('dataExportRecordCountWarning', [basicRecords.length, query])
);
}
if (!basicRecords.length) {
// use lifecycle so warnings show up in stdout and in the json
void Lifecycle.getInstance().emitWarning(messages.getMessage('noRecordsReturned', [query]));
} else {
const contents = { records: processRecordsForNonPlan(describe)(refFromIdByType)(basicRecords) };
// if we have multiple queries, we'll accumulate all the processed records
records.push(...contents.records);
const filename = path.join(
outputDir ?? '',
// if we have multiple queries, name each file according to the object being queried, otherwise, join them together for previous behavior
getPrefixedFileName(`${queries.length > 1 ? query : [...describe.keys()].join('-')}.json`, prefix)
);
ux.log(`wrote ${basicRecords.length} records to ${filename}`);
fs.writeFileSync(filename, JSON.stringify(contents, null, 4));
}
});

return {
records,
};
}
};

Expand All @@ -121,7 +162,7 @@ const writePlanDataFile =
(ux: Ux) =>
async (p: PlanFile): Promise<void> => {
await fs.promises.writeFile(path.join(p.dir, p.file), JSON.stringify(p.contents, null, 4));
ux.log(`wrote ${p.contents.records.length} records to ${p.file}`);
ux.log(`wrote ${p.contents.records.length} records to ${path.join(p.dir, p.file)}`);
};

// future: use Map.groupBy() when it's available
Expand Down Expand Up @@ -207,7 +248,8 @@ export const replaceParentReferences =
const typeDescribe = ensure(describe.get(record.attributes.type), `Missing describe for ${record.attributes.type}`);
const replacedReferences = Object.fromEntries(
Object.entries(record)
.filter(isRelationshipFieldFilter(typeDescribe)) // only look at the fields that are references
.filter(isRelationshipFieldFilter(typeDescribe))
// only look at the fields that are references
// We can check describe to see what the type could be.
// If it narrows to only 1 type, pass that to refFromId.
// If it's polymorphic, don't pass a type because refFromId will need to check all the types.
Expand Down Expand Up @@ -261,23 +303,29 @@ const addReferenceIdToAttributes =
* @param config - The export configuration.
*/
const validate = (config: ExportConfig): ExportConfig => {
if (!config.query) {
if (!config.queries) {
throw new SfError(messages.getMessage('queryNotProvided'), 'queryNotProvided');
}

const filepath = path.resolve(process.cwd(), config.query);
if (fs.existsSync(filepath)) {
config.query = fs.readFileSync(filepath, 'utf8');

if (!config.query) {
throw messages.createError('queryNotProvided');
const queries: string[] = [];
config.queries.map((q) => {
const filepath = path.resolve(process.cwd(), q);
if (fs.existsSync(filepath)) {
const query = fs.readFileSync(filepath, 'utf8');

if (!config.queries) {
throw messages.createError('queryNotProvided');
}
// we've validate that the passed file is a valid soql statement, continue the iteration based on the assumption
// that q is a soql string, rather than a file name - this combines logic for --query contact.txt and --query "SELECT..."
q = query;
}
}

config.query = config.query.trim();
if (!config.query.toLowerCase().startsWith('select')) {
throw messages.createError('soqlInvalid', [config.query]);
}
if (!q.toLowerCase().startsWith('select')) {
throw messages.createError('soqlInvalid', [q]);
}
queries.push(q);
});
config.queries = queries;

return config;
};
Expand Down
4 changes: 2 additions & 2 deletions test/commands/data/tree/dataTree.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ describe('data:tree commands', () => {

it('should error with invalid soql', () => {
const result = execCmd(
`data:export:tree --query 'SELECT' --prefix INT --outputdir ${path.join('.', 'export_data')}`
`data:export:tree --query 'SELECT' --prefix INT --output-dir ${path.join('.', 'export_data')}`
);
const stdError = getString(result, 'shellOutput.stderr', '').toLowerCase();
const errorKeywords = ['malformed', 'check the soql', 'invalid soql query'];
Expand All @@ -54,7 +54,7 @@ describe('data:tree commands', () => {
});

execCmd(
`data:export:tree --query "${query}" --prefix INT --outputdir ${path.join('.', 'export_data')} --plan --json`,
`data:export:tree --query "${query}" --prefix INT --output-dir ${path.join('.', 'export_data')} --plan --json`,
{ ensureExitCode: 0 }
);

Expand Down
2 changes: 1 addition & 1 deletion test/commands/data/tree/dataTreeCommonChild.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ describe('data:tree commands with a polymorphic whatId (on tasks) shared between
);

execCmd(
`data:export:tree --query "${query}" --prefix ${prefix} --outputdir ${path.join(
`data:export:tree --query "${query}" --prefix ${prefix} --output-dir ${path.join(
'.',
'export_data'
)} --plan --json`,
Expand Down
4 changes: 2 additions & 2 deletions test/commands/data/tree/dataTreeDeep.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ describe('data:tree commands with more than 2 levels', () => {

it('should error with invalid soql', () => {
const result = execCmd(
`data:export:tree --query 'SELECT' --prefix INT --outputdir ${path.join('.', 'export_data')}`
`data:export:tree --query 'SELECT' --prefix INT --output-dir ${path.join('.', 'export_data')}`
);
const stdError = getString(result, 'shellOutput.stderr', '').toLowerCase();
const errorKeywords = ['malformed', 'check the soql', 'invalid soql query'];
Expand All @@ -54,7 +54,7 @@ describe('data:tree commands with more than 2 levels', () => {
});

execCmd(
`data:export:tree --query "${query}" --prefix INT --outputdir ${path.join('.', 'export_data')} --plan --json`,
`data:export:tree --query "${query}" --prefix INT --output-dir ${path.join('.', 'export_data')} --plan --json`,
{ ensureExitCode: 0 }
);

Expand Down
4 changes: 2 additions & 2 deletions test/commands/data/tree/dataTreeDeepBeta.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ describe('data:tree beta commands with more than 2 levels', () => {

it('should error with invalid soql', () => {
const result = execCmd(
`data:export:tree --query 'SELECT' --prefix ${prefix} --outputdir ${path.join('.', 'export_data')}`
`data:export:tree --query 'SELECT' --prefix ${prefix} --output-dir ${path.join('.', 'export_data')}`
);
const stdError = getString(result, 'shellOutput.stderr', '').toLowerCase();
const errorKeywords = ['malformed', 'check the soql', 'invalid soql query'];
Expand All @@ -55,7 +55,7 @@ describe('data:tree beta commands with more than 2 levels', () => {
});

execCmd(
`data:export:tree --query "${query}" --prefix ${prefix} --outputdir ${path.join(
`data:export:tree --query "${query}" --prefix ${prefix} --output-dir ${path.join(
'.',
'export_data'
)} --plan --json`,
Expand Down
23 changes: 22 additions & 1 deletion test/commands/data/tree/dataTreeJunction.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import path from 'node:path';
import { expect } from 'chai';
import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit';
import { ImportResult } from '../../../../src/api/data/tree/importTypes.js';
import { ExportReturnType } from '../../../../src/commands/data/export/tree.js';

describe('data:tree commands', () => {
let testSession: TestSession;
Expand Down Expand Up @@ -50,7 +51,7 @@ describe('data:tree commands', () => {
const query =
"select Id, Name, (Select Id, FirstName, LastName, (select AccountId, ContactId from AccountContactRoles) from Contacts), (select Id, ContactId, AccountId from AccountContactRelations where Account.Name != 'We Know Everybody') from Account where Name != 'Sample Account for Entitlements'";

execCmd(`data:export:tree --query "${query}" --outputdir ${path.join('.', 'export_data')} --plan --json`, {
execCmd(`data:export:tree --query "${query}" --output-dir ${path.join('.', 'export_data')} --plan --json`, {
ensureExitCode: 0,
});

Expand All @@ -67,4 +68,24 @@ describe('data:tree commands', () => {
);
expect(importResult.jsonOutput?.result.length).to.equal(12);
});

it('can export -> import junction with multiple queries', async () => {
const exportResult = execCmd<ExportReturnType>(
`data:export:tree --plan --output-dir ${path.join(
'.',
'junction'
)} --query "select AccountId, ContactId from AccountContactRole" --query "Select Id, AccountId, FirstName, LastName from Contact" --query "select Id, ContactId, AccountId from AccountContactRelation where Account.Name != 'We Know Everybody'" --query "select ID, Name from Account where Name != 'Sample Account for Entitlements'"`
);

expect(exportResult.shellOutput.stdout).to.include(
`records to ${path.join('.', 'junction', 'AccountContactRelation.json')}`
);
expect(exportResult.shellOutput.stdout).to.include(`records to ${path.join('junction', 'Account.json')}`);
expect(exportResult.shellOutput.stdout).to.include(`records to ${path.join('junction', 'Contact.json')}`);

execCmd<ImportResult[]>(
`data:import:tree --target-org importOrg --plan ${path.join('.', 'junction', 'plan.json')}`,
{ ensureExitCode: 0 }
);
});
});
2 changes: 1 addition & 1 deletion test/commands/data/tree/dataTreeMoreThan200.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ describe('data:tree commands with more than 200 records are batches in safe grou
expect(importResult.jsonOutput?.result.length).to.equal(265, 'Expected 265 records to be imported');

execCmd(
`data:export:tree --query "${query}" --prefix ${prefix} --outputdir ${path.join(
`data:export:tree --query "${query}" --prefix ${prefix} --output-dir ${path.join(
'.',
'export_data'
)} --plan --json`,
Expand Down
2 changes: 1 addition & 1 deletion test/commands/data/tree/dataTreeSelfReferencing.nut.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ describe('data:tree commands with records that refer to other records of the sam
});

execCmd(
`data:export:tree --query "${query}" --prefix INT --outputdir ${path.join('.', 'export_data')} --plan --json`,
`data:export:tree --query "${query}" --prefix INT --output-dir ${path.join('.', 'export_data')} --plan --json`,
{ ensureExitCode: 0 }
);

Expand Down