fix(PostgreSQL): issue exporting tables with primary keys
Fabio286 committed Jul 30, 2024
1 parent ba0ffcc commit c434855
Showing 5 changed files with 45 additions and 151 deletions.
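
For context, a minimal sketch of the DDL this commit makes the exporter produce for a table with a primary key (all schema, table, column, sequence and constraint names below are hypothetical, chosen only for illustration). The primary key is now emitted as a named CONSTRAINT inside the CREATE TABLE statement built by getTableDll, and only non-primary indexes follow as separate CREATE INDEX / CREATE UNIQUE INDEX statements:

    CREATE SEQUENCE "public"."orders_id_seq"
        START WITH 1
        INCREMENT BY 1
        MINVALUE 1
        MAXVALUE 2147483647
        CACHE 1;

    CREATE TABLE "public"."orders"(
       "id" integer DEFAULT nextval('orders_id_seq'::regclass) NOT NULL,
       "customer_code" character varying(16) NOT NULL,
       "note" character varying(255),
       CONSTRAINT "orders_pkey" PRIMARY KEY ("id", "customer_code")
    );

    CREATE UNIQUE INDEX "orders_note_idx" ON "public"."orders" ("note");

Previously the DDL relied on pg_indexes.indexdef for every index (including the one backing the primary key) and never declared the PRIMARY KEY constraint itself; this sketch assumes the new getTableIndexes-based path shown in the PostgreSQLClient.ts diff below.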
2 changes: 1 addition & 1 deletion src/main/ipc-handlers/schema.ts
@@ -251,7 +251,7 @@ export default (connections: Record<string, antares.Client>) => {
setTimeout(() => { // Ensures that writing thread has finished
exporter?.terminate();
exporter = null;
}, 2000);
}, 500);
resolve({ status: 'success', response: payload });
break;
case 'cancel':
80 changes: 40 additions & 40 deletions src/main/libs/clients/PostgreSQLClient.ts
@@ -591,8 +591,8 @@ export class PostgreSQLClient extends BaseClient {
}
/* eslint-enable camelcase */

if (schema !== 'public')
await this.use(schema);
// if (schema !== 'public')
await this.use(schema);

const { rows } = await this.raw<antares.QueryResult<ShowIntexesResult>>(`WITH ndx_list AS (
SELECT pg_index.indexrelid, pg_class.oid
@@ -636,35 +636,7 @@
}, {} as {table: string; schema: string}[]);
}

// eslint-disable-next-line @typescript-eslint/no-unused-vars
async getTableDll ({ schema, table }: { schema: string; table: string }) {
// const { rows } = await this.raw<antares.QueryResult<{'ddl'?: string}>>(`
// SELECT
// 'CREATE TABLE ' || relname || E'\n(\n' ||
// array_to_string(
// array_agg(' ' || column_name || ' ' || type || ' '|| not_null)
// , E',\n'
// ) || E'\n);\n' AS ddl
// FROM (
// SELECT
// a.attname AS column_name
// , pg_catalog.format_type(a.atttypid, a.atttypmod) AS type
// , CASE WHEN a.attnotnull THEN 'NOT NULL' ELSE 'NULL' END AS not_null
// , c.relname
// FROM pg_attribute a, pg_class c, pg_type t
// WHERE a.attnum > 0
// AND a.attrelid = c.oid
// AND a.atttypid = t.oid
// AND c.relname = '${table}'
// ORDER BY a.attnum
// ) AS tabledefinition
// GROUP BY relname
// `);

// if (rows.length)
// return rows[0].ddl;
// else return '';

/* eslint-disable camelcase */
interface SequenceRecord {
sequence_catalog: string;
@@ -706,6 +678,34 @@

if (!rows.length) return '';

const indexes = await this.getTableIndexes({ schema, table });
const primaryKey = indexes
.filter(i => i.type === 'PRIMARY')
.reduce((acc, cur) => {
if (!Object.keys(acc).length) {
cur.column = `"${cur.column}"`;
acc = cur;
}
else
acc.column += `, "${cur.column}"`;
return acc;
}, {} as { name: string; column: string; type: string});

const remappedIndexes = indexes
.filter(i => i.type !== 'PRIMARY')
.reduce((acc, cur) => {
const existingIndex = acc.findIndex(i => i.name === cur.name);

if (existingIndex >= 0)
acc[existingIndex].column += `, "${cur.column}"`;
else {
cur.column = `"${cur.column}"`;
acc.push(cur);
}

return acc;
}, [] as { name: string; column: string; type: string}[]);

for (const column of rows) {
let fieldType = column.data_type;
if (fieldType === 'USER-DEFINED') fieldType = `"${schema}".${column.udt_name}`;
@@ -733,6 +733,9 @@
columnsSql.push(columnArr.join(' '));
}

if (primaryKey)
columnsSql.push(`CONSTRAINT "${primaryKey.name}" PRIMARY KEY (${primaryKey.column})`);

// Table sequences
for (let sequence of sequences) {
if (sequence.includes('.')) sequence = sequence.split('.')[1];
@@ -749,25 +752,22 @@
INCREMENT BY ${rows[0].increment}
MINVALUE ${rows[0].minimum_value}
MAXVALUE ${rows[0].maximum_value}
CACHE 1;\n`;
CACHE 1;\n\n`;
}
}

// Table create
createSql += `\nCREATE TABLE "${schema}"."${table}"(
createSql += `CREATE TABLE "${schema}"."${table}"(
${columnsSql.join(',\n ')}
);\n`;

// Table indexes
createSql += '\n';
const { rows: indexes } = await this.select('*')
.schema('pg_catalog')
.from('pg_indexes')
.where({ schemaname: `= '${schema}'`, tablename: `= '${table}'` })
.run<{indexdef: string}>();

for (const index of indexes)
createSql += `${index.indexdef};\n`;

for (const index of remappedIndexes) {
if (index.type !== 'PRIMARY')
createSql += `CREATE ${index.type}${index.type === 'UNIQUE' ? ' INDEX' : ''} "${index.name}" ON "${schema}"."${table}" (${index.column});\n`;
}

return createSql;
}
3 changes: 2 additions & 1 deletion src/main/libs/exporters/sql/MysqlExporter.ts
@@ -336,7 +336,8 @@ CREATE TABLE \`${view.Name}\`(
const connection = await this._client.getConnection();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const stream = (connection as any).connection.query(sql).stream();
const dispose = () => connection.end();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const dispose = () => (connection as any).release();

stream.on('end', dispose);
stream.on('error', dispose);
110 changes: 1 addition & 109 deletions src/main/libs/exporters/sql/PostgreSQLExporter.ts
@@ -39,115 +39,7 @@ SET row_security = off;\n\n\n`;
}

async getCreateTable (tableName: string) {
/* eslint-disable camelcase */
interface SequenceRecord {
sequence_catalog: string;
sequence_schema: string;
sequence_name: string;
data_type: string;
numeric_precision: number;
numeric_precision_radix: number;
numeric_scale: number;
start_value: string;
minimum_value: string;
maximum_value: string;
increment: string;
cycle_option: string;
}
/* eslint-enable camelcase */

let createSql = '';
const sequences = [];
const columnsSql = [];
const arrayTypes: Record<string, string> = {
_int2: 'smallint',
_int4: 'integer',
_int8: 'bigint',
_float4: 'real',
_float8: 'double precision',
_char: '"char"',
_varchar: 'character varying'
};

// Table columns
const { rows } = await this._client.raw(`
SELECT *
FROM "information_schema"."columns"
WHERE "table_schema" = '${this.schemaName}'
AND "table_name" = '${tableName}'
ORDER BY "ordinal_position" ASC
`, { schema: 'information_schema' });

if (!rows.length) return '';

for (const column of rows) {
let fieldType = column.data_type;
if (fieldType === 'USER-DEFINED') fieldType = `"${this.schemaName}".${column.udt_name}`;
else if (fieldType === 'ARRAY') {
if (Object.keys(arrayTypes).includes(fieldType))
fieldType = arrayTypes[column.udt_name] + '[]';
else
fieldType = column.udt_name.replaceAll('_', '') + '[]';
}

const columnArr = [
`"${column.column_name}"`,
`${fieldType}${column.character_maximum_length ? `(${column.character_maximum_length})` : ''}`
];

if (column.column_default) {
columnArr.push(`DEFAULT ${column.column_default}`);
if (column.column_default.includes('nextval')) {
const sequenceName = column.column_default.split('\'')[1];
sequences.push(sequenceName);
}
}
if (column.is_nullable === 'NO') columnArr.push('NOT NULL');

columnsSql.push(columnArr.join(' '));
}

// Table sequences
for (let sequence of sequences) {
if (sequence.includes('.')) sequence = sequence.split('.')[1];

const { rows } = await this._client
.select('*')
.schema('information_schema')
.from('sequences')
.where({ sequence_schema: `= '${this.schemaName}'`, sequence_name: `= '${sequence}'` })
.run<SequenceRecord>();

if (rows.length) {
createSql += `CREATE SEQUENCE "${this.schemaName}"."${sequence}"
START WITH ${rows[0].start_value}
INCREMENT BY ${rows[0].increment}
MINVALUE ${rows[0].minimum_value}
MAXVALUE ${rows[0].maximum_value}
CACHE 1;\n`;

// createSql += `\nALTER TABLE "${sequence}" OWNER TO ${this._client._params.user};\n\n`;
}
}

// Table create
createSql += `\nCREATE TABLE "${this.schemaName}"."${tableName}"(
${columnsSql.join(',\n ')}
);\n`;

// createSql += `\nALTER TABLE "${tableName}" OWNER TO ${this._client._params.user};\n\n`;

// Table indexes
createSql += '\n';
const { rows: indexes } = await this._client
.select('*')
.schema('pg_catalog')
.from('pg_indexes')
.where({ schemaname: `= '${this.schemaName}'`, tablename: `= '${tableName}'` })
.run<{indexdef: string}>();

for (const index of indexes)
createSql += `${index.indexdef};\n`;
const createSql = await this._client.getTableDll({ schema: this.schemaName, table: tableName });

// Table foreigns
const { rows: foreigns } = await this._client.raw(`
1 change: 1 addition & 0 deletions src/renderer/components/ModalExportSchema.vue
@@ -380,6 +380,7 @@ const startExport = async () => {
try {
const { status, response } = await Schema.export(params);
if (status === 'success')
progressStatus.value = response.cancelled ? t('general.aborted') : t('general.completed');
else {
