From 78b6fc3d92e47b9beb1c5c079a4d57212c932433 Mon Sep 17 00:00:00 2001 From: Paula Rinta-Harri Date: Wed, 29 Mar 2023 15:40:00 +0300 Subject: [PATCH 1/3] Revert changes after las release (#60) --- ...0308952_create-agegroup-completed-table.js | 45 ------- src/database/ageGroups.js | 49 -------- src/groups.js | 12 -- src/index.js | 40 ------- src/scripts/delete-activities-by-id.js | 35 ------ src/scripts/get-unused-activities.js | 106 ---------------- .../get_activity_data_with_correct_ids.js | 113 ------------------ src/scripts/migrate-activitydata.js | 92 -------------- src/scripts/migrate-activitygroupdata.js | 102 ---------------- 9 files changed, 594 deletions(-) delete mode 100644 migrations/1671010308952_create-agegroup-completed-table.js delete mode 100644 src/database/ageGroups.js delete mode 100644 src/scripts/delete-activities-by-id.js delete mode 100644 src/scripts/get-unused-activities.js delete mode 100644 src/scripts/get_activity_data_with_correct_ids.js delete mode 100644 src/scripts/migrate-activitydata.js delete mode 100644 src/scripts/migrate-activitygroupdata.js diff --git a/migrations/1671010308952_create-agegroup-completed-table.js b/migrations/1671010308952_create-agegroup-completed-table.js deleted file mode 100644 index cbaa38a..0000000 --- a/migrations/1671010308952_create-agegroup-completed-table.js +++ /dev/null @@ -1,45 +0,0 @@ -/* eslint-disable camelcase */ - -exports.shorthands = undefined - -exports.up = (pgm) => { - pgm.createType('agegroup_state', ['COMPLETED']) - pgm.createTable('completed_agegroup_entries', { - id: 'id', - user_guid: { - type: 'text', - notNull: true, - }, - created_at: { - type: 'timestamp', - default: pgm.func('NOW()'), - }, - created_by: { - type: 'text', - notNull: true, - }, - agegroup_guid: { - type: 'text', - notNull: true, - }, - completion_status: { - type: 'agegroup_state', - notNull: true, - }, - }) -} - -exports.down = async (pgm) => { - // Make sure we don't have any data we can lose in the table 
when doing a migrate down - const hasRows = - (await pgm.db.query('SELECT id FROM completed_agegroup_entries')).rows - .length > 0 - if (hasRows) { - throw new Error(` - Migrate down aborted - possible lose of data if completed_agegroup_entries table is dropped: \n - Make sure you've emptied the table before dropping. - `) - } - pgm.dropTable('completed_agegroup_entries') - pgm.dropType('agegroup_state') -} diff --git a/src/database/ageGroups.js b/src/database/ageGroups.js deleted file mode 100644 index b2d30c9..0000000 --- a/src/database/ageGroups.js +++ /dev/null @@ -1,49 +0,0 @@ -import { db } from './index' -import { createNotification } from './notifications' - -export async function postAgegroupEntry(ageGroupEntry) { - const { user_guid, created_by, agegroup_guid, completed, group_leader_name } = - ageGroupEntry - - try { - // Create an entry for the agegroup entry state change - const data = await db.one( - 'INSERT INTO completed_agegroup_entries(user_guid, created_by, agegroup_guid, completion_status) VALUES ($1, $2, $3, $4) RETURNING id', - [user_guid, created_by, agegroup_guid, completed] - ) - - const entry = await db.one( - 'SELECT agegroup_guid, completion_status FROM completed_agegroup_entries WHERE id = $1', - data.id - ) - - // Create a notification about the state change - const notification = await createNotification({ - itemGuid: agegroup_guid, - itemType: 'AGE_GROUP', - notificationType: completed, - userGuid: user_guid, - createdBy: created_by, - groupLeaderName: group_leader_name, - }) - if (!notification) { - throw new Error('Failed to create a notification.') - } - - return entry - } catch (error) { - console.error('post agegroup entry - error: ', error) - } -} - -export async function getAgeGroupEntries(user_guid) { - try { - const data = await db.any( - 'SELECT agegroup_guid, completion_status FROM completed_agegroup_entries WHERE user_guid = $1 ORDER BY created_at ASC', - user_guid.toString() - ) - return data - } catch (error) { - 
console.log('error', error) - } -} diff --git a/src/groups.js b/src/groups.js index 0257357..a6afed3 100644 --- a/src/groups.js +++ b/src/groups.js @@ -5,7 +5,6 @@ import { getGroupMembers, } from './kuksa' import { getTaskEntries, getTaskGroupEntries } from './database' -import { getAgeGroupEntries } from './database/ageGroups' async function getMemberData(groupMembers) { return Promise.all( @@ -14,9 +13,6 @@ async function getMemberData(groupMembers) { const allMemberTaskGroupEntries = await getTaskGroupEntries( groupMember.id.id ) - const allMemberAgeGroupEntries = await getAgeGroupEntries( - groupMember.id.id - ) const taskEntries = allMemberTaskEntries.reduce((acc, task) => { acc[task.task_guid] = task.completion_status return acc @@ -28,13 +24,6 @@ async function getMemberData(groupMembers) { }, {} ) - const ageGroupEntries = allMemberAgeGroupEntries.reduce( - (acc, agegroup) => { - acc[agegroup.agegroup_guid] = agegroup.completion_status - return acc - }, - {} - ) return Object.assign( {}, { @@ -44,7 +33,6 @@ async function getMemberData(groupMembers) { groupMember.name.firstname + ' ' + groupMember.name.lastname, memberTasks: taskEntries, memberTaskGroups: taskgroupEntries, - memberAgeGroups: ageGroupEntries, } ) }) diff --git a/src/index.js b/src/index.js index 0146367..dfdfbd5 100644 --- a/src/index.js +++ b/src/index.js @@ -25,7 +25,6 @@ import taskGroups from './taskGroups' import { deleteOldNotifications } from './database/notifications' import https from 'https' import fs from 'fs' -import { postAgegroupEntry, getAgeGroupEntries } from './database/ageGroups' require('dotenv').config() @@ -160,45 +159,6 @@ const main = async () => { }) }) - app.post( - '/groups/mark-agegroup-done/:agegroup_guid', - isLoggedIn, - isGroupLeader, - async (req, res) => { - try { - const userData = req.body - const promises = Object.values(userData.groups).map((userIds) => { - const promises = userIds.map((user_guid) => - Promise.resolve( - postAgegroupEntry({ - user_guid: 
Number(user_guid), - created_by: Number(req.user.membernumber), - agegroup_guid: req.params.agegroup_guid, - completed: 'COMPLETED', - group_leader_name: userData.group_leader_name, - }) - ) - ) - return promises - }) - const iterablePromises = [].concat.apply([], promises) - const entries = await Promise.all(iterablePromises) - res.json(entries).status(200) - } catch (e) { - res.status(e.statusCode).send(e.message) - } - } - ) - - app.get('/agegroup-entries', isLoggedIn, async (req, res) => { - try { - const entries = await getAgeGroupEntries(req.user.membernumber) - res.json(entries).status(200) - } catch (e) { - res.status(e.statusCode).send(e.message) - } - }) - app.post('/task-entry', isLoggedIn, async (req, res) => { try { const data = req.body diff --git a/src/scripts/delete-activities-by-id.js b/src/scripts/delete-activities-by-id.js deleted file mode 100644 index 733fa62..0000000 --- a/src/scripts/delete-activities-by-id.js +++ /dev/null @@ -1,35 +0,0 @@ -const fs = require('fs') -const path = require('path') -require('dotenv').config() -const pgp = require('pg-promise')() - -const db = pgp(process.env.DATABASE_URL) -const unusedActivities = require('./get-unused-activities') - -async function deleteTaskEntriesById(task_guid) { - try { - const data = await db.any( - 'DELETE FROM task_entries WHERE task_guid = $1 RETURNING task_guid', - [task_guid.toString()] - ) - - console.log('entries successfully deleted', data) - return data - } catch (error) { - console.log('delete taskentries - error', error) - } -} - -async function main() { - const oldIds = unusedActivities.main() - const resolvedIds = await oldIds - resolvedIds.forEach((task_guid) => { - try { - return deleteTaskEntriesById(task_guid) - } catch (error) { - console.log('error while migrating', error) - } - }) -} - -main() diff --git a/src/scripts/get-unused-activities.js b/src/scripts/get-unused-activities.js deleted file mode 100644 index eb64646..0000000 --- a/src/scripts/get-unused-activities.js 
+++ /dev/null @@ -1,106 +0,0 @@ -require('dotenv').config() -const fs = require('fs') -var path = require('path') -var request = require('request-promise') - -const DBURL = process.env.POF_BACKEND_STAGING -async function fetchActivitiesFromStrapi() { - try { - const countRes = await request(`${DBURL}/activities/count?_locale=fi`) - const activities = await request(`${DBURL}/activities?_limit=${countRes}`) - - return activities - } catch (e) { - console.log(`Error getting activities: ${e}`) - return null - } -} - -const sortArraysAscending = (array) => { - return array.sort(function (a, b) { - return a - b - }) -} - -const uniqueValues = (value, index, self) => { - return self.indexOf(value) === index -} - -function writeUnusedActivitiesToTxtFile(ids) { - const writeStream = fs.createWriteStream('unusedActivityIds.txt') - const pathName = writeStream.path - - // write each value of the array on the file breaking line - ids.forEach((value) => writeStream.write(`${value}\n`)) - - writeStream.on('finish', () => { - console.log(`wrote all the array data to file ${pathName}`) - }) - writeStream.on('error', (err) => { - console.error(`There is an error writing the file ${pathName} => ${err}`) - }) - writeStream.end() -} - -async function main() { - // Change filename below to match the csv-file with migration activities - const filePath = path.join(__dirname, './aktiviteetti_aa.csv') - // Read CSV - let file = fs.readFileSync(filePath, { encoding: 'utf-8' }, function (err) { - console.log(err) - }) - - // Split on row - file = file.split('\n') - - // Get first row for column headers - headers = file.shift().split(',') - - let json = [] - file.forEach(function (row) { - // Loop through each row - tmp = {} - row = row.split(',') - for (let i = 0; i < headers.length; i++) { - tmp[headers[i]] = row[i] - } - // Add object to list - json.push(tmp) - }) - - const activityIdsFromKuksa = json.map((row) => { - return row.activities_Partioaktiviteetti_Yhdistä1_aktiviteetti_View_id - 
}) - - const uniqueIdValuesInOrder = sortArraysAscending( - activityIdsFromKuksa.filter(uniqueValues) - ) - - const activityidsFromStrapiPromise = fetchActivitiesFromStrapi().then( - function (activities) { - const activitiesJson = JSON.parse(activities) - const ids = activitiesJson.map((activity) => { - return activity.id.toString() - }) - return sortArraysAscending(ids) - } - ) - - const activityIdsFromStrapi = await Promise.resolve( - activityidsFromStrapiPromise - ) - - const oldIdsFromKuksa = uniqueIdValuesInOrder.filter( - (x) => !activityIdsFromStrapi.includes(x) - ) - - if (oldIdsFromKuksa.length) { - writeUnusedActivitiesToTxtFile(oldIdsFromKuksa) - return oldIdsFromKuksa - } else { - console.log('No old ids') - } -} - -main() -module.exports = { main } diff --git a/src/scripts/get_activity_data_with_correct_ids.js b/src/scripts/get_activity_data_with_correct_ids.js deleted file mode 100644 index caa20a6..0000000 --- a/src/scripts/get_activity_data_with_correct_ids.js +++ /dev/null @@ -1,113 +0,0 @@ -// Run script with command: node get_activity_data_with_correct_ids.js FILEPATH activity/activitygroup - -require('dotenv').config({ path: '../../.env' }) -const fs = require('fs') -var path = require('path') -var request = require('request-promise') - -const appArgs2 = process.argv.slice(2) -const fileName = appArgs2[0] - -const appArgs3 = process.argv.slice(3) -const dataType = appArgs3[0] - -const DBURL = process.env.POF_BACKEND_PRODUCTION - -var strapiUrl = '' -var idColumnName = '' - -if (dataType == 'activity') { - strapiUrl = 'activities' - idColumnName = 'task_guid' -} else { - strapiUrl = 'activity-groups' - idColumnName = 'taskgroup_guid' -} - -// Fetch all activities from POF -async function fetchActivitiesFromStrapi() { - try { - const countRes = await request(`${DBURL}/${strapiUrl}/count?_locale=fi`) - const activities = await request(`${DBURL}/${strapiUrl}?_limit=${countRes}`) - return activities - } catch (e) { - console.log(`Error getting 
activities: ${e}`) - return null - } -} - -async function main() { - const activityidsFromStrapiPromise = fetchActivitiesFromStrapi().then( - function (activities) { - const activitiesJson = JSON.parse(activities) - return activitiesJson - } - ) - - const activityIdsFromStrapi = await Promise.resolve( - activityidsFromStrapiPromise - ) - - const activitiesJsonStrapio = activityIdsFromStrapi - console.log('Activities retrieved from pof') - - // Read CSV - const filePath = path.join(fileName) - let file = fs.readFileSync(filePath, { encoding: 'utf-8' }, function (err) { - console.log(err) - }) - - // Split on row - file = file.split('\n') - // Get first row for column headers - headers = file.shift().split(',') - let json = [] - console.log('Comparing csv file data and pof data') - file.forEach(function (row) { - // Loop through each row - rowJson = {} - row = row.split(',') - for (var i = 0; i < headers.length; i++) { - rowJson[headers[i]] = row[i] - } - - // Finf all wp_guid id's - if (rowJson[idColumnName].length > 7) { - rowJson[idColumnName] - for (var i = 0; i < activitiesJsonStrapio.length; i++) { - // Compare POF activity wp_guid to csv file task_guid id and if it is the same, replace task_guid with the correct id from POF - if (activitiesJsonStrapio[i].wp_guid == rowJson[idColumnName]) { - rowJson[idColumnName] = activitiesJsonStrapio[i].id - } - } - } - json.push(rowJson) - }) - convertJsonToCsv(json) -} - -// Convert corrected datat to csv and write it to file -function convertJsonToCsv(json) { - console.log('Creating CSV file') - var fields = Object.keys(json[0]) - var replacer = function (key, value) { - return value === null ? 
'' : value - } - var csv = json.map(function (row) { - return fields - .map(function (fieldName) { - return JSON.stringify(row[fieldName], replacer) - }) - .join(',') - }) - csv.unshift(fields.join(',')) - csv = csv.join('\r\n') - - fs.writeFile(`${strapiUrl}_data_281122.csv`, csv, (err) => { - if (err) console.error(err) - else console.log('New csv file created!') - }) - return csv -} - -main() diff --git a/src/scripts/migrate-activitydata.js b/src/scripts/migrate-activitydata.js deleted file mode 100644 index da92a1b..0000000 --- a/src/scripts/migrate-activitydata.js +++ /dev/null @@ -1,92 +0,0 @@ -const fs = require('fs') -var path = require('path') -require('dotenv').config() -const pgp = require('pg-promise')() - -const db = pgp(process.env.DATABASE_URL) - -// Give csv-file as an argument for script -const appArgs = process.argv.slice(2) -const fileName = appArgs[0] - -async function main() { - const filePath = path.join(__dirname, fileName) - // Read CSV - let file = fs.readFileSync(filePath, { encoding: 'utf-8' }, function (err) { - console.log(err) - }) - - // Split on row - file = file.split('\n') - - // Get first row for column headers - headers = file.shift().split(',') - - let json = [] - file.forEach(function (row) { - // Loop through each row - rowJson = {} - row = row.split(',') - for (var i = 0; i < headers.length; i++) { - rowJson[headers[i]] = row[i] - } - // Add object to list - json.push(rowJson) - }) - - const cs = new pgp.helpers.ColumnSet( - ['user_guid', 'created_at', 'created_by', 'task_guid', 'completion_status'], - { table: 'task_entries' } - ) - - function getNextData(t, pageIndex) { - let data = null - data = [] - let lowerLimit = 5000 * pageIndex - let upperLimit = lowerLimit + 5000 - if (upperLimit > json.length) { - upperLimit = json.length - } - - return new Promise((resolve, reject) => { - for (lowerLimit; lowerLimit < upperLimit; lowerLimit++) { - let entry = json[lowerLimit] - data.push({ - user_guid: entry.TAHTahoId, - 
created_at: entry.TPALuotu, - created_by: entry.TPALuoja, - task_guid: - entry.activities_Partioaktiviteetti_Yhdistä1_aktiviteetti_View_id, - completion_status: 'COMPLETED', - }) - } - if (data.length === 0) { - resolve(undefined) - } else { - resolve(data) - } - }) - } - - db.tx('massive-insert', (t) => { - const processData = (json) => { - if (json) { - const insert = pgp.helpers.insert(json, cs) - return t.none(insert) - } - } - return t.sequence((index) => getNextData(t, index).then(processData), { - track: true, - }) - }) - .then((data) => { - // COMMIT has been executed - console.log('Total batches:', data.total, ', Duration:', data.duration) - }) - .catch((error) => { - // ROLLBACK has been executed - console.log(error) - }) -} - -main() diff --git a/src/scripts/migrate-activitygroupdata.js b/src/scripts/migrate-activitygroupdata.js deleted file mode 100644 index 1bcc391..0000000 --- a/src/scripts/migrate-activitygroupdata.js +++ /dev/null @@ -1,102 +0,0 @@ -const fs = require('fs') -var path = require('path') -require('dotenv').config() -const pgp = require('pg-promise')() - -const db = pgp(process.env.DATABASE_URL) - -// Give csv-file as an argument for script -const appArgs = process.argv.slice(2) -const fileName = appArgs[0] - -async function main() { - const filePath = path.join(__dirname, fileName) - // Read CSV - let file = fs.readFileSync(filePath, { encoding: 'utf-8' }, function (err) { - console.log(err) - }) - - console.log(fileName) - // Split on row - file = file.split('\n') - - // Get first row for column headers - headers = file.shift().split(';') - - console.log() - let json = [] - file.forEach(function (row) { - // Loop through each row - rowJson = {} - row = row.split(';') - for (var i = 0; i < headers.length; i++) { - rowJson[headers[i]] = row[i] - } - // Add object to list - json.push(rowJson) - }) - - const cs = new pgp.helpers.ColumnSet( - ['user_guid', 'created_at', 'created_by', 'taskgroup_guid', 'completed'], - { table: 
'task_group_entries' } - ) - - function getNextData(t, pageIndex) { - let data = null - data = [] - let lowerLimit = 5000 * pageIndex - let upperLimit = lowerLimit + 5000 - if (upperLimit > json.length) { - upperLimit = json.length - } - - return new Promise((resolve, reject) => { - for (lowerLimit; lowerLimit < upperLimit; lowerLimit++) { - let entry = json[lowerLimit] - if (!entry.TMELuoja) { - console.log(entry) - } - const [date, time] = entry.TMELuotu.split(' ') - const [day, month, year] = date.split('.') - const [hours, minutes] = time.split(':') - const createdAtDate = new Date(year, month - 1, day, hours, minutes) - const createdBy = entry.TMELuoja.split(' ')[1] - data.push({ - user_guid: entry.TAHTahoId, - created_at: createdAtDate, - created_by: createdBy, - taskgroup_guid: entry.activity_group, - completed: 'COMPLETED', - }) - } - // console.log(data) - if (data.length === 0) { - resolve(undefined) - } else { - resolve(data) - } - }) - } - - db.tx('massive-insert', (t) => { - const processData = (json) => { - if (json) { - const insert = pgp.helpers.insert(json, cs) - return t.none(insert) - } - } - return t.sequence((index) => getNextData(t, index).then(processData), { - track: true, - }) - }) - .then((data) => { - // COMMIT has been executed - console.log('Total batches:', data.total, ', Duration:', data.duration) - }) - .catch((error) => { - // ROLLBACK has been executed - console.log('Error in massive-insert', error) - }) -} - -main() From 6c003cb25f5b978f3533a6c542a6c4703a1ef4d8 Mon Sep 17 00:00:00 2001 From: Paula Rinta-Harri Date: Wed, 29 Mar 2023 16:10:53 +0300 Subject: [PATCH 2/3] Fix Kuksa parameters to match with new response (#61) --- src/auth.js | 4 ++-- src/groups.js | 8 ++++---- src/profile.js | 8 ++++---- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/auth.js b/src/auth.js index 7dd1135..47c5e7e 100644 --- a/src/auth.js +++ b/src/auth.js @@ -54,8 +54,8 @@ module.exports.configurePassport = async (clientUrl) => { ) 
let ageGroup = 4 //Sudenpennut - if (memberData.age_groupId !== null) { - ageGroup = memberData.age_groupId + if (memberData.ageGroupId !== null) { + ageGroup = memberData.ageGroupId } scout.canMarkDone = !restrictedAgeGroups.includes(ageGroup) diff --git a/src/groups.js b/src/groups.js index a6afed3..8bb2bf8 100644 --- a/src/groups.js +++ b/src/groups.js @@ -28,7 +28,7 @@ async function getMemberData(groupMembers) { {}, { memberId: groupMember.id.id, - isGroupLeader: groupMember.is_leader, + isGroupLeader: groupMember.isLeader, memberName: groupMember.name.firstname + ' ' + groupMember.name.lastname, memberTasks: taskEntries, @@ -60,17 +60,17 @@ async function getAllGroups(userNumber) { const memberData = await getMemberData(groupMembers) const ageGroupId = (memberData[0] && - (await getMember(memberData[0].memberId)).age_groupId) || + (await getMember(memberData[0].memberId)).ageGroupId) || 4 return Object.assign( {}, { id: group.id, name: group.name, - ageGroup: groupInfo.age_groups[0], + ageGroup: groupInfo.ageGroups[0], ageGroupId, troop: groupInfo.troops.filter( - (troop) => troop.id === member.default_troop_id + (troop) => troop.id === member.defaultTroopId ), members: memberData, } diff --git a/src/profile.js b/src/profile.js index d2d6350..6e548fc 100644 --- a/src/profile.js +++ b/src/profile.js @@ -8,12 +8,12 @@ export async function getProfile(userNumber) { {}, { name: `${member.name.firstname} ${member.name.lastname}`, - defaultTroopId: member.default_troop_id, - ageGroupId: member.age_groupId || 4, - ageGroup: member.age_group || 'Sudenpennut', + defaultTroopId: member.defaultTroopId, + ageGroupId: member.ageGroupId || 4, + ageGroup: member.ageGroup || 'Sudenpennut', language: 'fi', troops: member.troops, - isLeader: member.is_leader || false, + isLeader: member.isLeader || false, image: memberImage, } ) From 42c44b807455085d606f5d1d24122c5ef78ba9da Mon Sep 17 00:00:00 2001 From: Paula Rinta-Harri Date: Fri, 31 Mar 2023 12:47:22 +0300 Subject: [PATCH 
3/3] Undo revert (#62) --- ...0308952_create-agegroup-completed-table.js | 45 +++++++ src/database/ageGroups.js | 49 ++++++++ src/groups.js | 12 ++ src/index.js | 40 +++++++ src/scripts/delete-activities-by-id.js | 35 ++++++ src/scripts/get-unused-activities.js | 106 ++++++++++++++++ .../get_activity_data_with_correct_ids.js | 113 ++++++++++++++++++ src/scripts/migrate-activitydata.js | 92 ++++++++++++++ src/scripts/migrate-activitygroupdata.js | 102 ++++++++++++++++ 9 files changed, 594 insertions(+) create mode 100644 migrations/1671010308952_create-agegroup-completed-table.js create mode 100644 src/database/ageGroups.js create mode 100644 src/scripts/delete-activities-by-id.js create mode 100644 src/scripts/get-unused-activities.js create mode 100644 src/scripts/get_activity_data_with_correct_ids.js create mode 100644 src/scripts/migrate-activitydata.js create mode 100644 src/scripts/migrate-activitygroupdata.js diff --git a/migrations/1671010308952_create-agegroup-completed-table.js b/migrations/1671010308952_create-agegroup-completed-table.js new file mode 100644 index 0000000..cbaa38a --- /dev/null +++ b/migrations/1671010308952_create-agegroup-completed-table.js @@ -0,0 +1,45 @@ +/* eslint-disable camelcase */ + +exports.shorthands = undefined + +exports.up = (pgm) => { + pgm.createType('agegroup_state', ['COMPLETED']) + pgm.createTable('completed_agegroup_entries', { + id: 'id', + user_guid: { + type: 'text', + notNull: true, + }, + created_at: { + type: 'timestamp', + default: pgm.func('NOW()'), + }, + created_by: { + type: 'text', + notNull: true, + }, + agegroup_guid: { + type: 'text', + notNull: true, + }, + completion_status: { + type: 'agegroup_state', + notNull: true, + }, + }) +} + +exports.down = async (pgm) => { + // Make sure we don't have any data we can lose in the table when doing a migrate down + const hasRows = + (await pgm.db.query('SELECT id FROM completed_agegroup_entries')).rows + .length > 0 + if (hasRows) { + throw new Error(` + 
Migrate down aborted - possible lose of data if completed_agegroup_entries table is dropped: \n + Make sure you've emptied the table before dropping. + `) + } + pgm.dropTable('completed_agegroup_entries') + pgm.dropType('agegroup_state') +} diff --git a/src/database/ageGroups.js b/src/database/ageGroups.js new file mode 100644 index 0000000..b2d30c9 --- /dev/null +++ b/src/database/ageGroups.js @@ -0,0 +1,49 @@ +import { db } from './index' +import { createNotification } from './notifications' + +export async function postAgegroupEntry(ageGroupEntry) { + const { user_guid, created_by, agegroup_guid, completed, group_leader_name } = + ageGroupEntry + + try { + // Create an entry for the agegroup entry state change + const data = await db.one( + 'INSERT INTO completed_agegroup_entries(user_guid, created_by, agegroup_guid, completion_status) VALUES ($1, $2, $3, $4) RETURNING id', + [user_guid, created_by, agegroup_guid, completed] + ) + + const entry = await db.one( + 'SELECT agegroup_guid, completion_status FROM completed_agegroup_entries WHERE id = $1', + data.id + ) + + // Create a notification about the state change + const notification = await createNotification({ + itemGuid: agegroup_guid, + itemType: 'AGE_GROUP', + notificationType: completed, + userGuid: user_guid, + createdBy: created_by, + groupLeaderName: group_leader_name, + }) + if (!notification) { + throw new Error('Failed to create a notification.') + } + + return entry + } catch (error) { + console.error('post agegroup entry - error: ', error) + } +} + +export async function getAgeGroupEntries(user_guid) { + try { + const data = await db.any( + 'SELECT agegroup_guid, completion_status FROM completed_agegroup_entries WHERE user_guid = $1 ORDER BY created_at ASC', + user_guid.toString() + ) + return data + } catch (error) { + console.log('error', error) + } +} diff --git a/src/groups.js b/src/groups.js index 8bb2bf8..9b358ff 100644 --- a/src/groups.js +++ b/src/groups.js @@ -5,6 +5,7 @@ import { 
getGroupMembers, } from './kuksa' import { getTaskEntries, getTaskGroupEntries } from './database' +import { getAgeGroupEntries } from './database/ageGroups' async function getMemberData(groupMembers) { return Promise.all( @@ -13,6 +14,9 @@ async function getMemberData(groupMembers) { const allMemberTaskGroupEntries = await getTaskGroupEntries( groupMember.id.id ) + const allMemberAgeGroupEntries = await getAgeGroupEntries( + groupMember.id.id + ) const taskEntries = allMemberTaskEntries.reduce((acc, task) => { acc[task.task_guid] = task.completion_status return acc @@ -24,6 +28,13 @@ async function getMemberData(groupMembers) { }, {} ) + const ageGroupEntries = allMemberAgeGroupEntries.reduce( + (acc, agegroup) => { + acc[agegroup.agegroup_guid] = agegroup.completion_status + return acc + }, + {} + ) return Object.assign( {}, { @@ -33,6 +44,7 @@ async function getMemberData(groupMembers) { groupMember.name.firstname + ' ' + groupMember.name.lastname, memberTasks: taskEntries, memberTaskGroups: taskgroupEntries, + memberAgeGroups: ageGroupEntries, } ) }) diff --git a/src/index.js b/src/index.js index dfdfbd5..0146367 100644 --- a/src/index.js +++ b/src/index.js @@ -25,6 +25,7 @@ import taskGroups from './taskGroups' import { deleteOldNotifications } from './database/notifications' import https from 'https' import fs from 'fs' +import { postAgegroupEntry, getAgeGroupEntries } from './database/ageGroups' require('dotenv').config() @@ -159,6 +160,45 @@ const main = async () => { }) }) + app.post( + '/groups/mark-agegroup-done/:agegroup_guid', + isLoggedIn, + isGroupLeader, + async (req, res) => { + try { + const userData = req.body + const promises = Object.values(userData.groups).map((userIds) => { + const promises = userIds.map((user_guid) => + Promise.resolve( + postAgegroupEntry({ + user_guid: Number(user_guid), + created_by: Number(req.user.membernumber), + agegroup_guid: req.params.agegroup_guid, + completed: 'COMPLETED', + group_leader_name: 
userData.group_leader_name, + }) + ) + ) + return promises + }) + const iterablePromises = [].concat.apply([], promises) + const entries = await Promise.all(iterablePromises) + res.json(entries).status(200) + } catch (e) { + res.status(e.statusCode).send(e.message) + } + } + ) + + app.get('/agegroup-entries', isLoggedIn, async (req, res) => { + try { + const entries = await getAgeGroupEntries(req.user.membernumber) + res.json(entries).status(200) + } catch (e) { + res.status(e.statusCode).send(e.message) + } + }) + app.post('/task-entry', isLoggedIn, async (req, res) => { try { const data = req.body diff --git a/src/scripts/delete-activities-by-id.js b/src/scripts/delete-activities-by-id.js new file mode 100644 index 0000000..733fa62 --- /dev/null +++ b/src/scripts/delete-activities-by-id.js @@ -0,0 +1,35 @@ +const fs = require('fs') +const path = require('path') +require('dotenv').config() +const pgp = require('pg-promise')() + +const db = pgp(process.env.DATABASE_URL) +const unusedActivities = require('./get-unused-activities') + +async function deleteTaskEntriesById(task_guid) { + try { + const data = await db.any( + 'DELETE FROM task_entries WHERE task_guid = $1 RETURNING task_guid', + [task_guid.toString()] + ) + + console.log('entries successfully deleted', data) + return data + } catch (error) { + console.log('delete taskentries - error', error) + } +} + +async function main() { + const oldIds = unusedActivities.main() + const resolvedIds = await oldIds + resolvedIds.forEach((task_guid) => { + try { + return deleteTaskEntriesById(task_guid) + } catch (error) { + console.log('error while migrating', error) + } + }) +} + +main() diff --git a/src/scripts/get-unused-activities.js b/src/scripts/get-unused-activities.js new file mode 100644 index 0000000..eb64646 --- /dev/null +++ b/src/scripts/get-unused-activities.js @@ -0,0 +1,106 @@ +require('dotenv').config() +const fs = require('fs') +var path = require('path') +var request = require('request-promise') + 
// ===========================================================================
// NOTE(review): this chunk is a whitespace-mangled paste of four migration
// scripts that were separate files under src/scripts/ in the original patch.
// Each script is wrapped in a `{ ... }` block below so their top-level
// `const` names (fs, path, DBURL, db, main, ...) do not collide when they
// share one file. Stray unified-diff markers (`+`, `diff --git`, `@@`) were
// extraction garbling and have been removed.
// ===========================================================================

// ---------------------------------------------------------------------------
// src/scripts/get-unused-activities.js
// Compares activity ids from a Kuksa csv export against the activities that
// exist in Strapi and writes the ids missing from Strapi to a txt file.
// Relies on `fs`, `path` and `request` (request-promise) being required
// earlier in the original file — TODO confirm against the full source.
// ---------------------------------------------------------------------------
{
  const DBURL = process.env.POF_BACKEND_STAGING

  // Fetch every activity from Strapi: ask for the count first, then request
  // all of them in one call via `_limit=count`. Returns the raw JSON string
  // response, or null on failure.
  async function fetchActivitiesFromStrapi() {
    try {
      const count = await request(`${DBURL}/activities/count?_locale=fi`)
      return await request(`${DBURL}/activities?_limit=${count}`)
    } catch (e) {
      console.log(`Error getting activities: ${e}`)
      return null
    }
  }

  // Ascending numeric sort on a copy, so the caller's array is not mutated.
  const sortArraysAscending = (array) => [...array].sort((a, b) => a - b)

  // Write each id on its own line to unusedActivityIds.txt.
  function writeUnusedActivitiesToTxtFile(ids) {
    const writeStream = fs.createWriteStream('unusedActivityIds.txt')
    const pathName = writeStream.path
    ids.forEach((value) => writeStream.write(`${value}\n`))
    writeStream.on('finish', () => {
      console.log(`wrote all the array data to file ${pathName}`)
    })
    writeStream.on('error', (err) => {
      console.error(`There is an error writing the file ${pathName} => ${err}`)
    })
    writeStream.end()
  }

  // Returns the Kuksa activity ids that no longer exist in Strapi (also
  // written to a txt file), or undefined when there are none.
  async function main() {
    // Change filename below to match the csv file with migration activities.
    const filePath = path.join(__dirname, './aktiviteetti_aa.csv')
    // fs.readFileSync is synchronous and takes no callback — the original
    // passed one that was silently ignored.
    const file = fs.readFileSync(filePath, { encoding: 'utf-8' })

    // Parse the csv: first row holds the column headers. Skip empty lines so
    // a trailing newline does not produce a bogus all-undefined record.
    const rows = file.split('\n').filter((row) => row.trim() !== '')
    const headers = rows.shift().split(',')
    const json = rows.map((row) => {
      const cells = row.split(',')
      const record = {}
      headers.forEach((header, i) => {
        record[header] = cells[i]
      })
      return record
    })

    const activityIdsFromKuksa = json.map(
      (row) => row.activities_Partioaktiviteetti_Yhdistä1_aktiviteetti_View_id
    )
    // Set-based dedupe instead of the original O(n²) indexOf filter.
    const uniqueIdValuesInOrder = sortArraysAscending([
      ...new Set(activityIdsFromKuksa),
    ])

    const activities = await fetchActivitiesFromStrapi()
    if (activities === null) {
      // fetchActivitiesFromStrapi already logged the error.
      return
    }
    const strapiIds = new Set(
      JSON.parse(activities).map((activity) => activity.id.toString())
    )

    const oldIdsFromKuksa = uniqueIdValuesInOrder.filter(
      (id) => !strapiIds.has(id)
    )

    if (oldIdsFromKuksa.length) {
      writeUnusedActivitiesToTxtFile(oldIdsFromKuksa)
      return oldIdsFromKuksa
    }
    console.log('No old ids')
  }

  main()
  module.exports = { main }
}

// ---------------------------------------------------------------------------
// src/scripts/get_activity_data_with_correct_ids.js
// Replaces wp_guid values in a Kuksa csv export with the matching Strapi
// ids, then writes the corrected data out as a new csv file.
// Run: node get_activity_data_with_correct_ids.js FILEPATH activity/activitygroup
// ---------------------------------------------------------------------------
{
  require('dotenv').config({ path: '../../.env' })
  const fs = require('fs')
  const path = require('path')
  const request = require('request-promise')

  // CLI arguments: csv file path and data type.
  const [fileName, dataType] = process.argv.slice(2)

  const DBURL = process.env.POF_BACKEND_PRODUCTION

  // Anything other than 'activity' falls back to activity groups, exactly
  // like the original if/else.
  const isActivity = dataType === 'activity'
  const strapiUrl = isActivity ? 'activities' : 'activity-groups'
  const idColumnName = isActivity ? 'task_guid' : 'taskgroup_guid'

  // Fetch all activities (or activity groups) from POF. Returns the raw
  // JSON string response, or null on failure.
  async function fetchActivitiesFromStrapi() {
    try {
      const count = await request(`${DBURL}/${strapiUrl}/count?_locale=fi`)
      return await request(`${DBURL}/${strapiUrl}?_limit=${count}`)
    } catch (e) {
      console.log(`Error getting activities: ${e}`)
      return null
    }
  }

  // Convert the corrected data to csv and write it to a file; also returns
  // the csv string.
  function convertJsonToCsv(json) {
    console.log('Creating CSV file')
    const fields = Object.keys(json[0])
    const replacer = (key, value) => (value === null ? '' : value)
    const lines = json.map((row) =>
      fields.map((field) => JSON.stringify(row[field], replacer)).join(',')
    )
    lines.unshift(fields.join(','))
    const csv = lines.join('\r\n')
    fs.writeFile(`${strapiUrl}_data_281122.csv`, csv, (err) => {
      if (err) console.error(err)
      else console.log('New csv file created!')
    })
    return csv
  }

  async function main() {
    const body = await fetchActivitiesFromStrapi()
    if (body === null) {
      // fetchActivitiesFromStrapi already logged the error.
      return
    }
    const activitiesFromStrapi = JSON.parse(body)
    console.log('Activities retrieved from pof')

    // Read CSV; first row is headers, empty lines are skipped.
    const file = fs.readFileSync(path.join(fileName), { encoding: 'utf-8' })
    const rows = file.split('\n').filter((row) => row.trim() !== '')
    const headers = rows.shift().split(',')

    console.log('Comparing csv file data and pof data')
    const json = rows.map((row) => {
      const cells = row.split(',')
      const record = {}
      headers.forEach((header, i) => {
        record[header] = cells[i]
      })
      // Values longer than 7 chars are wp_guids: replace them with the
      // matching Strapi id. The original crashed on rows missing the id
      // column; guard before reading .length.
      const guid = record[idColumnName]
      if (guid && guid.length > 7) {
        // Loose == kept from the original — wp_guid may not be a string;
        // assumes wp_guids are unique. TODO confirm.
        const match = activitiesFromStrapi.find((a) => a.wp_guid == guid)
        if (match) {
          record[idColumnName] = match.id
        }
      }
      return record
    })
    convertJsonToCsv(json)
  }

  main()
}

// ---------------------------------------------------------------------------
// src/scripts/migrate-activitydata.js
// Reads completed-activity rows from a Kuksa csv export and bulk-inserts
// them into the task_entries table in batches of 5000, inside a single
// transaction (pg-promise "massive insert" pattern).
// Usage: node migrate-activitydata.js FILENAME
// ---------------------------------------------------------------------------
{
  const fs = require('fs')
  const path = require('path')
  require('dotenv').config()
  const pgp = require('pg-promise')()

  const db = pgp(process.env.DATABASE_URL)

  // csv file is given as the first script argument
  const fileName = process.argv.slice(2)[0]
  const BATCH_SIZE = 5000

  async function main() {
    const filePath = path.join(__dirname, fileName)
    const file = fs.readFileSync(filePath, { encoding: 'utf-8' })

    // Parse csv; first row holds the column headers. Empty lines are
    // skipped — the original turned a trailing newline into a record of
    // undefined values and inserted it.
    const rows = file.split('\n').filter((row) => row.trim() !== '')
    const headers = rows.shift().split(',')
    const json = rows.map((row) => {
      const cells = row.split(',')
      const record = {}
      headers.forEach((header, i) => {
        record[header] = cells[i]
      })
      return record
    })

    const cs = new pgp.helpers.ColumnSet(
      ['user_guid', 'created_at', 'created_by', 'task_guid', 'completion_status'],
      { table: 'task_entries' }
    )

    // Map the next batch of up to BATCH_SIZE csv records to task_entries
    // rows; resolves undefined when exhausted, which ends t.sequence.
    function getNextData(t, pageIndex) {
      const start = BATCH_SIZE * pageIndex
      const batch = json.slice(start, start + BATCH_SIZE).map((entry) => ({
        user_guid: entry.TAHTahoId,
        created_at: entry.TPALuotu,
        created_by: entry.TPALuoja,
        task_guid:
          entry.activities_Partioaktiviteetti_Yhdistä1_aktiviteetti_View_id,
        completion_status: 'COMPLETED',
      }))
      // No explicit `new Promise` needed — the data is already in memory.
      return Promise.resolve(batch.length === 0 ? undefined : batch)
    }

    db.tx('massive-insert', (t) => {
      const processData = (batch) => {
        if (batch) {
          return t.none(pgp.helpers.insert(batch, cs))
        }
      }
      return t.sequence((index) => getNextData(t, index).then(processData), {
        track: true,
      })
    })
      .then((data) => {
        // COMMIT has been executed
        console.log('Total batches:', data.total, ', Duration:', data.duration)
      })
      .catch((error) => {
        // ROLLBACK has been executed
        console.log(error)
      })
  }

  main()
}

// ---------------------------------------------------------------------------
// src/scripts/migrate-activitygroupdata.js
// Same batch-insert pattern as migrate-activitydata.js, but for activity
// groups: the csv is `;`-separated, the Kuksa timestamp (dd.mm.yyyy hh:mm)
// is parsed into a Date, and rows go to the task_group_entries table.
// Usage: node migrate-activitygroupdata.js FILENAME
// ---------------------------------------------------------------------------
{
  const fs = require('fs')
  const path = require('path')
  require('dotenv').config()
  const pgp = require('pg-promise')()

  const db = pgp(process.env.DATABASE_URL)

  // csv file is given as the first script argument
  const fileName = process.argv.slice(2)[0]
  const BATCH_SIZE = 5000

  async function main() {
    const filePath = path.join(__dirname, fileName)
    const file = fs.readFileSync(filePath, { encoding: 'utf-8' })
    console.log(fileName)

    // `;`-separated csv, first row is headers; skip empty lines so the
    // timestamp parser below never sees an all-undefined record.
    const rows = file.split('\n').filter((row) => row.trim() !== '')
    const headers = rows.shift().split(';')
    const json = rows.map((row) => {
      const cells = row.split(';')
      const record = {}
      headers.forEach((header, i) => {
        record[header] = cells[i]
      })
      return record
    })

    const cs = new pgp.helpers.ColumnSet(
      ['user_guid', 'created_at', 'created_by', 'taskgroup_guid', 'completed'],
      { table: 'task_group_entries' }
    )

    // Parse "dd.mm.yyyy hh:mm" into a local-time Date (Date months are
    // 0-indexed).
    function parseKuksaTimestamp(value) {
      const [date, time] = value.split(' ')
      const [day, month, year] = date.split('.')
      const [hours, minutes] = time.split(':')
      return new Date(year, month - 1, day, hours, minutes)
    }

    // Map the next batch of up to BATCH_SIZE csv records to
    // task_group_entries rows; resolves undefined when exhausted.
    function getNextData(t, pageIndex) {
      const start = BATCH_SIZE * pageIndex
      const batch = json.slice(start, start + BATCH_SIZE).map((entry) => {
        if (!entry.TMELuoja) {
          // NOTE(review): kept from the original — logs the record, and the
          // split() below will still throw on a missing TMELuoja.
          console.log(entry)
        }
        return {
          user_guid: entry.TAHTahoId,
          created_at: parseKuksaTimestamp(entry.TMELuotu),
          // TMELuoja looks like "<id> <name>"; keep the second token.
          created_by: entry.TMELuoja.split(' ')[1],
          taskgroup_guid: entry.activity_group,
          completed: 'COMPLETED',
        }
      })
      return Promise.resolve(batch.length === 0 ? undefined : batch)
    }

    db.tx('massive-insert', (t) => {
      const processData = (batch) => {
        if (batch) {
          return t.none(pgp.helpers.insert(batch, cs))
        }
      }
      return t.sequence((index) => getNextData(t, index).then(processData), {
        track: true,
      })
    })
      .then((data) => {
        // COMMIT has been executed
        console.log('Total batches:', data.total, ', Duration:', data.duration)
      })
      .catch((error) => {
        // ROLLBACK has been executed
        console.log('Error in massive-insert', error)
      })
  }

  main()
}