Commit 859adfa, 1 parent (7058cf2). Showing 5 changed files with 448 additions and 0 deletions.
@@ -0,0 +1,35 @@
require('dotenv').config()
const pgp = require('pg-promise')()

const db = pgp(process.env.DATABASE_URL)
const unusedActivities = require('./get-unused-activities')

// Delete all task_entries rows that reference the given task_guid.
async function deleteTaskEntriesById(task_guid) {
  try {
    const data = await db.any(
      'DELETE FROM task_entries WHERE task_guid = $1 RETURNING task_guid',
      [task_guid.toString()]
    )

    console.log('entries successfully deleted', data)
    return data
  } catch (error) {
    console.log('delete task entries - error', error)
  }
}

async function main() {
  // get-unused-activities resolves to the task_guids that no longer exist in
  // Strapi; delete their entries one at a time so errors can actually be caught.
  const resolvedIds = await unusedActivities.main()
  if (!resolvedIds) {
    return
  }
  for (const task_guid of resolvedIds) {
    try {
      await deleteTaskEntriesById(task_guid)
    } catch (error) {
      console.log('error while migrating', error)
    }
  }
}

main()
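The script above deletes rows permanently, so it can be worth checking what would be removed before running it against a real database. A minimal dry-run sketch, reusing the db handle, table, and column from the code above (the function name is hypothetical and not part of the commit):

// Hypothetical dry-run helper (not in the commit): counts the task_entries
// rows that the delete above would remove for a single task_guid.
async function countTaskEntriesById(task_guid) {
  const result = await db.one(
    'SELECT count(*) FROM task_entries WHERE task_guid = $1',
    [task_guid.toString()]
  )
  return result.count
}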
@@ -0,0 +1,106 @@
require('dotenv').config()
const fs = require('fs')
const path = require('path')
const request = require('request-promise')

const DBURL = process.env.POF_BACKEND_STAGING

// Fetch every activity from the Strapi staging backend.
async function fetchActivitiesFromStrapi() {
  try {
    const countRes = await request(`${DBURL}/activities/count?_locale=fi`)
    const activities = await request(`${DBURL}/activities?_limit=${countRes}`)

    return activities
  } catch (e) {
    console.log(`Error getting activities: ${e}`)
    return null
  }
}

const sortArraysAscending = (array) => {
  return array.sort(function (a, b) {
    return a - b
  })
}

const uniqueValues = (value, index, self) => {
  return self.indexOf(value) === index
}

function writeUnusedActivitiesToTxtFile(ids) {
  const writeStream = fs.createWriteStream('unusedActivityIds.txt')
  const pathName = writeStream.path

  // Write each id on its own line.
  ids.forEach((value) => writeStream.write(`${value}\n`))

  writeStream.on('finish', () => {
    console.log(`wrote all the array data to file ${pathName}`)
  })
  writeStream.on('error', (err) => {
    console.error(`There is an error writing the file ${pathName} => ${err}`)
  })
  writeStream.end()
}

async function main() {
  // Change the filename below to match the csv file with migration activities.
  const filePath = path.join(__dirname, './aktiviteetti_aa.csv')
  // Read the CSV (readFileSync is synchronous and takes no callback).
  let file = fs.readFileSync(filePath, { encoding: 'utf-8' })

  // Split into rows
  file = file.split('\n')

  // Get the first row for column headers
  const headers = file.shift().split(',')

  const json = []
  file.forEach(function (row) {
    // Loop through each row
    const tmp = {}
    row = row.split(',')
    for (let i = 0; i < headers.length; i++) {
      tmp[headers[i]] = row[i]
    }
    // Add object to list
    json.push(tmp)
  })

  const activityIdsFromKuksa = json.map((row) => {
    return row.activities_Partioaktiviteetti_Yhdistä1_aktiviteetti_View_id
  })

  const uniqueIdValuesInOrder = sortArraysAscending(
    activityIdsFromKuksa.filter(uniqueValues)
  )

  const activityIdsFromStrapi = await fetchActivitiesFromStrapi().then(
    function (activities) {
      const activitiesJson = JSON.parse(activities)
      const ids = activitiesJson.map((activity) => {
        return activity.id.toString()
      })
      return sortArraysAscending(ids)
    }
  )

  // Ids that exist in the Kuksa export but not in Strapi are considered unused.
  const oldIdsFromKuksa = uniqueIdValuesInOrder.filter(
    (x) => !activityIdsFromStrapi.includes(x)
  )

  if (oldIdsFromKuksa.length) {
    writeUnusedActivitiesToTxtFile(oldIdsFromKuksa)
    return oldIdsFromKuksa
  } else {
    console.log('No old ids')
  }
}

// Run directly only when executed as a script, so that requiring this module
// (as the delete script above does) does not trigger a second run.
if (require.main === module) {
  main()
}
module.exports = { main }
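The CSV handling here (and in the other scripts in this commit) splits on newlines and commas, which works for simple exports but breaks if a field ever contains a quoted comma or line break. A sketch of the same rows-to-objects step using the csv-parse package, assuming that adding the dependency is acceptable (it is not part of this commit):

// Sketch only: equivalent of the manual split above, using csv-parse.
const { parse } = require('csv-parse/sync') // assumed extra dependency
const rows = parse(fs.readFileSync(filePath, 'utf-8'), {
  columns: true, // use the first row as object keys, like headers above
  skip_empty_lines: true,
})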
@@ -0,0 +1,113 @@
// Run script with command: node get_activity_data_with_correct_ids.js FILEPATH activity/activitygroup

require('dotenv').config({ path: '../../.env' })
const fs = require('fs')
const path = require('path')
const request = require('request-promise')

// Command line arguments: csv file path and data type.
const [fileName, dataType] = process.argv.slice(2)

const DBURL = process.env.POF_BACKEND_PRODUCTION

let strapiUrl = ''
let idColumnName = ''

if (dataType == 'activity') {
  strapiUrl = 'activities'
  idColumnName = 'task_guid'
} else {
  strapiUrl = 'activity-groups'
  idColumnName = 'taskgroup_guid'
}

// Fetch all activities (or activity groups) from POF
async function fetchActivitiesFromStrapi() {
  try {
    const countRes = await request(`${DBURL}/${strapiUrl}/count?_locale=fi`)
    const activities = await request(`${DBURL}/${strapiUrl}?_limit=${countRes}`)
    return activities
  } catch (e) {
    console.log(`Error getting activities: ${e}`)
    return null
  }
}

async function main() {
  const activitiesJsonStrapi = JSON.parse(await fetchActivitiesFromStrapi())
  console.log('Activities retrieved from pof')

  // Read CSV
  const filePath = path.join(fileName)
  let file = fs.readFileSync(filePath, { encoding: 'utf-8' })

  // Split into rows
  file = file.split('\n')
  // Get the first row for column headers
  const headers = file.shift().split(',')
  const json = []
  console.log('Comparing csv file data and pof data')
  file.forEach(function (row) {
    // Loop through each row
    const rowJson = {}
    row = row.split(',')
    for (let i = 0; i < headers.length; i++) {
      rowJson[headers[i]] = row[i]
    }

    // Find all wp_guid ids (guids are longer than plain numeric ids)
    if (rowJson[idColumnName].length > 7) {
      for (let i = 0; i < activitiesJsonStrapi.length; i++) {
        // Compare the POF activity wp_guid to the csv file guid and, if they
        // match, replace the guid with the correct id from POF.
        if (activitiesJsonStrapi[i].wp_guid == rowJson[idColumnName]) {
          rowJson[idColumnName] = activitiesJsonStrapi[i].id
        }
      }
    }
    json.push(rowJson)
  })
  convertJsonToCsv(json)
}

// Convert the corrected data to csv and write it to a file.
function convertJsonToCsv(json) {
  console.log('Creating CSV file')
  const fields = Object.keys(json[0])
  const replacer = function (key, value) {
    return value === null ? '' : value
  }
  let csv = json.map(function (row) {
    return fields
      .map(function (fieldName) {
        return JSON.stringify(row[fieldName], replacer)
      })
      .join(',')
  })
  csv.unshift(fields.join(','))
  csv = csv.join('\r\n')

  fs.writeFile(`${strapiUrl}_data_281122.csv`, csv, (err) => {
    if (err) console.error(err)
    else console.log('New csv file created!')
  })
  return csv
}

main()
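Going by the comment at the top of this script, a concrete invocation would look like the following (the csv path is illustrative, borrowed from the previous script):

node get_activity_data_with_correct_ids.js ./aktiviteetti_aa.csv activity

Note that any second argument other than activity, including a missing one, falls through to the activity-groups branch of the if/else above.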
@@ -0,0 +1,92 @@
const fs = require('fs')
const path = require('path')
require('dotenv').config()
const pgp = require('pg-promise')()

const db = pgp(process.env.DATABASE_URL)

// Give the csv file as an argument to the script
const appArgs = process.argv.slice(2)
const fileName = appArgs[0]

async function main() {
  const filePath = path.join(__dirname, fileName)
  // Read the CSV
  let file = fs.readFileSync(filePath, { encoding: 'utf-8' })

  // Split into rows
  file = file.split('\n')

  // Get the first row for column headers
  const headers = file.shift().split(',')

  const json = []
  file.forEach(function (row) {
    // Loop through each row
    const rowJson = {}
    row = row.split(',')
    for (let i = 0; i < headers.length; i++) {
      rowJson[headers[i]] = row[i]
    }
    // Add object to list
    json.push(rowJson)
  })

  const cs = new pgp.helpers.ColumnSet(
    ['user_guid', 'created_at', 'created_by', 'task_guid', 'completion_status'],
    { table: 'task_entries' }
  )

  // Return the next batch of up to 5000 rows, or undefined when the data runs out.
  function getNextData(t, pageIndex) {
    const data = []
    let lowerLimit = 5000 * pageIndex
    let upperLimit = lowerLimit + 5000
    if (upperLimit > json.length) {
      upperLimit = json.length
    }

    return new Promise((resolve) => {
      for (lowerLimit; lowerLimit < upperLimit; lowerLimit++) {
        const entry = json[lowerLimit]
        data.push({
          user_guid: entry.TAHTahoId,
          created_at: entry.TPALuotu,
          created_by: entry.TPALuoja,
          task_guid:
            entry.activities_Partioaktiviteetti_Yhdistä1_aktiviteetti_View_id,
          completion_status: 'COMPLETED',
        })
      }
      if (data.length === 0) {
        resolve(undefined)
      } else {
        resolve(data)
      }
    })
  }

  db.tx('massive-insert', (t) => {
    const processData = (batch) => {
      if (batch) {
        const insert = pgp.helpers.insert(batch, cs)
        return t.none(insert)
      }
    }
    return t.sequence((index) => getNextData(t, index).then(processData), {
      track: true,
    })
  })
    .then((data) => {
      // COMMIT has been executed
      console.log('Total batches:', data.total, ', Duration:', data.duration)
    })
    .catch((error) => {
      // ROLLBACK has been executed
      console.log(error)
    })
}

main()
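For context on the batching above: t.sequence keeps calling its source function with an increasing index, getNextData hands back up to 5,000 mapped rows per call, and a call that resolves to undefined ends the sequence so the transaction can commit; this mirrors pg-promise's documented data-import pattern. The same paging expressed with slice, as a sketch only (toTaskEntry stands in for the field mapping above and is hypothetical):

// Sketch only, not part of the commit: slice-based equivalent of getNextData.
const BATCH_SIZE = 5000
function getBatch(pageIndex) {
  const start = BATCH_SIZE * pageIndex
  const batch = json.slice(start, start + BATCH_SIZE)
  // An empty batch resolves to undefined, which ends t.sequence.
  return Promise.resolve(batch.length ? batch.map(toTaskEntry) : undefined)
}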