Commit 46b2b721 authored by Cole Erickson's avatar Cole Erickson
Browse files

Base script

parents
{
"pg": {
"host": "escp-postgres.byu.edu",
"post": 5432,
"database": "trouble-test",
"user": "csr",
"password": "password"
},
"sqlite": "/path/to/kanboard/db.sqlite"
}
const config = require('./config.json')
const pgClient = require('pg').Client;
const sqliteLib = require('sqlite');
// Handles to the two database connections; initialized inside doMigrate()
// (sqlite is the source Kanboard db, pg the destination Postgres db).
let sqlite;
let pg;
// Used to determine if a table or column name is invalid. Allows only alphanumeric and underscore
const validName = /^[a-z0-9_]+$/i
// Async wrapper function
// Async wrapper for the whole migration: open both databases, copy every
// table inside a single Postgres transaction, then commit — or roll back
// as a unit if anything fails.
const doMigrate = async ()=>{
  sqlite = await sqliteLib.open(config.sqlite);
  pg = new pgClient(config.pg);
  await pg.connect();
  console.log('Beginning transfer . . .');
  // Begin PG transaction
  await pg.query('BEGIN');
  // Wrap in try/catch in case something happens
  try{
    // Clear db
    await pg.query('truncate projects,links,groups,users,tags,settings,custom_filters,currencies,invites cascade')
    // Migrate tables. The order here is important - tables that other tables
    // depend on must be migrated first to avoid foreign key constraint
    // violations. The second (optional) parameter to migrateTable is a config object.
    // Columns listed with -1 will not be transferred (they don't exist in the new Kanboard schema),
    // and columns listed with a string will be renamed. The "end" column is merely escaped to avoid
    // issues with Postgres' end keyword
    await migrateTable('projects', {email: -1, predefined_email_subjects: -1});
    await migrateTable('groups');
    await migrateTable('users', {is_admin: -1, is_project_admin: -1, default_project_id: -1});
    await migrateTable('settings');
    await migrateTable('project_daily_stats')
    await migrateTable('project_has_categories')
    await migrateTable('project_has_files')
    await migrateTable('project_has_metadata')
    await migrateTable('project_has_notification_types')
    await migrateTable('swimlanes')
    await migrateTable('columns')
    await migrateTable('project_has_roles')
    await migrateTable('project_has_groups')
    await migrateTable('project_has_users', {id: -1, is_owner: -1})
    await migrateTable('user_has_notifications')
    await migrateTable('group_has_users')
    await migrateTable('last_logins')
    await migrateTable('password_reset')
    await migrateTable('remember_me')
    await migrateTable('user_has_metadata')
    await migrateTable('user_has_notification_types')
    await migrateTable('user_has_unread_notifications')
    await migrateTable('actions')
    await migrateTable('custom_filters')
    await migrateTable('tags')
    await migrateTable('links')
    await migrateTable('column_has_move_restrictions')
    await migrateTable('tasks')
    await migrateTable('project_daily_column_stats')
    await migrateTable('column_has_restrictions')
    await migrateTable('project_role_has_restrictions')
    await migrateTable('action_has_params')
    await migrateTable('transitions')
    await migrateTable('task_has_tags')
    await migrateTable('task_has_links')
    await migrateTable('subtasks')
    await migrateTable('comments')
    await migrateTable('task_has_external_links')
    await migrateTable('task_has_files')
    await migrateTable('task_has_metadata')
    await migrateTable('project_activities')
    await migrateTable('subtask_time_tracking', {end: '"end"'})
    // End pg transaction
    console.log('Committing all changes')
    await pg.query('COMMIT');
  } catch (err) {
    console.log('ERROR: Rolling back transaction')
    console.log(err);
    // Signal failure to the shell — previously the process exited with
    // code 0 even when the migration was rolled back.
    process.exitCode = 1;
    // Guard the rollback itself so a broken connection doesn't mask the
    // original error with an unhandled rejection.
    try{
      await pg.query('ROLLBACK')
    } catch (rollbackErr) {
      console.log('ERROR: Rollback failed');
      console.log(rollbackErr);
    }
  } finally {
    // pg.end() returns a promise; await it so the connection is actually
    // closed before we report completion.
    await pg.end();
    console.log('Transfer complete.');
  }
}
// This helper function will migrate a single table
// This helper function migrates a single table from sqlite to Postgres.
//   tableName    - name of the table (identical in both schemas)
//   columnConfig - optional map: column -> -1 to drop the column, or a
//                  string to rename/escape it in the INSERT statement.
// Throws if a table or column name fails the validName whitelist (these
// names are interpolated into SQL, so they must be validated).
const migrateTable = async (tableName, columnConfig = {})=>{
  if(!validName.test(tableName))
    throw new Error('Invalid table name ' + tableName);
  // Retrieve all rows
  const rows = await sqlite.all('select * from ' + tableName);
  // If there's no rows in the table don't bother
  if(!rows.length)
    return;
  // Source column names: keys of the first row, minus columns marked -1.
  // These are the names used to read values out of each sqlite row.
  const sourceColumns = Object.keys(rows[0])
    .filter(column=>{
      // Throw error if invalid name
      if(!validName.test(column))
        throw new Error('Invalid column name ' + column);
      // Remove if it's marked invalid in columnConfig
      return columnConfig[column] !== -1;
    });
  // Target column names: as they appear in the INSERT statement
  // (renamed/escaped per columnConfig, e.g. end -> "end").
  const targetColumns = sourceColumns.map(column=>columnConfig[column] || column);
  // Query text: tablename - join columns with commas, join column indexes (1-based) with commas
  const queryText = `INSERT INTO ${tableName}(${targetColumns.join(',')}) VALUES(${targetColumns.map((col,i)=>'$' + (i+1)).join(',')})`
  logWrap(queryText)
  // Insert each row
  for(let row of rows){
    // Build a query with our queryText.
    // BUG FIX: values must be looked up by the ORIGINAL sqlite column
    // name, not the renamed target name — previously a renamed column
    // (e.g. end -> '"end"') read row['"end"'], which is undefined, so
    // that column was silently inserted as NULL.
    const query = {
      text: queryText,
      // Use map to make sure the values are in the same order as how we joined the columns in the query text
      values: sourceColumns.map(col=>row[col])
    }
    // Actually execute the query
    await pg.query(query);
  }
  // Log how many were inserted
  console.log(' ' + rows.length + ' records inserted')
  // If the table has an id column, reset its auto-increment
  if(sourceColumns.includes('id'))
    await pg.query(`select setval(pg_get_serial_sequence('${tableName}', 'id'), coalesce(max(id),0) + 1, false) FROM ${tableName};`);
}
// Helper function for logging
// Helper function for logging: prints str, truncating anything longer
// than 77 characters and appending an ellipsis.
const logWrap = str=>{
  const limit = 77;
  if(str.length <= limit){
    console.log(str);
    return;
  }
  console.log(`${str.substring(0, limit)}...`);
}
// After everything's defined, call our async function.
// Attach a rejection handler so an error escaping doMigrate (e.g. a
// failure during ROLLBACK or pg.end) doesn't become an unhandled
// promise rejection with a success exit code.
doMigrate().catch(err => {
  console.log(err);
  process.exitCode = 1;
});
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment