Backend | Part 2 — Adding a PostgreSQL Database

Part 0: Code + Demo + Directory Structure

Part 1: Root

1.a: /package.json

{
  "scripts": {
    "dev": "nodemon index.js",
    "tsc": "tsc --watch",
    "start": "node index.js",
    
    "e2e": "npx cypress open",
    "test": "cross-env NODE_ENV=testing jest --runInBand",
    "test:verbose": "cross-env NODE_ENV=testing jest --verbose --runInBand",
    "test:watch": "cross-env NODE_ENV=testing jest --watchAll --runInBand",
    
    "migrate": "knex migrate:latest",
    "migrateh": "heroku run knex migrate:latest -a postgresql-project",
    
    "rollback": "knex migrate:rollback",
    "rollbackh": "heroku run knex migrate:rollback -a postgresql-project",
    
    "seed": "knex seed:run",
    "seedh": "heroku run knex seed:run -a postgresql-project",
    
    "migrate-test": "NODE_ENV=testing   knex migrate:latest",
    "rollback-test": "NODE_ENV=testing  knex migrate:rollback",
    
    "seed-test": "NODE_ENV=testing      knex seed:run",
    
    "reset-db": "npm run rollback && npm run migrate && npm run seed",
    "reset-dbh": "npm run rollbackh && npm run migrateh && npm run seedh",
    
    "databaseh": "heroku pg:psql -a postgresql-project",
    "deploy": "git add . && git commit -m 'deploy' && git push heroku main && git push"
  },
  "engines": {
    "node": "18.15.0",
    "npm": "9.5.0"
  },
  "dependencies": {
    "bcryptjs": "^2.4.3",
    "body-parser": "^1.20.2",
    "compression": "^1.7.4",
    "cors": "2.8.5",
    "dotenv": "14.3.0",
    "ejs": "^3.1.9",
    "express": "4.17.2",
    "helmet": "5.0.2",
    "jsonwebtoken": "^8.5.1",
    "knex": "1.0.1",
    "knex-cleaner": "1.3.1",
    "morgan": "^1.10.0",
    "pg": "8.7.1",
    "stripe": "8.165.0"
  },
  "devDependencies": {
    "@types/express": "^4.17.17",
    "@types/jest": "27.4.0",
    "@types/node": "^20.5.9",
    "cross-env": "7.0.3",
    "cypress": "^13.1.0",
    "eslint": "8.7.0",
    "jest": "27.4.7",
    "nodemon": "2.0.15",
    "supertest": "6.2.2",
    "typescript": "^5.2.2"
  }
}

Note that <HEROKU-PROJECT-NAME> in the Heroku database scripts must be replaced with the name of your own Heroku app (postgresql-project in the package.json above). These scripts let us run migrations, rollbacks, and seeds against the deployed production database remotely.

  "migrateh": "heroku run knex migrate:latest -a <HEROKU-PROJECT-NAME>",
  "rollbackh": "heroku run knex migrate:rollback -a <HEROKU-PROJECT-NAME>",
  "seedh": "heroku run knex seed:run -a <HEROKU-PROJECT-NAME>",
  "databaseh": "heroku pg:psql -a <HEROKU-PROJECT-NAME>"

1.b: /index.js

require('./src/util/console'); // adds the colored console helpers (e.g. console.cyan) used below
const server = require('./src/api/server');

const port = process.env.PORT;

server.listen(port, () => {
  console.cyan(`index.js: http://localhost:${port}`);
});

1.c: /knexfile.js

require('dotenv').config(); // the knex CLI runs this file on its own (outside index.js), so it must load .env itself
require('./src/util/console');
console.magenta('knexfile.js');

// ==============================================

const NODE_ENV = process.env.NODE_ENV ?? 'development';
console.log('NODE_ENV: ', NODE_ENV);

// ==============================================

let DB_URL; 
if (NODE_ENV === 'production') {
  DB_URL = process.env.DATABASE_URL; // heroku
} else if (NODE_ENV === 'testing') {
  DB_URL = process.env.TESTING_DATABASE_URL; // .env
} else {
  DB_URL = process.env.DEV_DATABASE_URL; // .env
}

console.log('DB_URL: ', DB_URL);


// ==============================================

const pg = require('pg');
if (NODE_ENV === 'production') {
  pg.defaults.ssl = { rejectUnauthorized: false };
}

// ==============================================

const sharedConfig = {
  client: 'pg',
  migrations: { directory: './src/db/migrations' },
  seeds: { directory: './src/db/seeds' },
  connection: DB_URL, 
};

// ==============================================

module.exports = {
  development: sharedConfig,
  testing: sharedConfig,
  production: {
    ...sharedConfig,
    pool: { min: 2, max: 10 },
  },
};
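
The knexfile (and index.js) rely on a small helper at /src/util/console that is not shown in this part. A minimal sketch of what it could look like, assuming it simply patches the global console object with colored variants of console.log (the real implementation may differ):

// /src/util/console.js (sketch)
const colors = {
  cyan: '\x1b[36m',
  magenta: '\x1b[35m',
};
const reset = '\x1b[0m';

// attach console.cyan(...), console.magenta(...), etc.
Object.entries(colors).forEach(([name, code]) => {
  console[name] = (...args) => console.log(code, ...args, reset);
});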

1.d: /.env

PORT=9000
NODE_ENV=development
DEV_DATABASE_URL=postgresql://postgres:password@localhost:5432/database_name
TESTING_DATABASE_URL=postgresql://postgres:password@localhost:5432/testing_database_name

Put the above in your .env file. You will need to adjust a few parts of the connection URLs:

  • 5432: the default TCP port PostgreSQL listens on
  • postgres: in postgres:password, this is the default superuser
  • password: in postgres:password, replace with the password of the postgres user
  • database_name: the name of the development database you created in pgAdmin
  • testing_database_name: the name of the testing database you created in pgAdmin
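
If you prefer the command line to pgAdmin, the same databases can be created from a psql shell with CREATE DATABASE database_name; and CREATE DATABASE testing_database_name; (substitute whatever names you used in the connection URLs).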

Part 2: API

2.a: /src/api/server.js

require("dotenv").config();
const express = require('express');
const helmet = require('helmet');
const cors = require('cors');

// ==============================================

const server = express();
server.use(express.json());
server.use(helmet());
server.use(cors());

// ==============================================

const usersRouter = require('./users/routes');
server.use('/api/users', usersRouter);

// ==============================================

// Catch-All Endpoint
server.use('*', (req, res) => {
  res.status(404).json({
    message: '404 - Route not found 😔',
  });
});

// ==============================================

// error-handling middleware
server.use((err, req, res, next) => {
  console.log('err (in error-handling middleware [server.js]): ', err);
  res.status(err.status ?? 500).json({ message: err.message });
});

// ==============================================

module.exports = server;
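
One caveat: Express 4 does not automatically forward rejected promises from async route handlers to the error-handling middleware above, and the controllers below are async. A small wrapper (a sketch, not part of the original code; the file name is illustrative) can forward those errors:

// /src/util/catch-async.js (hypothetical helper)
// wraps an async route handler so any rejection is passed to next(),
// which hands the error to the error-handling middleware in server.js
module.exports = (handler) => (req, res, next) => {
  Promise.resolve(handler(req, res, next)).catch(next);
};

It would be used in the router, e.g. router.get('/', catchAsync(Controller.getUsers)).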

2.b: /src/api/users/routes.js

const express = require('express');
const router = express.Router();

const Controller = require('./controller');

router.get('/', Controller.getUsers );
router.post('/', Controller.insertUser);
router.get('/:id', Controller.getUserByID);
router.delete('/:id', Controller.deleteByID);
router.put('/:id', Controller.update);

module.exports = router;

2.c: /src/api/users/controller.js

const Model = require('./model');
const { hash } = require('../../util/hash');

// ==============================================

exports.getUsers = async (req, res) => {
  const users = await Model.getAll();
  res.status(200).json( users );
};

// ==============================================

// routes.js references Controller.getUserByID, so it is exported here as well
exports.getUserByID = async (req, res) => {
  const [user] = await Model.getById(+req.params.id);
  res.status(200).json( user );
};

// ==============================================

exports.insertUser = async (req, res) => {
  const user = req.body;
  user.password = hash(user.password);
  const inserted = await Model.insert( user );
  res.status(201).json( inserted );
};

// ==============================================

exports.deleteByID = async (req, res) => {
  const id = req.params.id;
  const deleted = await Model.remove(id);
  res.status(200).json( deleted );
};

// ==============================================

exports.update = async (req, res) => {
  const user = { ...req.body, id: req.params.id }; // the id comes from the URL, not the body
  const updated_user = await Model.update(user);
  res.status(200).json( updated_user );
};
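
The hash helper at /src/util/hash is not shown in this part. Since bcryptjs is in the dependencies, here is a minimal sketch of it, assuming it is a thin synchronous wrapper around bcryptjs (the real file may also export a compare helper for logins):

// /src/util/hash.js (sketch)
const bcrypt = require('bcryptjs');

// hash a plain-text password; 10 salt rounds is an assumption
exports.hash = (plainText) => bcrypt.hashSync(plainText, 10);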

2.d: /src/api/users/model.js

const db = require('../../db/db');
const { truncateStringFront } = require('../../util/string');

// ==============================================

async function getAll() {

  const users = await db('users');

  const mapped = users.map((user) => ({
    ...user,
    password: truncateStringFront({ str: user.password }),
  }));

  return mapped;
}

// ==============================================

function getByEmail(email) {
  return db('users').where('email', email);
}

// ==============================================

function getById(id) {
  return db('users').where('id', id);
}

// ==============================================

async function insert(user) {
  const [newUserObject] = await db('users').insert(user, [
    'id',
    'email',
    'password',
    'is_admin',
  ]);
  return newUserObject;
}

// ==============================================

async function remove(id) { 
  const to_be_deleted = await getById(+id);
  await db('users').where('id', +id).del();
  return to_be_deleted;
}

// ==============================================

async function update(user) {
  const num_rows_updated = await db('users')
    .where('id', +user.id)
    .update(user);
  const [updated_user] = await getById(+user.id);
  return updated_user;
}

// ==============================================

module.exports = {
  getAll,
  getByEmail,
  getById,
  insert,
  remove,
  update,
};
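
Like the other helpers, /src/util/string is not shown here. getAll uses truncateStringFront so that full password hashes are never returned to the client. A minimal sketch, assuming it keeps only the last few characters of a string behind an ellipsis (the parameter names and default length are assumptions):

// /src/util/string.js (sketch)

// keep only the last `length` characters of `str`, prefixed with '...'
// e.g. truncateStringFront({ str: 'abcdefgh' }) -> '...efgh'
function truncateStringFront({ str, length = 4 }) {
  if (typeof str !== 'string' || str.length <= length) return str;
  return `...${str.slice(-length)}`;
}

module.exports = { truncateStringFront };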

Part 3: Database

3.a: /src/db/db.js

const knex = require('knex')
const configs = require('../../knexfile')

module.exports = knex(configs[process.env.NODE_ENV ?? 'development'])

3.b: /src/db/migrations/__init-migration.js

exports.up = function (knex) {
  return knex.schema.createTable('users', (table) => {
    table.increments('id');                                   // auto-incrementing primary key
    table.string('email', 128).notNullable().unique();
    table.string('password', 128).notNullable();              // stores the bcrypt hash, not the plain text
    table.boolean('is_admin').notNullable().defaultTo(false);
  });
};

// ==============================================

exports.down = function (knex) {
  return knex.schema.dropTableIfExists('users');
};

// ==============================================

3.c: /src/db/seeds/01-cleanup.js

const { clean } = require('knex-cleaner')

// ==============================================

exports.seed = function (knex) {
  return clean(knex, {
    mode: 'truncate',
    ignoreTables: ['knex_migrations', 'knex_migrations_lock'],
  })
}

3.d: /src/db/seeds/02-users.js

const { hash } = require('../../util/hash');

// ==============================================

exports.seed = function (knex) {
  
  // --------------------------------------------
  
  const users = [
    {
      email: 'josh@josh.com',
      password: hash('josh'),
      is_admin: true,
    },
    {
      email: 'steve@apple.com',
      password: hash('steve'),
      is_admin: false,
    }
  ];

  // --------------------------------------------

  return knex('users').insert(users);

  // --------------------------------------------
};

// ==============================================
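
New migration and seed files can be generated with the knex CLI (npx knex migrate:make <name> and npx knex seed:make <name>); knex picks up the directories configured in knexfile.js, so the files land in /src/db/migrations and /src/db/seeds respectively.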

Part 4: Unit Tests

4.a: /src/__tests__/server.test.js

const request = require('supertest');
const server = require('../api/server');
const db = require('../db/db');

beforeAll(async () => {
  await db.migrate.rollback();
  await db.migrate.latest();
});
beforeEach(async () => {
  await db.seed.run();
});
afterAll(async () => {
  await db.destroy();
});

// ==============================================

it('sanity check', () => {
  expect(true).not.toBe(false)
});

// ==============================================

describe('server.js', () => {
  it('is the correct testing environment', async () => {
    expect(process.env.NODE_ENV).toBe('testing')
  })
});

// ==============================================

describe('db', () => {
  it('should return user ID 1', async () => {
    async function getAllUsers() { return db('users') };
    const users = await getAllUsers();
    expect(users[0].id).toBe(1);
  })
});

// ==============================================

describe('HTTP', () => {
  it('[GET] endpoint', async () => {
    const res = await request(server).get('/api/users');
    expect(res.status).toBe(200);
  });

  it('[POST] endpoint', async () => {
    const res = await request(server).post('/api/users').send({
      email: 'steve jobs',
      password: 'apple'
    });
    expect(res.status).toBe(201);
  });
});
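
The remaining endpoints can be covered in the same style. A sketch of a [DELETE] test to add inside the same describe('HTTP') block, assuming the seeded user with id 1 exists (as asserted above):

  it('[DELETE] endpoint', async () => {
    const res = await request(server).delete('/api/users/1');
    expect(res.status).toBe(200);

    // the deleted row should no longer be in the table
    const remaining = await db('users').where('id', 1);
    expect(remaining).toHaveLength(0);
  });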