diff --git a/.env.example b/.env.example
index 7febfa1..4888e7e 100644
--- a/.env.example
+++ b/.env.example
@@ -7,3 +7,7 @@ MYSQL_ROOT_PASSWORD=
 
 # Set `WP_ENV` to `yourLastName-local` (ex. `smith-local`).
 WP_ENV=sparkuser-local
+
+# This will replace localhost URLs in the DB on exports, and be replaced by localhost URLs on imports
+# Replace "example.com" with your domain name (or the domain name for a staging environment)
+SITE_URL=https:\\/\\/example.com
diff --git a/.gitignore b/.gitignore
index 984c989..9a418c6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,4 +45,7 @@ coverage/
 
 # LightHouse Audit Results
 .lighthouseci/
-plugins/rollbar/
\ No newline at end of file
+plugins/rollbar/
+
+# DB imports/exports
+sql
diff --git a/docs/development.md b/docs/development.md
index b414462..9dd7d41 100644
--- a/docs/development.md
+++ b/docs/development.md
@@ -53,7 +53,36 @@ This theme uses the following files for linting:
 
 ## Syncing Environments
 
-**TBD**
+The preferred mechanism for syncing your environment with others is to use database imports and exports. This repo has a few scripts to make this process as easy as possible. While your containers are running, you can run any of these commands to import, export, or backup a database. Here are the quick commands, with more instructions below.
+
+```sh
+# import a DB from the `sql` folder
+npm run import-db
+
+# export your DB
+npm run export-db
+
+# export your DB with a custom name
+npm run export-db validation-data
+
+# backup your DB in case you need to restore it later
+npm run backup-db
+
+# backup your DB with a custom name
+npm run backup-db work-in-progress
+```
+
+### Importing Databases
+
+You can import databases from production, a saved backup, or another developer's DB export with the `import-db` script. To use it, put a `*.sql.gz` file in a top-level `sql` folder in the repo and run `npm run import-db`. This will first back up your existing database in case you need to revert back to it, and then it will import the database from the given file, effectively replacing your database with a new one.
+
+### Exporting Databases
+
+You can export your database for another developer to import or to import to a staging environment by running `npm run export-db`. By default, this will create a timestamped and gzipped file in `sql/exports`, but you can specify a name by running `npm run export-db <name>`. The exported file will still be timestamped, but it will use the name you give it instead of the default prefix.
+
+### Backing Up Databases
+
+This will happen automatically when you import a database, but if you want to manually backup your database, you can run `npm run backup-db`. This functions nearly identically to the `export-db` script, except for using a different prefix and putting the file in `sql/backups`. As with `export-db`, you can specify a name for your DB backup if you want.
 
 ## Atom
 
diff --git a/package.json b/package.json
index a7b37d9..a4d4114 100644
--- a/package.json
+++ b/package.json
@@ -30,6 +30,10 @@
     "lint:twig": "./scripts/run.sh vendor/bin/twigcs src/php/views",
     "lint": "run-s lint:*",
     "php:run": "./scripts/run.sh",
+    "export-db": "./scripts/export-db.sh",
+    "backup-db": "BACKUP=true ./scripts/export-db.sh",
+    "import-db": "./scripts/import-db.sh",
+    "preimport-db": "npm run backup-db",
     "node-version": "check-node-version --package"
   },
   "engines": {
diff --git a/scripts/export-db.sh b/scripts/export-db.sh
new file mode 100755
index 0000000..bc5f447
--- /dev/null
+++ b/scripts/export-db.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Load env vars from .env (this is also what turns SITE_URL's \\/ into \/ for sed)
+export $(grep -v '^#' .env | xargs)
+
+timestamp=$(date -u +%Y-%m-%dT%H-%M-%S_%Z)
+path='sql/exports'
+prefix='db-export'
+
+# BACKUP=true is set by the `backup-db` npm script; quote-safe non-empty test
+if [ -n "$BACKUP" ]
+then
+  path='sql/backups'
+  prefix='db-backup'
+fi
+
+filename="$path/$prefix-$timestamp.sql"
+
+# optional custom name prefix passed as first argument
+if [ -n "$1" ]
+then
+  filename="$path/$1-$timestamp.sql"
+fi
+
+# create folders if they don't already exist
+mkdir -p "$path"
+
+# generate SQL dump
+docker exec -i sparkpress_db mysqldump --user=root --password="$MYSQL_ROOT_PASSWORD" "$MYSQL_DATABASE" > "$path/$prefix-raw.sql"
+
+# replace localhost URLs with target environment URL
+sed "s/http:\/\/localhost:8000/$SITE_URL/g" "$path/$prefix-raw.sql" > "$filename"
+
+# gzip the DB export file
+gzip "$filename"
+
+# clean up file that's not useful after export
+rm "$path/$prefix-raw.sql"
diff --git a/scripts/import-db.sh b/scripts/import-db.sh
new file mode 100755
index 0000000..e009ff1
--- /dev/null
+++ b/scripts/import-db.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+export $(grep -v '^#' .env | xargs)
+
+# copy the most recent .sql.gz file in the sql folder for import
+cp "$(ls -t sql/*.sql.gz | head -1)" sql/db-import-raw.sql.gz
+if [ $? -ne 0 ]
+then
+  echo "There must be at least one .sql.gz file in the sql folder to import"
+  exit 1
+fi
+
+gunzip sql/db-import-raw.sql.gz
+
+# replace environment-specific URLs with localhost URL
+sed "s/$SITE_URL/http:\/\/localhost:8000/g" sql/db-import-raw.sql > sql/db-import.sql
+
+# drop existing database, create a new one, and load it up with data
+docker exec -i sparkpress_db mysql --user="$MYSQL_USER" --password="$MYSQL_PASSWORD" -e "drop database if exists $MYSQL_DATABASE"
+docker exec -i sparkpress_db mysql --user="$MYSQL_USER" --password="$MYSQL_PASSWORD" -e "create database $MYSQL_DATABASE"
+docker exec -i sparkpress_db mysql --user="$MYSQL_USER" --password="$MYSQL_PASSWORD" "$MYSQL_DATABASE" < sql/db-import.sql
+
+# clean up files that aren't useful after import
+rm sql/db-import*
+
+mkdir -p sql/previous-imports
+mv sql/*.sql.gz sql/previous-imports