When I’m running WordPress inside a Docker container, I usually only mirror the wp-content directory from the container to the host. You shouldn’t change anything in the other WordPress directories anyway, and this way you avoid a lot of needless volume syncing.
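Such a setup might look like the following minimal docker-compose sketch; the image, the port, and the omitted database service are assumptions for illustration, the volumes entry is the relevant part:

version: "3"
services:
  wordpress:
    image: wordpress
    container_name: wp-container
    ports:
      - "8080:80"
    volumes:
      # only wp-content is mirrored to the host
      - ./wp-content:/var/www/html/wp-content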
The problem with this approach is that you can’t run the wp command from the host; you have to run WP-CLI inside Docker, which means logging in to the container, running the commands, and logging out again. Assuming your container is called wp-container, the following commands would do that:
docker exec -ti wp-container bash
wp plugin list
exit
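You can also do the same as a one-off, non-interactive command, which is exactly what the gulp task below does internally. Note the --allow-root flag: docker exec runs as root by default, and WP-CLI refuses to run as root without it.

docker exec wp-container bash -c 'wp --allow-root plugin list'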
It’s just a small inconvenience, but why not save a few commands when we can?
I’m using gulp as the task runner for most of my projects, so I added a task which forwards arbitrary commands to WP-CLI inside Docker. It even checks whether the container is running and whether WP-CLI is installed inside the container. If not, it will be installed automatically.
var gulp = require('gulp');
var exec = require('child_process').exec;

// Checks that all given containers show up in `docker ps`.
var checkContainers = function(names, done) {
  exec('docker ps --format {{.Names}}', function(error, stdout, stderr) {
    done(names.map(function(name) {
      return stdout.split('\n').indexOf(name) >= 0;
    }).reduce(function(running, next) {
      return running && next;
    }, true));
  });
};

// Exit code 127 means bash could not find the wp binary.
var checkWPCli = function(container, done) {
  exec('docker exec ' + container + ' bash -c \'wp\'', function(error, stdout, stderr) {}).on('exit', function(code) {
    done(127 !== code);
  });
};

// Downloads the wp-cli.phar and makes it available as `wp`.
// `less` is installed because WP-CLI uses it as its pager.
var installWPCli = function(container, done) {
  exec('docker exec ' + container + ' bash -c \'apt-get update && apt-get install -y less && curl -O https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli.phar && chmod +x wp-cli.phar && mv wp-cli.phar /usr/local/bin/wp && wp --allow-root cli\'', function(error, stdout, stderr) {}).on('exit', function(code) {
    done(0 === code);
  });
};

// Forwards everything after the -c flag to WP-CLI inside the container.
var runWPCli = function(container, done) {
  var flagIndex = process.argv.indexOf('-c');
  if (flagIndex < 0 || flagIndex + 1 >= process.argv.length) {
    console.log('ERROR: Provide a valid wp-cli command!');
    return done();
  }
  var command = process.argv.slice(flagIndex + 1);
  exec('docker exec ' + container + ' bash -c \'wp --allow-root ' + command.join(' ') + '\'', function(error, stdout, stderr) {
    console.log(stdout);
  }).on('exit', function(code) {
    done();
  });
};

gulp.task('wp', function(done) {
  checkContainers(['wp-container'], function(containersRunning) {
    if (!containersRunning) {
      console.log('ERROR: wp-container container is not running. Try "docker-compose up -d"');
      return done();
    }
    checkWPCli('wp-container', function(wpCliInstalled) {
      if (wpCliInstalled) {
        return runWPCli('wp-container', done);
      }
      console.log('WARNING: wp cli not installed, trying auto install ...');
      installWPCli('wp-container', function(installed) {
        if (!installed) {
          console.log('ERROR: wp cli could not be installed!');
          return done();
        }
        console.log('SUCCESS: wp cli installed!');
        runWPCli('wp-container', done);
      });
    });
  });
});
So, with that gulp task in place, you will be able to run the following commands and every other WP-CLI command as well:
gulp wp -c "plugin list"
gulp wp -c "plugin install advanced-custom-fields"
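Everything after the -c flag is forwarded verbatim to WP-CLI inside the container, so multi-word commands need quoting. Flags meant for WP-CLI travel along inside the quotes, as in this hypothetical example:

gulp wp -c "plugin install advanced-custom-fields --activate"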
What do you think?
Is that too much hassle for running a few wp commands, or would you use the gulp forwarding in your daily work? Let me know!