Back up a Postgres DB and uploads directory and store them on Google Drive
Here is my Thor task that backs up the database and the public/uploads directory for one of my projects. The backups are gzipped and uploaded to Google Drive, and old copies are cleaned up both locally and on Drive.
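With Thor's standard CLI the task runs as a one-off command, which also makes it easy to schedule. A minimal sketch, assuming the Thorfile below registers the task; the paths here are hypothetical, only the env var names come from the task itself:

# manual run
DB_BACKUP_DIR_PATH=/var/backups PUBLIC_DIR_PATH=/srv/app/public bundle exec thor server_db:backup_to_drive

# crontab: the same command nightly at 03:00
0 3 * * * cd /srv/app && DB_BACKUP_DIR_PATH=/var/backups PUBLIC_DIR_PATH=/srv/app/public bundle exec thor server_db:backup_to_drive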
# lib/tasks/server_db.thor
# Date.current, String#to_date, Array#second and 2.megabytes come from ActiveSupport;
# FileUtils and YAML are stdlib. All are assumed to be loaded via the Thorfile/app environment.
class ServerDb < MightyThor
  include Memery

  desc 'backup_to_drive', 'Backup DB and upload to google drive'
  def backup_to_drive
    system(dump_database_cmd, exception: true)
    system(gzip_db_cmd, exception: true)
    system(remove_sql_file, exception: true)
    system(gzip_uploads_cmd, exception: true)
    keep_last_x_db_backups(5)
    keep_last_x_uploads_backups(3)
    upload_gzip_db_to_drive
    upload_gzip_uploads_to_drive
    msg = 'Success!'
    logger.info(msg)
    say_with_time(msg)
  rescue Exception => e # rubocop:disable Lint/RescueException
    logger.error(e)
    raise
  end

  private

  # Shell commands: dump the DB to db.sql, then tar+gzip it and the uploads dir
  def dump_database_cmd = "pg_dump --no-privileges --no-owner -U #{username} #{database} > #{sql_path}"
  def gzip_db_cmd = "tar --create --gzip --directory #{backup_dir} --file #{gzip_db_file_path} #{sql_file_name}"
  def gzip_uploads_cmd = "tar --create --gzip --directory #{public_dir} --file #{gzip_uploads_file_path} uploads"
  def gzip_db_file_name = "db-#{Date.current}.tar.gz"
  def gzip_db_file_path = File.join(backup_dir, gzip_db_file_name)
  def gzip_uploads_file_name = "uploads-#{Date.current}.tar.gz"
  def gzip_uploads_file_path = File.join(backup_dir, gzip_uploads_file_name)
  def remove_sql_file = "rm #{sql_path}"
  def backup_dir = ENV.fetch('DB_BACKUP_DIR_PATH')
  def public_dir = ENV.fetch('PUBLIC_DIR_PATH')
  def sql_file_name = 'db.sql'
  def sql_path = File.join(backup_dir, sql_file_name)

  # DB name and user are read from the app's database.yml
  memoize def config = YAML.load_file('config/database.yml')['production'].symbolize_keys
  def database = config[:database]
  def username = config[:username]
  # def password = config[:password]

  def upload_gzip_db_to_drive
    upload_gzip_to_drive(gzip_db_file_path, Credentials[:db_backup_drive_dir_id])
  end

  def upload_gzip_uploads_to_drive
    upload_gzip_to_drive(gzip_uploads_file_path, Credentials[:db_backup_drive_dir_id])
  end

  def upload_gzip_to_drive(gzip_file_path, parent_dir_id)
    drive_file = Google::Apis::DriveV3::File.new(
      name: File.basename(gzip_file_path),
      parents: [parent_dir_id]
    )
    drive.create_file(drive_file, upload_source: gzip_file_path, content_type: 'application/gzip')
  end

  def keep_last_x_db_backups(leave_count)
    keep_last_x_backups(leave_count, backup_dir, 'db-')
  end

  def keep_last_x_uploads_backups(leave_count)
    keep_last_x_backups(leave_count, backup_dir, 'uploads-')
  end

  def keep_last_x_backups(leave_count, target_dir, file_prefix)
    # Local cleanup: glob results are sorted (Ruby >= 3.0) and the ISO date in the
    # file name sorts lexicographically, so `reverse` puts the newest backups first
    Dir["#{target_dir}/#{file_prefix}*.tar.gz"].reverse[leave_count..]&.then { FileUtils.rm(_1) }
    # Remote cleanup: keep only the newest leave_count files with this prefix on Drive
    res = drive.list_files
    sorted_files = res.files.
      filter_map { |file| [file.name[/\d{4}-\d{2}-\d{2}/].to_date, file] if file.name.start_with?(file_prefix) }.
      sort_by(&:first).map(&:second).reverse
    sorted_files[leave_count..]&.each do |file|
      drive.delete_file(file.id)
    end
  end

  # Drive client authorized via a service account key file
  memoize def drive =
    Google::Apis::DriveV3::DriveService.new.tap do |s|
      s.authorization = Google::Auth::ServiceAccountCredentials.make_creds(
        json_key_io: File.open('config/google-drive-service-secret.json'),
        scope: Google::Apis::DriveV3::AUTH_DRIVE_FILE
      )
    end

  memoize def logger = MyLogger.new('log/db_backup.log', 5, 2.megabytes)
end
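Credentials and MyLogger are project-specific helpers the post doesn't show. Purely as an illustration of what they might look like (my assumptions, not the author's code): Credentials as bracket access over a YAML file holding the Drive folder ID, and MyLogger as a stdlib Logger set up for size-based rotation, matching the ('log/db_backup.log', 5, 2.megabytes) call above. One operational note: a service account can only upload into a Drive folder that has been shared with its client email.

# lib/my_logger.rb -- hypothetical sketch, not the author's code
require 'logger'

class MyLogger < Logger
  # Same signature as Logger.new: log path, rotated files to keep, max bytes per file
  def initialize(path, shift_age, shift_size)
    super
    self.formatter = proc { |severity, time, _progname, msg| "#{time} #{severity}: #{msg}\n" }
  end
end

# lib/credentials.rb -- hypothetical sketch: bracket access over a plain YAML file
require 'yaml'

module Credentials
  def self.[](key)
    @store ||= YAML.load_file('config/credentials.yml').transform_keys(&:to_sym)
    @store.fetch(key)
  end
end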
# Gemfile
gem 'google-apis-drive_v3', require: false # its dependencies include googleauth, which provides ServiceAccountCredentials
gem 'memery'
# Thorfile
require 'google/apis/drive_v3'
require_relative 'lib/tasks/mighty_thor'
# ...
# lib/tasks/mighty_thor.rb
require 'thor'

class MightyThor < Thor
  private

  # Print a timestamped message to stdout (Time.current comes from ActiveSupport)
  def say_with_time(message)
    puts "#{Time.current.strftime('%Y-%m-%d %H:%M:%S %Z')}: #{message}"
  end
end
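Restoring is just the reverse, so a companion task is easy to add. A hypothetical sketch that reuses the same private helpers (restore_from_file is my name, not from the post; it would go inside ServerDb above the private keyword so Thor exposes it as a command):

desc 'restore_from_file GZIP_PATH', 'Restore DB from a downloaded backup tarball'
def restore_from_file(gzip_path)
  # unpack db.sql into the backup dir, then feed it to psql
  system("tar --extract --gzip --directory #{backup_dir} --file #{gzip_path}", exception: true)
  system("psql -U #{username} #{database} < #{sql_path}", exception: true)
end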
Written by Lev Lukomsky