\
--env "PGPASSWORD=${password}" \
--env "POSTGRES_PORT=${port}" \
--env "POSTGRES_USER=${username}" \
--name "${container_name}" \
--publish "${port}:${port}" \
"postgres:18.1-trixie"; then
return 1
fi
}
function stop_and_remove_container() {
# Stop a running container and then remove it.
#   $1 - container_name (required, non-empty)
# Returns 0 on success, 1 if validation or either docker step fails.
# Validate arguments up front, consistent with stop_container below.
if [[ "$#" -lt 1 ]]; then
log_error "Usage: stop_and_remove_container <container_name>"
return 1
fi
local -r container_name="$1"
if [[ -z "${container_name}" ]]; then
log_error "No value was supplied for container_name:${container_name}"
return 1
fi
if ! stop_container "${container_name}"; then
return 1
fi
if ! remove_container "${container_name}"; then
return 1
fi
}
function stop_container() {
# Stop a running Docker container by name.
#   $1 - container_name (required, non-empty); extra arguments are ignored
# Returns 0 on success, 1 on any failure.
if ! command -v docker >/dev/null; then
log_error "The docker command was not found, check if it is installed"
return 1
fi
(( $# >= 1 )) || {
log_error "Usage: stop_container <container_name>"
return 1
}
local -r container_name="$1"
shift 1
[[ -n "${container_name}" ]] || {
log_error "No value was supplied for container_name:${container_name}"
return 1
}
docker container stop "${container_name}" || return 1
}
function main() {
# Dump a PostgreSQL database from inside a disposable container, compress
# the dump, upload it to S3, and email a success report with row counts.
# Required env vars: POSTGRES_DB, POSTGRES_HOST, POSTGRES_PASSWORD,
# POSTGRES_PORT, POSTGRES_USER. Returns 1 as soon as any step fails.
trap 'cleanup $LINENO' EXIT
# Validate required configuration up front: ${VAR:?} aborts with a clear
# message instead of silently continuing with empty connection parameters
# and a malformed dump file name.
local -r POSTGRES_DB="${POSTGRES_DB:?POSTGRES_DB must be set}"
local -r POSTGRES_HOST="${POSTGRES_HOST:?POSTGRES_HOST must be set}"
local -r POSTGRES_PASSWORD="${POSTGRES_PASSWORD:?POSTGRES_PASSWORD must be set}"
local -r POSTGRES_PORT="${POSTGRES_PORT:?POSTGRES_PORT must be set}"
local -r POSTGRES_USER="${POSTGRES_USER:?POSTGRES_USER must be set}"
local -r bucket_url="s3://some-bucket"
local -r container_name="pg_dumper"
# day_month_year then hour/min/sec; the literal H, M, S after %H/%M/%S are
# intentional suffixes (e.g. ..._18H_30M_45S_UTC).
local -r date_str="$(date +%d_%m_%y_%HH_%MM_%SS_%Z)"
local -r email_template_success="template-success"
local -r file_name_prefix="${POSTGRES_DB}"
local -r file_name_suffix="${date_str}"
local -r from_email="[email protected]"
local -r to_email="[email protected]"
local -r dump_file_name="${file_name_prefix}_${file_name_suffix}"
local email_template_data
local rows
if ! start_container \
"${container_name}" \
"${POSTGRES_DB}" \
"${POSTGRES_HOST}" \
"${POSTGRES_PASSWORD}" \
"${POSTGRES_PORT}" \
"${POSTGRES_USER}"; then
return 1
fi
if ! install_dependencies \
"${container_name}"; then
return 1
fi
# NOTE(review): script_directory is not defined in this function; it is
# presumably a global set by the enclosing script — confirm.
if ! copy_files_to_container \
"${script_directory}/docker_pg_dump.sh" \
"${container_name}:/root"; then
return 1
fi
if ! run_script_inside_container \
"${container_name}" \
"${dump_file_name}" \
"${POSTGRES_DB}" \
"${POSTGRES_HOST}" \
"${POSTGRES_PASSWORD}" \
"${POSTGRES_PORT}" \
"${POSTGRES_USER}"; then
return 1
fi
if ! copy_files_from_container \
"${container_name}:/tmp/${dump_file_name}.tar.gz.br" \
"/tmp"; then
return 1
fi
if ! stop_and_remove_container \
"${container_name}"; then
return 1
fi
if rows=$(fetch_table_row_counts \
"${POSTGRES_DB}" \
"${POSTGRES_HOST}" \
"${POSTGRES_PORT}" \
"${POSTGRES_USER}"); then
log_info "We got the rows: ${rows}"
else
return 1
fi
# Handle the case where nothing is returned because you ran it on an empty
# database with no tables.
if [[ -z "${rows}" ]]; then
rows="[]"
fi
# shellcheck disable=SC2016
if email_template_data=$(prepare_template_data "tables" "${rows}" '{tables:$tables}'); then
log_info "We got the email template data: ${email_template_data}"
else
return 1
fi
if ! save_files_to_s3 \
"/tmp/${dump_file_name}.tar.gz.br" \
"${bucket_url}"; then
return 1
fi
if ! send_email \
"${from_email}" \
"${to_email}" \
"${email_template_data}" \
"${email_template_success}"; then
return 1
fi
# The backup file has been saved to S3 and we no longer need it locally
rm -f "/tmp/${dump_file_name}.tar.gz.br"
}
# Entry point: forward all CLI arguments to main.
main "$@"
```
https://redd.it/1spjark
@r_bash
--env "PGPASSWORD=${password}" \
--env "POSTGRES_PORT=${port}" \
--env "POSTGRES_USER=${username}" \
--name "${container_name}" \
--publish "${port}:${port}" \
"postgres:18.1-trixie"; then
return 1
fi
}
function stop_and_remove_container() {
# Stop a running container and then remove it.
#   $1 - container_name (required, non-empty)
# Returns 0 on success, 1 if validation or either docker step fails.
# Validate arguments up front, consistent with stop_container below.
if [[ "$#" -lt 1 ]]; then
log_error "Usage: stop_and_remove_container <container_name>"
return 1
fi
local -r container_name="$1"
if [[ -z "${container_name}" ]]; then
log_error "No value was supplied for container_name:${container_name}"
return 1
fi
if ! stop_container "${container_name}"; then
return 1
fi
if ! remove_container "${container_name}"; then
return 1
fi
}
function stop_container() {
# Stop a running Docker container by name.
#   $1 - container_name (required, non-empty); extra arguments are ignored
# Returns 0 on success, 1 on any failure.
if [[ -z "$(command -v docker)" ]]; then
log_error "The docker command was not found, check if it is installed"
return 1
fi
if [[ "$#" -lt 1 ]]; then
log_error "Usage: stop_container <container_name>"
return 1
fi
local -r container_name="$1"
# Drop the consumed argument; any extras are ignored.
shift 1
if [[ -z "${container_name}" ]]; then
log_error "No value was supplied for container_name:${container_name}"
return 1
fi
if ! docker container stop "${container_name}"; then
return 1
fi
}
function main() {
# Dump a PostgreSQL database from inside a disposable container, compress
# the dump, upload it to S3, and email a success report with row counts.
# Required env vars: POSTGRES_DB, POSTGRES_HOST, POSTGRES_PASSWORD,
# POSTGRES_PORT, POSTGRES_USER. Returns 1 as soon as any step fails.
trap 'cleanup $LINENO' EXIT
# Validate required configuration up front: ${VAR:?} aborts with a clear
# message instead of silently continuing with empty connection parameters
# and a malformed dump file name.
local -r POSTGRES_DB="${POSTGRES_DB:?POSTGRES_DB must be set}"
local -r POSTGRES_HOST="${POSTGRES_HOST:?POSTGRES_HOST must be set}"
local -r POSTGRES_PASSWORD="${POSTGRES_PASSWORD:?POSTGRES_PASSWORD must be set}"
local -r POSTGRES_PORT="${POSTGRES_PORT:?POSTGRES_PORT must be set}"
local -r POSTGRES_USER="${POSTGRES_USER:?POSTGRES_USER must be set}"
local -r bucket_url="s3://some-bucket"
local -r container_name="pg_dumper"
# day_month_year then hour/min/sec; the literal H, M, S after %H/%M/%S are
# intentional suffixes (e.g. ..._18H_30M_45S_UTC).
local -r date_str="$(date +%d_%m_%y_%HH_%MM_%SS_%Z)"
local -r email_template_success="template-success"
local -r file_name_prefix="${POSTGRES_DB}"
local -r file_name_suffix="${date_str}"
local -r from_email="[email protected]"
local -r to_email="[email protected]"
local -r dump_file_name="${file_name_prefix}_${file_name_suffix}"
local email_template_data
local rows
if ! start_container \
"${container_name}" \
"${POSTGRES_DB}" \
"${POSTGRES_HOST}" \
"${POSTGRES_PASSWORD}" \
"${POSTGRES_PORT}" \
"${POSTGRES_USER}"; then
return 1
fi
if ! install_dependencies \
"${container_name}"; then
return 1
fi
# NOTE(review): script_directory is not defined in this function; it is
# presumably a global set by the enclosing script — confirm.
if ! copy_files_to_container \
"${script_directory}/docker_pg_dump.sh" \
"${container_name}:/root"; then
return 1
fi
if ! run_script_inside_container \
"${container_name}" \
"${dump_file_name}" \
"${POSTGRES_DB}" \
"${POSTGRES_HOST}" \
"${POSTGRES_PASSWORD}" \
"${POSTGRES_PORT}" \
"${POSTGRES_USER}"; then
return 1
fi
if ! copy_files_from_container \
"${container_name}:/tmp/${dump_file_name}.tar.gz.br" \
"/tmp"; then
return 1
fi
if ! stop_and_remove_container \
"${container_name}"; then
return 1
fi
if rows=$(fetch_table_row_counts \
"${POSTGRES_DB}" \
"${POSTGRES_HOST}" \
"${POSTGRES_PORT}" \
"${POSTGRES_USER}"); then
log_info "We got the rows: ${rows}"
else
return 1
fi
# Handle the case where nothing is returned because you ran it on an empty
# database with no tables.
if [[ -z "${rows}" ]]; then
rows="[]"
fi
# shellcheck disable=SC2016
if email_template_data=$(prepare_template_data "tables" "${rows}" '{tables:$tables}'); then
log_info "We got the email template data: ${email_template_data}"
else
return 1
fi
if ! save_files_to_s3 \
"/tmp/${dump_file_name}.tar.gz.br" \
"${bucket_url}"; then
return 1
fi
if ! send_email \
"${from_email}" \
"${to_email}" \
"${email_template_data}" \
"${email_template_success}"; then
return 1
fi
# The backup file has been saved to S3 and we no longer need it locally
rm -f "/tmp/${dump_file_name}.tar.gz.br"
}
# Entry point: forward all CLI arguments to main.
main "$@"
```
https://redd.it/1spjark
@r_bash
Reddit
From the bash community on Reddit
Explore this post and more from the bash community
Showcase Termyt: A professional, pure Bash CLI wrapper (with .deb packaging & CI/CD)
Hi everyone!
I wanted to share a project I’ve been developing called Termyt.
Unlike heavy terminal emulators, Termyt is a pure Bash script designed to be a fast, efficient CLI wrapper. It’s built for those who need a streamlined way to manage their workflows without the overhead of a full GUI application.
# Why Termyt?
Pure Bash: No heavy dependencies, no bloat. Just fast, native execution.
Portable: Since it's a shell script, it's highly adaptable across different environments.
Debianized: I’ve packaged it into a professional `.deb` file, following all the standard Linux filesystem hierarchies (`/usr/bin`, `/usr/share/doc`, etc.).
Security & Transparency: The code is open, readable, and easy to audit. You can find the source code (
# Recent Improvements:
CI/CD Pipeline: Even for a Bash script, I’ve implemented GitHub Actions to automate the packaging and ensure everything is reproducible.
Professional Layout: Cleaned up the repo to meet professional standards, including man pages support and proper script documentation.
Badges & Documentation: Added real-time badges to track the build status and versioning.
# Link:
GitHub Repository: https://github.com/Rob1c/termyt
Lemme know your thoughts!
https://redd.it/1spyn3v
@r_bash
Hi everyone!
I wanted to share a project I’ve been developing called Termyt.
Unlike heavy terminal emulators, Termyt is a pure Bash script designed to be a fast, efficient CLI wrapper. It’s built for those who need a streamlined way to manage their workflows without the overhead of a full GUI application.
# Why Termyt?
Pure Bash: No heavy dependencies, no bloat. Just fast, native execution.
Portable: Since it's a shell script, it's highly adaptable across different environments.
Debianized: I’ve packaged it into a professional `.deb` file, following all the standard Linux filesystem hierarchies (`/usr/bin`, `/usr/share/doc`, etc.).
Security & Transparency: The code is open, readable, and easy to audit. You can find the source code (
.sh file) on the main branch, inside usr/bin, or packed in CI/CD (Github Action) safe generated tarballs and ZIPs in the release.# Recent Improvements:
CI/CD Pipeline: Even for a Bash script, I’ve implemented GitHub Actions to automate the packaging and ensure everything is reproducible.
Professional Layout: Cleaned up the repo to meet professional standards, including man pages support and proper script documentation.
Badges & Documentation: Added real-time badges to track the build status and versioning.
# Link:
GitHub Repository: [https://github.com/Rob1c/termyt\]
Lemme know your thoughts!
https://redd.it/1spyn3v
@r_bash
GitHub
GitHub - Rob1c/Termyt: Termyt is an easy-to-use CLI Tool for downloading audio and videos from streaming sites like YouTube, yt…
Termyt is an easy-to-use CLI Tool for downloading audio and videos from streaming sites like YouTube, yt-dlp based. (Protected by CC-BY-NC License 4.0) - Rob1c/Termyt
What is the difference between have 2 separate ERR and EXIT traps vs a single EXIT trap for handling everything?
I have a function called testcommand that looks like this
```
function testcommand() {
# Run an arbitrary command string and report success/failure on stdout.
#   $1 - command string (required)
# Returns 0 if the command succeeded, 1 otherwise.
# WARNING: eval executes $1 as shell code — never pass untrusted input.
local -r command="$1"
if eval "${command}"; then
printf "%s\n" "INFO: the command completed its execution successfully"
return 0
else
printf "%s\n" "ERROR: the command failed to execute"
return 1
fi
}
function main() {
# Demo entry point: only "ls" is allowed through; any other argument is
# replaced with "badcommand" so the failure path of testcommand is exercised.
trap 'handleexit $?' EXIT
local command="$1"
case "${command}" in
"ls") ;;
*)
command="badcommand"
;;
esac
# NOTE(review): ERRORLOGFILE is expected to be set elsewhere (e.g. via
# mktemp) — confirm before relying on this.
printf "%s\n" "This is our error log file ${ERRORLOGFILE}"
# stderr of the tested command is captured in the log file for the EXIT trap.
if ! testcommand "${command}" 2>"${ERRORLOGFILE}"; then
return 1
fi
}
main "$@"
```
In case you are wondering, this is what the handleexit actually looks like
Alternatively I can also make 2 functions and have the main function basically handle ERR and EXIT separately
In which I ll need 2 functions
Quick questions based on the stuff above
- Do I need just the EXIT or do I need both ERR and EXIT
- Is there any tradeoff involved on using one vs two traps like this?
- Where should I remove that log file for success and failure cases?
- Is this a good basic setup to write more complex stuff like calling external commands like psql, aws etc?
- Is there a name for this design pattern in bash?
https://redd.it/1sqqvxg
@r_bash
I have a function called testcommand that looks like this
```
function testcommand() {
local -r command="$1"
if eval "${command}"; then
printf "%s\n" "INFO: the command completed its execution successfully"
return 0
else
printf "%s\n" "ERROR: the command failed to execute"
return 1
fi
}
In this invocation, I am calling a main() function that calls this command with a single trap
function main() {
trap 'handleexit $?' EXIT
local command="$1"
case "${command}" in
"ls") ;;
*)
command="badcommand"
;;
esac
printf "%s\n" "This is our error log file ${ERRORLOGFILE}"
if ! testcommand "${command}" 2>"${ERRORLOGFILE}"; then
return 1
fi
}
main "$@"
```
In case you are wondering, this is what the handleexit actually looks like
#!/usr/bin/env bash
ERROR_LOG_FILE=$(mktemp)
function handle_exit() {
# EXIT trap handler: print an INFO/ERROR summary depending on whether
# anything was written to ERROR_LOG_FILE, then delete the temp log file.
#   $1 - exit code of the script (passed as $? from the trap)
local error_message
local -r exit_code="$1"
error_message="$(cat "${ERROR_LOG_FILE}")"
if [[ -z "${error_message}" ]]; then
printf "handle_exit: INFO: date:%s, exit_code::%s, error:%s\n" "$(date)" "${exit_code}" "No errors were detected"
else
printf "handle_exit: ERROR: date:%s, exit_code::%s, error:%s\n" "$(date)" "${exit_code}" "${error_message}"
fi
# Clean up the temp file on both success and failure paths.
if [[ -f "${ERROR_LOG_FILE}" ]]; then
rm -f "${ERROR_LOG_FILE}"
fi
}
Alternatively I can also make 2 functions and have the main function basically handle ERR and EXIT separately
function main() {
# Variant with two traps: ERR fires when a command fails, EXIT fires when
# the script terminates (on both success and failure paths).
trap 'handle_error' ERR
trap 'handle_exit $?' EXIT
local command="$1"
case "${command}" in
"ls") ;;
*)
command="bad_command"
;;
esac
printf "%s\n" "This is our error log file ${ERROR_LOG_FILE}"
# stderr of the tested command is captured in the log file for the traps.
if ! test_command "${command}" 2>"${ERROR_LOG_FILE}"; then
return 1
fi
}
main "$@"
In which I ll need 2 functions
#!/usr/bin/env bash
ERROR_LOG_FILE=$(mktemp)
function handle_error() {
# ERR trap handler: log the event and remove the temp error log file.
#   $1 - optional extra context (the trap as written passes no argument,
#        so this expands empty in the log line)
local arg="$1"
printf "%s\n" "handle_error called at date:$(date) ${arg}"
if [[ -f "${ERROR_LOG_FILE}" ]]; then
rm -f "${ERROR_LOG_FILE}"
fi
}
function handle_exit() {
# EXIT trap handler: log the event and remove the temp error log file.
#   $1 - exit code of the script (passed as $? from the trap)
local arg="$1"
printf "%s\n" "handle_exit called at date:$(date) ${arg}"
# Removal is repeated here because ERR and EXIT may fire independently.
if [[ -f "${ERROR_LOG_FILE}" ]]; then
rm -f "${ERROR_LOG_FILE}"
fi
}
Quick questions based on the stuff above
- Do I need just the EXIT or do I need both ERR and EXIT
- Is there any tradeoff involved on using one vs two traps like this?
- Where should I remove that log file for success and failure cases?
- Is this a good basic setup to write more complex stuff like calling external commands like psql, aws etc?
- Is there a name for this design pattern in bash?
https://redd.it/1sqqvxg
@r_bash
Reddit
From the bash community on Reddit
Explore this post and more from the bash community
I made my own sandboxed bash
I mainly built this for agents. Bash is one of their main tools but was designed for humans.
For example, we assume that getting no information after a mutating command means success. But for an agent, that silence has no particular meaning. If an agent runs
The idea is to give instant feedback for every command:
> mkdir hello
# Stdout: Folder has been created ✔
with additional information like
The other big problem is safety. For the majority of commands we can control the input and output, but it becomes problematic with commands like
I built it in TypeScript, so if there are any fellow bash and TypeScript lovers out there, I'd love to hear your thought. And if you ever feel like adding a command or two, that would be awesome too.
Here the repository: https://github.com/capsulerun/bash
https://redd.it/1srs68i
@r_bash
I mainly built this for agents. Bash is one of their main tools but was designed for humans.
For example, we assume that getting no information after a mutating command means success. But for an agent, that silence has no particular meaning. If an agent runs
mv file1 file2, it has no idea if the command worked, so it will immediately do an ls just to check.The idea is to give instant feedback for every command:
> mkdir hello
# Stdout: Folder has been created ✔
with additional information like
exit code, stderr, and a full diff of what changed (created, modified, deleted).The other big problem is safety. For the majority of commands we can control the input and output, but it becomes problematic with commands like
python3 -c or node -e that let an agent run untrusted code. That's why this bash is sandboxed by default using wasm to keep the host system safe.I built it in TypeScript, so if there are any fellow bash and TypeScript lovers out there, I'd love to hear your thought. And if you ever feel like adding a command or two, that would be awesome too.
Here the repository: https://github.com/capsulerun/bash
https://redd.it/1srs68i
@r_bash
GitHub
GitHub - capsulerun/bash: Sandboxed bash made for Agents
Sandboxed bash made for Agents. Contribute to capsulerun/bash development by creating an account on GitHub.
How to launch a program in bash.
Hello, I'm looking to launch a C program from bash. I usually launch the program as 'sudo ./p', so if I write a bash script that launches it in my place, what will that give? I tried: #!/bin/bash sudo ./p
https://redd.it/1ss1p31
@r_bash
hello I'm looking to launch a C program in bash, I launch the usual program as its 'sudo./p' so if I see a stcript bash that launches in my place what will it give? I tried its #!/bin/bash sudo./p
https://redd.it/1ss1p31
@r_bash
Reddit
From the bash community on Reddit
Explore this post and more from the bash community
recommendations for books to learn bash scripting from scratch
I’m somewhat of a beginner to bash scripting. I’m somewhat familiar with common shell command and how to navigate a linux CLI as well as some very basic programming concepts.
However by all means i am looking for a way to learn bash scripting from zero. looking for a good book or web resource to follow along and get a baseline knowledge of scripting in bash.
I’m not a fan of AI and I want to be able to write my own scripts for automating tasks like connecting to my openVPN, setting up crons for daily backups, file organization and management, etc.
https://redd.it/1ss65hf
@r_bash
I’m somewhat of a beginner to bash scripting. I’m somewhat familiar with common shell command and how to navigate a linux CLI as well as some very basic programming concepts.
However by all means i am looking for a way to learn bash scripting from zero. looking for a good book or web resource to follow along and get a baseline knowledge of scripting in bash.
I’m not a fan of AI and I want to be able to write my own scripts for automating tasks like connecting to my openVPN, setting up crons for daily backups, file organization and management, etc.
https://redd.it/1ss65hf
@r_bash
Reddit
From the bash community on Reddit
Explore this post and more from the bash community
Bash Ships
I've gone and written another terminal game, this time a version of the old strategy game Battleships. I'm just sharing here really, but - I hope some might at least find the mouse control / cursor positioning of interest or useful. With a bit of effort you can write some quite slick and ergonomic applications in Bash.
EDIT: suppose I should include the URL: https://github.com/StarShovel/bash-ships
https://preview.redd.it/3lerw7nmfswg1.jpg?width=683&format=pjpg&auto=webp&s=97ef05b2eaa3a10f8a92659a7ca36b7073b276c6
https://preview.redd.it/gh1lkgbqfswg1.png?width=800&format=png&auto=webp&s=bcb6fcdfd9e39db6c0da7a06ed9cd59e2e4af0e6
https://redd.it/1ssujli
@r_bash
I've gone and written another terminal game, this time a version of the old strategy game Battleships. I'm just sharing here really, but - I hope some might at least find the mouse control / cursor positioning of interest or useful. With a bit of effort you can write some quite slick and ergonomic applications in Bash.
EDIT: suppose I should include the URL: https://github.com/StarShovel/bash-ships
https://preview.redd.it/3lerw7nmfswg1.jpg?width=683&format=pjpg&auto=webp&s=97ef05b2eaa3a10f8a92659a7ca36b7073b276c6
https://preview.redd.it/gh1lkgbqfswg1.png?width=800&format=png&auto=webp&s=bcb6fcdfd9e39db6c0da7a06ed9cd59e2e4af0e6
https://redd.it/1ssujli
@r_bash
GitHub
GitHub - StarShovel/bash-ships: A Bash implementation of the classic strategy game Battleships
A Bash implementation of the classic strategy game Battleships - StarShovel/bash-ships
Command Works in Terminal but not Bash Script
/r/linuxquestions/comments/1ssxb20/command_works_in_terminal_but_not_bash_script/
https://redd.it/1ssxbhw
@r_bash
/r/linuxquestions/comments/1ssxb20/command_works_in_terminal_but_not_bash_script/
https://redd.it/1ssxbhw
@r_bash
Reddit
From the bash community on Reddit: Command Works in Terminal but not Bash Script
Posted by Secret_Creme_2691 - 0 votes and 1 comment
Is the order of the flags important in all commands in bash?
```
function test() {
# Build a pg_restore flag list from fixed flags plus key/value pairs taken
# from an associative array passed by NAME (accessed via nameref).
# NOTE(review): naming a function "test" shadows the `test` builtin —
# consider renaming.
local -n args="$1"
printf "%s\n" "running items now"
local -a pg_restore_flags=(
"--disable-triggers"
"--exit-on-error"
"--format=directory"
"--no-acl"
"--no-owner"
"--no-password"
"--no-privileges"
)
local key
# Associative-array iteration order is unspecified, so the generated
# --key=value flags come out in arbitrary order.
for key in "${!args[@]}"; do
local value
value="${args[${key}]}"
pg_restore_flags+=("--${key}=${value}")
done
printf "%s\n" "${pg_restore_flags[*]}"
# pg_restore "${pg_restore_flags[*]}" is that a bad idea?
# NOTE(review): yes — [*] joins every flag into ONE word; pass the array as
# separate arguments with "${pg_restore_flags[@]}" instead.
}
function main() {
trap 'handle_exit $?' EXIT
# Associative array of pg_restore connection options; passed by NAME (not
# value) so the callee can bind it with a nameref.
local -A items=(["dbname"]="test_db" ["host"]="localhost" ["jobs"]=8 ["port"]=5432 ["username"]="test_user")
test items
}
main "$@"
```
- There are often commands that I would like to pack into a function where I can check multiple things like: does the command exist? are all arguments valid? Redirect error to stderr etc.
- Take this pg_restore function for example. It takes so many arguments that I was thinking why not send an associative array instead but it seems order is not preserved when using associative arrays
- Is this going to be a problem say if I started wrapping commands like this inside a function that accepts an associative array with required flags?
https://redd.it/1st8ikv
@r_bash
```
function test() {
local -n args="$1"
printf "%s\n" "running items now"
local -a pg_restore_flags=(
"--disable-triggers"
"--exit-on-error"
"--format=directory"
"--no-acl"
"--no-owner"
"--no-password"
"--no-privileges"
)
local key
for key in "${!args[@]}"; do
local value
value="${args[${key}]}"
pg_restore_flags+=("--${key}=${value}")
done
printf "%s\n" "${pg_restore_flags[*]}"
# pg_restore "${pg_restore_flags[*]}" is that a bad idea?
}
function main() {
trap 'handle_exit $?' EXIT
local -A items=(["dbname"]="test_db" ["host"]="localhost" ["jobs"]=8 ["port"]=5432 ["username"]="test_user")
test items
}
main "$@"
```
- There are often commands that I would like to pack into a function where I can check multiple things like: does the command exist? are all arguments valid? Redirect error to stderr etc.
- Take this pg_restore function for example. It takes so many arguments that I was thinking why not send an associative array instead but it seems order is not preserved when using associative arrays
- Is this going to be a problem say if I started wrapping commands like this inside a function that accepts an associative array with required flags?
https://redd.it/1st8ikv
@r_bash
Reddit
From the bash community on Reddit
Explore this post and more from the bash community
Accumulate errors and print at end (but also keep them shown in output)
I have a script that extracts files with
I would like to extract all of them unattended and log any errors encountered, printing them all at the end in order after the typical stdout and stderr (so the terminal history is preserved for full context).
What's the recommended way to go about this? I suppose with a file you can do
P.S. Unrelated, but anyone compile Bash for loadable builtins like
https://redd.it/1sta9w6
@r_bash
I have a script that extracts files with
7z (which can only extract one file at a time), looping through them. I would like to extract all of them unattended and log any errors encountered, printing them all at the end in order after the typical stdout and stderr (so the terminal history is preserved for full context).
What's the recommended way to go about this? I suppose with a file you can do
cmd 2> >(tee -a "$log").P.S. Unrelated, but anyone compile Bash for loadble builtins like
asort for performance reasons?https://redd.it/1sta9w6
@r_bash
Reddit
From the bash community on Reddit
Explore this post and more from the bash community
Asking the human experts here, how would you turn something like this into a production grade script?
- This is something I cooked up without using any AI whatsoever and while I can most certainly use AI to ask this question, I am interested in hearing from the human experts on this sub
- It only does 3 things: decompress first using brotli then using tar and then runs a pg_restore. Why 3? Because pg_dump only supports concurrency if you use a directory format, brotli does not work with directories, and tar --gzip doesn't have a good compression ratio. You can read about the performance of various compression algorithms here
- As you can tell quickly many things can go wrong here
- The arguments are not validated.
- The commands could be missing or not installed on a particular machine.
- There is no cleanup if one of the steps fail.
- What does a production version of this look like according to you? What changes will need to be made to this?
https://redd.it/1st8ybe
@r_bash
#!/usr/bin/env bash
function handle_exit() {
# EXIT trap handler: report the script's exit code.
#   $1 - exit code (passed as $? from the trap)
local -r exit_code="$1"
printf "%s\n" "exit_code:${exit_code}"
}
function run_brotli_decompress() {
# Decompress input_path with brotli into output_path; --rm deletes the
# input file on success.
#   $1 - input_path  (.br file)
#   $2 - output_path (decompressed result)
# Returns brotli's exit status.
local -r input_path="$1"
local -r output_path="$2"
brotli \
--decompress \
--output="${output_path}" \
--rm \
"${input_path}"
}
function run_tar_decompress() {
# Extract the tar archive at input_path into the directory containing it.
#   $1 - input_path (tar archive)
#   $2 - output_path
# NOTE(review): output_path ($2) is accepted but never used — extraction
# always happens next to the input file. Confirm whether $2 should drive
# --directory instead.
local -r input_path="$1"
local -r output_path="$2"
local -r directory=$(dirname "${input_path}")
tar \
--directory="${directory}" \
--extract \
--file "${input_path}"
}
function run_pg_restore() {
# Restore a directory-format dump into a database with pg_restore.
#   $1 - dbname, $2 - host, $3 - port, $4 - username,
#   $5 - jobs (parallel workers), $6 - path to the dump directory
# Returns pg_restore's exit status.
# NOTE(review): --no-password means credentials must come from elsewhere
# (e.g. ~/.pgpass or PGPASSWORD) — confirm the caller provides them.
local -r dbname="$1"
local -r host="$2"
local -r port="$3"
local -r username="$4"
local -r jobs="$5"
local -r file="$6"
pg_restore \
--dbname="${dbname}" \
--disable-triggers \
--exit-on-error \
--format=directory \
--host="${host}" \
--jobs="${jobs}" \
--no-acl \
--no-owner \
--no-password \
--no-privileges \
--port="${port}" \
--username="${username}" \
"${file}"
}
function main() {
# Pipeline: brotli-decompress the backup, untar it, then pg_restore the
# resulting directory-format dump. Stops at the first failing step; the
# EXIT trap reports the final exit code.
trap 'handle_exit $?' EXIT
run_brotli_decompress \
"/tmp/test_db.tar.gz.br" \
"/tmp/test_db.tar.gz" || return 1
run_tar_decompress \
"/tmp/test_db.tar.gz" \
"/tmp/test_db" || return 1
run_pg_restore \
"test_db" \
"localhost" \
"5432" \
"test_user" \
8 \
"/tmp/test_db" || return 1
}
main "$@"
- This is something I cooked up without using any AI whatsoever and while I can most certainly use AI to ask this question, I am interested in hearing from the human experts on this sub
- It only does 3 things: decompress first using brotli then using tar and then runs a pgrestore. Why 3? because pgdump only supports concurrency if you use a directory format and brotli does not work with directories and tar --gzip doesnt have a good compression ratio. You can read about the performance of various compression algorithms here
- As you can tell quickly many things can go wrong here
- The arguments are not validated.
- The commands could be missing or not installed on a particular machine.
- There is no cleanup if one of the steps fail.
- What does a production version of this look like according to you? What changes will need to be made to this?
https://redd.it/1st8ybe
@r_bash
ntorga's - security-driven software engineering
gzip, bzip2, xz, zstd, 7z, brotli or lz4?
Not long ago, I found myself pondering over the choice of a compression tool for backups in Infinite Ez, our self-hosted container platform that transforms a single server into a fully-fledged PaaS. Ordinarily, gzip would be the obvious choice, but I began…