# log-docker-digests.sh
# Created on Sun Mar 10 2024. Last updated on Wed Apr 03 2024.
#!/bin/bash
#
# Unraid Userscript (but should run on any linux distro)
# Log and display latest known use of docker image digests for each image.
#
# Recommended to set as a 5 minute cron (`*/5 * * * *`)
#
# If something goes wrong you can pull the image by digest instead of `latest`
# See: https://docs.docker.com/reference/cli/docker/image/pull/#pull-an-image-by-digest-immutable-identifier
##
# Go outside the error output in the user scripts runner
printf "\n\n"
# Define the folder where the digests will be stored (trailing slash is
# relied on below when building per-image log file paths)
digestsFolder='/mnt/user/appdata/docker-digests/'
# Create directory if it does not exist.
# Quoted + `--` so the path is safe even if it is ever changed to one
# containing spaces or a leading dash (SC2086).
mkdir -p -- "$digestsFolder"
# Log latest digest to file.
# One record per running container's image:
#   <image-id> last-recorded-use <timestamp>
# appended to a per-image log file named after its first repo tag.
#
# Timestamp is computed once up front so every record in this run agrees.
now="$(date +%Y-%m-%d' '%H:%M:%S)"
docker ps -q |
# Get the image ID for each running container
xargs -I {} docker inspect --format='{{.Image}}' {} |
# Get the json for the corresponding image
xargs -I {} docker inspect {} |
# create a row with the relevant information: "<Id> <first RepoTag>"
jq -r '(.[0].Id + " " + .[0].RepoTags[0])' |
# replace slashes with triple underscore for use in filename (and reversing later)
awk '{gsub(/\//, "___")} 1' |
# Append the record directly instead of generating a shell command line and
# piping it into `bash`: image names/tags are external data and must never
# be evaluated as shell source.
while read -r digest tag; do
  printf '%s last-recorded-use %s\n' "$digest" "$now" >> "${digestsFolder}${tag}.log"
done
# Update all log files to only keep the latest date for each digest
for file in "$digestsFolder"*.log ; do
  # With no matches the glob stays literal — skip it instead of feeding
  # a nonexistent "*.log" path to sort.
  [ -e "$file" ] || continue
  # Reverse-sort so the newest record for each digest comes first, then
  # keep only the first line seen per digest ($1). Lines for the same
  # digest differ only in the trailing timestamp, so reverse lexical
  # order puts the latest date first.
  # mktemp instead of a fixed "temp_file" in the CWD: avoids collisions
  # between concurrent runs and litter on failure.
  tmp="$(mktemp)" || continue
  sort -r "$file" | awk '!row[$1]++' > "$tmp"
  # temp file is needed because we're reading and writing from the same file
  mv -- "$tmp" "$file"
done
## Display contents of each file
for file in "$digestsFolder"*.log ; do
  # Skip the literal pattern when no log files exist yet
  [ -e "$file" ] || continue
  # Recover the image name as specified in the template: strip the folder
  # prefix and ".log" suffix, then turn the "___" placeholders back into
  # slashes. Pure parameter expansion — no sed, so the folder path can
  # never be misread as a regex or clash with the sed delimiter.
  image="${file#"$digestsFolder"}"
  image="${image%.log}"
  echo "Image: ${image//___//}"
  # Logfile with digests
  echo "Logfile: $file"
  # Contents of that log file
  echo "Records:"
  cat -- "$file"
  # Newline
  printf "\n"
done