Create a single file containing the contents of all files in a given directory, to provide as context to LLMs
Brandon C. Roberts, pages/posts/ai/
Here is the script:
#!/bin/bash
#
# Concatenate the contents of every file under a given directory into a
# single text file, suitable for pasting as context for an LLM.
#
# Usage: ./script.sh <directory_path>

# Fail fast: exit on errors, on unset variables, and on pipeline failures.
set -euo pipefail

# Require exactly one argument: the directory to process.
if [ $# -ne 1 ]; then
  echo "Usage: $0 <directory_path>" >&2
  exit 1
fi

# Directory whose files will be concatenated.
INPUT_DIR=$1

# The directory must exist before we attempt to walk it.
if [ ! -d "$INPUT_DIR" ]; then
  echo "Error: Directory '$INPUT_DIR' does not exist." >&2
  exit 1
fi

# Output file is named after the directory and created in the current
# working directory (not inside INPUT_DIR).
OUTPUT_FILE="$(basename "$INPUT_DIR")_contents.txt"

# Truncate the output file so repeated runs start fresh.
> "$OUTPUT_FILE"
# Function to process each file
#######################################
# Append one file to the combined output with a header naming it.
# Globals:   INPUT_DIR (read), OUTPUT_FILE (appended to)
# Arguments: $1 - path of the file to process
# Outputs:   appends a "# ========== FILE: ... ==========" header plus the
#            file's contents (or a binary placeholder) to $OUTPUT_FILE
#######################################
process_file() {
  local file="$1"
  # Path relative to the input directory, for a readable header.
  # (Quote the expansion inside ${...#...} so INPUT_DIR is not treated
  # as a glob pattern.)
  local rel_path="${file#"$INPUT_DIR"/}"
  # Only process regular files (not directories, symlinks, etc.)
  if [ -f "$file" ]; then
    # Header comment identifying the file.
    printf '\n\n# ========== FILE: %s ==========\n' "$rel_path" >> "$OUTPUT_FILE"
    # Detect binary content via the MIME charset. Plain `file` output
    # rarely contains the literal word "binary" (it prints "ELF ...",
    # "PNG image ...", "data", etc.), so the old
    # `file | grep -q "binary"` check missed most binaries.
    if file --mime "$file" | grep -q 'charset=binary'; then
      echo "[Binary file - contents not included]" >> "$OUTPUT_FILE"
    else
      # Append the file contents verbatim.
      cat "$file" >> "$OUTPUT_FILE"
    fi
  fi
}
# Walk INPUT_DIR recursively and feed every regular, non-hidden file to
# process_file. The loop runs in the current shell (fed by process
# substitution with NUL-delimited paths, so any filename is safe), which
# avoids spawning a new bash per file via `-exec bash -c` and removes the
# need to export the function and variables at all.
while IFS= read -r -d '' f; do
  process_file "$f"
done < <(find "$INPUT_DIR" -type f -not -path '*/.*' -print0)

echo "All file contents have been written to $OUTPUT_FILE"