#!/usr/bin/env bash
# this script fetches a model's weight files and writes them zstd-compressed
# into huggingface's cache layout, to be transferred elsewhere and decompressed
# the size is reduced by about 22%
#
# usage: <script> model_id revision

# zstd compression level; the commented alternatives trade speed for ratio
LEVEL=-4
#LEVEL=-9
#LEVEL=--adapt=min=1,max=22

# pv with cursor positioning, progress bar, timer, ETA, average rate, and byte
# count; -m widens the rate-averaging window to 24 hours
PV="pv -cpteabm $((60*60*24))"
#PV="pv -cpteab"

model_id="$1"       # e.g. org/model
revision="$2"       # branch, tag, or commit to check out

repo_url=https://huggingface.co/"$model_id"
clonepath="$(basename "$repo_url")"
cachepath="${HOME}/.cache/huggingface/hub/models--${model_id//\//--}/blobs"

if ! [ -e "${HOME}/.cache/huggingface/token" ]
then
    huggingface-cli login || exit 1
fi
HF_TOKEN="$(<"${HOME}/.cache/huggingface/token")"

# clone without downloading LFS content; *.safetensors stay as pointer files
GIT_LFS_SKIP_SMUDGE=1 git clone "$repo_url" "$clonepath"
mkdir -p "$cachepath"
cd "$clonepath" || exit 1
git checkout "$revision" || exit 1

filecount=$(printf '%s\n' *.safetensors | wc -l)

for file in *.safetensors
do
    file_url="$repo_url"/resolve/"$revision"/"$file"
    # the LFS pointer file holds "oid sha256:..." and "size ..." lines;
    # replacing spaces with "=" lets export set $oid and $size
    export $(tr ' ' '=' < "$file")
    blobname=${oid##*:}
    outfile="$cachepath"/"$blobname".zst

    if [ -e "$outfile" ]; then
        # skip blobs whose existing archive already lists cleanly
        zstd -l "$outfile" && { echo "$file" 1>&3; continue; }
        echo
        echo "# Recompressing $file #"
    fi
    echo
    # note: exit here only aborts this pipeline stage, not the whole script
    { curl -s -L -H "Authorization: Bearer $HF_TOKEN" "$file_url" || exit 1; } |
        $PV -s "$size" -N "$file" |
        zstd -q --no-progress --force --size-hint="$size" --stream-size="$size" \
            --ultra --long=31 --memory=2048MB --threads=1024 "$LEVEL" |
        $PV -s "$((size * 100/129))" -N "$blobname".zst > "$outfile"  # 100/129 ≈ the ~22% reduction
    zstd -l "$outfile"
    echo
    echo
    echo "$file" 1>&3
done 3>&1 1>&2 | $PV -l -s "$filecount" -N "$clonepath"
# fd 3 carries one line per finished file, so the outer pv -l shows overall
# file-count progress; the loop's normal output is diverted to stderr
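
# ---------------------------------------------------------------------------
# decompression on the receiving machine: a minimal sketch, not part of the
# script above; it assumes the .zst blobs and the git clone were copied over,
# and ORG--MODEL is a placeholder for the real cache directory name
#
#   cachepath="${HOME}/.cache/huggingface/hub/models--ORG--MODEL/blobs"
#   for z in "$cachepath"/*.zst
#   do
#       # --long=31 must match the window size used at compression time;
#       # without it zstd refuses to decompress and suggests the flag to use
#       zstd -d --long=31 "$z" -o "${z%.zst}" && rm "$z"
#   done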