#!/usr/bin/env -S julia --project
# Script to generate `Artifacts.toml` and `artifacts/*.tar.gz`.
#
# Before running this script for the first time, run
#     using Pkg; Pkg.instantiate()
# to install the dependencies.
#
# Artifacts docs:
#     https://pkgdocs.julialang.org/v1/artifacts/
#
using Pkg
Pkg.instantiate()

using Tar, Inflate, SHA, TOML
using Dates: unix2datetime

const artifacts_dir = joinpath(@__DIR__, "artifacts")
const datasets_dir = joinpath(@__DIR__, "datasets")
const deploy_branch = "artifacts"

"""
Create an artifact tarball for a folder.

# Arguments
- `folder`: name of a subfolder of the `datasets/` folder, e.g. `"Si2"`
- `local_url`: use a local file path as the url, for testing only
"""
function create_artifact(folder::AbstractString; local_url::Bool = false)
    fullpath = joinpath(datasets_dir, folder)
    isdir(fullpath) || return nothing
    mkpath(artifacts_dir)

    # Exclude the `inputs` folder, which contains the input files for
    # generating the datasets.
    tar_excludes = ["inputs", ".gitignore", "README.md"]

    # gzip compression level, highest
    GZIP = "-9"
    # By default, use gzip
    compress_prog = "gzip $GZIP"
    # Try to use pigz for parallel compression; note it is not available
    # in the github workflow (ubuntu-latest).
    try
        run(`which pigz`)
        # -k: keep original files
        compress_prog = "pigz $GZIP -k"
    catch
        # pigz is not available, keep using gzip
    end

    tar_cmd = [
        Sys.isapple() ? "gtar" : "tar",
        "--exclude-vcs",
        "--exclude-vcs-ignores",
        "--use-compress-program=$compress_prog",
    ]
    append!(tar_cmd, ["--exclude=" * f for f in tar_excludes])

    tar_name = "$(folder).tar.gz"
    outpath = joinpath(artifacts_dir, tar_name)
    # `cd(f, dir)` restores the previous working directory when `f` returns.
    cd(fullpath) do
        files = readdir()
        # Check that the w90 prefix is the same as the folder name; otherwise
        # `Wannier.Datasets.load_dataset(folder)` won't work.
        "$(folder).win" in files || @warn "No $folder.win file in the folder $folder?"
        run(Cmd(vcat(tar_cmd, ["-cvf", outpath], files)))
    end

    if local_url
        # for testing locally
        url = "file://$(outpath)"
    else
        # use the huggingface `artifacts` branch to host the artifacts
        url = "https://huggingface.co/datasets/atomology/WannierDatasets/resolve/artifacts/$(tar_name)"
    end
    artifact_name = folder
    res = artifact_name => Dict(
        "git-tree-sha1" => Tar.tree_hash(IOBuffer(inflate_gzip(outpath))),
        "lazy" => true,
        "download" =>
            [Dict("url" => url, "sha256" => bytes2hex(open(sha256, outpath)))],
    )

    # print the result as TOML
    buf = IOBuffer()
    TOML.print(buf, Dict(res))
    s = String(take!(buf))
    @info "New artifact:\n$s"
    return res
end

"""
Get the git hash of the latest commit that touched a file.
"""
function git_hash(path::AbstractString; branch::AbstractString = "HEAD")
    return read(`git log -n 1 --pretty=format:%H $branch -- $path`, String)
end

"""
Get the date of a git commit.
"""
function git_date(githash::AbstractString)
    # unix timestamp of the commit
    g = strip(read(`git show -s --format=%ct $githash`, String))
    return unix2datetime(parse(Float64, g))
end

"""
Get the list of artifact tarballs in the deploy branch.
"""
function list_previous_artifacts()
    # If this command fails with the error
    #     fatal: Not a valid object name artifacts
    # you need to run
    #     git checkout artifacts
    # at least once to create the artifacts branch.
    files = split(read(`git ls-tree -r --name-only $deploy_branch`, String))
    filter!(f -> endswith(f, ".tar.gz"), files)
    return Dict(f => git_hash(f; branch = deploy_branch) for f in files)
end
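# A minimal consistency-check sketch, not part of the original pipeline:
# `verify_artifact_entry` is a hypothetical helper that re-derives both hashes
# recorded by `create_artifact` above and compares them against an existing
# `Artifacts.toml` entry, using the same Tar/Inflate/SHA calls.
function verify_artifact_entry(entry::AbstractDict, tarball::AbstractString)
    # git-tree-sha1 of the unpacked tree, computed from the decompressed tarball
    tree_ok = entry["git-tree-sha1"] == Tar.tree_hash(IOBuffer(inflate_gzip(tarball)))
    # sha256 of the compressed tarball itself, matched against any download entry
    sha_ok = any(
        d["sha256"] == bytes2hex(open(sha256, tarball)) for d in entry["download"]
    )
    return tree_ok && sha_ok
end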
""" function list_new_folders() prev_artifacts = list_previous_artifacts() new_folders = String[] for data in readdir(datasets_dir) startswith(data, "_") && continue startswith(data, ".") && continue isdir(joinpath(datasets_dir, data)) || continue dir_hash = git_hash(joinpath(basename(datasets_dir), data)) artifact_hash = get(prev_artifacts, "$(data).tar.gz", nothing) # @info "dir_hash: $dir_hash, artifact_hash: $artifact_hash" if !isnothing(artifact_hash) # if date of dir_hash is older than artifact_hash # then we do not need to update the artifact (git_date(dir_hash) <= git_date(artifact_hash)) && continue end push!(new_folders, data) end return new_folders end """ Remove the `artifacts_dir` folder and create a new one. """ function clean_artifacts_dir() print("I will clean the `$(artifacts_dir)` folder [y/N]: ") n = readline() if lowercase(n) != "y" println("Aborting...") return false end rm(artifacts_dir; force = true, recursive = true) mkpath(artifacts_dir) println("Cleaned `$(artifacts_dir)` folder.") return true end """ Get previous Artifacts.toml from deploy branch. """ function read_artifact_toml() return try content = read(`git show $deploy_branch:Artifacts.toml`) return TOML.parse(String(content)) catch e return Dict{String,Any}() end end function (@main)(args) artifacts = read_artifact_toml() new_folders = list_new_folders() if isempty(new_folders) println("No new folders to process.") return end if !("dryrun" in args) clean_artifacts_dir() || return for folder in new_folders artifact = create_artifact(folder) # existing entries will be replaced push!(artifacts, artifact) # break end open("Artifacts.toml", "w") do io TOML.print(io, artifacts) end end end