# WannierDatasets / make_artifacts.jl
# (uploaded to huggingface by qiaojunfeng, commit 19ed6cf)
#!/usr/bin/env -S julia --project
# Script to generate `Artifacts.toml` and `artifacts/*.tar.gz`.
#
# For the 1st time running this script, you need to run
# using Pkg; Pkg.instantiate()
# to install the dependencies.
#
# Artifacts docs:
# https://pkgdocs.julialang.org/v1/artifacts/
#
using Tar, Inflate, SHA, TOML
using Dates: unix2datetime
"""
folder: e.g. `"Si2"`
local_url: use local file path for the url, for testing only
"""
"""
    create_artifact(folder::AbstractString; local_url::Bool=false)

Compress `datasets/<folder>` into `artifacts/<folder>.tar.gz` and return a
`name => metadata` pair suitable for an `Artifacts.toml` entry, or `nothing`
when the dataset folder does not exist.

# Arguments
- `folder`: dataset folder name, e.g. `"Si2"`
- `local_url`: use a local `file://` path for the url, for testing only
"""
function create_artifact(folder::AbstractString; local_url::Bool=false)
    artifacts_dir = joinpath(@__DIR__, "artifacts")
    datasets_dir = joinpath(@__DIR__, "datasets")
    fullpath = joinpath(datasets_dir, folder)
    isdir(fullpath) || return nothing
    mkpath(artifacts_dir)
    # exclude the inputs folder which contains the input files for generating the datasets
    tar_excludes = ["inputs", ".gitignore", "README.md"]
    # gzip compression level, highest
    GZIP = "-9"
    # Prefer pigz for parallel compression when available; fall back to gzip.
    # pigz is not available in the github workflow (ubuntu-latest).
    # BUGFIX: the original wrote `global compress_prog = ...` inside a `try`,
    # which assigned a *global* variable and never updated this local, so
    # pigz was silently never used. `Sys.which` also avoids shelling out.
    compress_prog = if isnothing(Sys.which("pigz"))
        "gzip $GZIP"
    else
        # -k: keep original files
        "pigz $GZIP -k"
    end
    tar_cmd = [
        Sys.isapple() ? "gtar" : "tar",
        "--exclude-vcs",
        "--exclude-vcs-ignores",
        "--use-compress-program=$compress_prog",
    ]
    append!(tar_cmd, ["--exclude=" * f for f in tar_excludes])
    tar_name = "$(folder).tar.gz"
    outpath = joinpath(artifacts_dir, tar_name)
    # `cd(f, dir)` restores the previous working directory automatically,
    # so no manual pwd()/cd() bookkeeping is needed
    cd(fullpath) do
        files = readdir()
        # check w90 prefix is the same as the folder name, otherwise
        # `Wannier.Datasets.load_dataset(folder)` won't work
        "$(folder).win" in files || @warn "No $folder.win file in the folder $folder?"
        run(Cmd(vcat(tar_cmd, ["-cvf", outpath], files)))
    end
    if local_url
        # if you want to test locally
        url = "file://$(outpath)"
    else
        # use huggingface `artifacts` branch to host the artifacts.
        # BUGFIX: use `resolve` (raw file download) instead of `blob`
        # (HTML page), otherwise the sha256 below never matches what
        # Pkg actually downloads.
        url = "https://huggingface.co/datasets/atomology/WannierDatasets/resolve/artifacts/$(tar_name)"
    end
    artifact_name = folder
    return artifact_name => Dict(
        "git-tree-sha1" => Tar.tree_hash(IOBuffer(inflate_gzip(outpath))),
        "lazy" => true,
        "download" =>
            [Dict("url" => url, "sha256" => bytes2hex(open(sha256, outpath)))],
    )
end
"""
    git_hash(path; branch="HEAD")

Return the hash of the most recent commit on `branch` that touched `path`.
"""
function git_hash(path::AbstractString; branch::AbstractString="HEAD")
    cmd = `git log -n 1 --pretty=format:"%H" $branch -- "$path"`
    return read(cmd, String)
end
"""
    git_date(githash)

Return the commit date of `githash` as a `DateTime`, read from the
commit's unix timestamp.
"""
function git_date(githash::AbstractString)
    # %ct = committer date as a unix timestamp
    timestamp = read(`git show -s --format=%ct $githash`, String)
    return unix2datetime(parse(Float64, strip(timestamp)))
end
"""
    list_previous_artifacts()

Map each `*.tar.gz` file tracked on the `artifacts` branch to the hash of
the last commit that touched it.
"""
function list_previous_artifacts()
    branch = "artifacts"
    tracked = split(read(`git ls-tree -r --name-only $branch`, String))
    tarballs = filter(f -> endswith(f, ".tar.gz"), tracked)
    return Dict(f => git_hash(f; branch) for f in tarballs)
end
"""
    list_new_folders()

Return the dataset folders under `datasets/` whose last change is newer
(by commit date) than the corresponding tarball on the `artifacts` branch,
i.e. the folders whose artifacts need to be regenerated.
"""
function list_new_folders()
    datasets_dir = joinpath(@__DIR__, "datasets")
    prev_artifacts = list_previous_artifacts()
    new_folders = String[]
    for entry in readdir(datasets_dir)
        # skip underscore-prefixed helpers and hidden entries
        (startswith(entry, "_") || startswith(entry, ".")) && continue
        isdir(joinpath(datasets_dir, entry)) || continue
        dir_hash = git_hash(joinpath(basename(datasets_dir), entry))
        artifact_hash = get(prev_artifacts, "$(entry).tar.gz", nothing)
        if artifact_hash !== nothing
            # the artifact is up to date when its commit is at least as
            # recent as the dataset folder's last change
            git_date(dir_hash) <= git_date(artifact_hash) && continue
        end
        push!(new_folders, entry)
    end
    return new_folders
end
"""
    upload_artifacts(names)

Return the local `artifacts/<name>.tar.gz` path for each dataset name.
(No actual upload is performed by this function.)
"""
function upload_artifacts(names::AbstractVector)
    return [joinpath(@__DIR__, "artifacts", "$(name).tar.gz") for name in names]
end
"""
    main(args)

Build tarballs for every dataset that changed since its artifact was last
published, merge the new entries into `Artifacts.toml`, and return the
paths of the tarballs to upload.
"""
function (@main)(args)
    new_folders = list_new_folders()
    toml_path = joinpath(@__DIR__, "Artifacts.toml")
    # start from the existing Artifacts.toml so untouched entries are kept
    artifacts = isfile(toml_path) ? TOML.parsefile(toml_path) : Dict{String,Any}()
    # BUGFIX: the original loop `break`ed after the first folder, so only one
    # artifact was built per run while `upload_artifacts(new_folders)` still
    # returned a path for *every* new folder, including tarballs that were
    # never created. Process all folders and track which succeeded.
    created = String[]
    for folder in new_folders
        artifact = create_artifact(folder)
        # folder may be missing (create_artifact returns nothing); skip it
        isnothing(artifact) && continue
        # print the result as toml
        buf = IOBuffer()
        TOML.print(buf, Dict(artifact))
        s = String(take!(buf))
        @info "New artifact:\n$s"
        push!(artifacts, artifact)
        push!(created, folder)
    end
    open(toml_path, "w") do io
        TOML.print(io, artifacts)
    end
    return upload_artifacts(created)
end