qiaojunfeng committed
Commit 19ed6cf
1 Parent(s): fd99ce0

Upload to huggingface

Files changed (3)
  1. .gitignore +28 -0
  2. Project.toml +14 -0
  3. make_artifacts.jl +122 -72
.gitignore ADDED
@@ -0,0 +1,28 @@
+ # Files generated by invoking Julia with --code-coverage
+ *.jl.cov
+ *.jl.*.cov
+
+ # Files generated by invoking Julia with --track-allocation
+ *.jl.mem
+
+ # System-specific files and directories generated by the BinaryProvider and BinDeps packages
+ # They contain absolute paths specific to the host computer, and so should not be committed
+ deps/deps.jl
+ deps/build.log
+ deps/downloads/
+ deps/usr/
+ deps/src/
+
+ # Build artifacts for creating documentation generated by the Documenter package
+ docs/build/
+ docs/site/
+
+ # File generated by Pkg, the package manager, based on a corresponding Project.toml
+ # It records a fixed state of all packages used by the project. As such, it should not be
+ # committed for packages, but should be committed for applications that require a static
+ # environment.
+ Manifest.toml
+
+ # Do not commit large (and redundant) tar.gz files, instead, I upload them
+ # to the `artifacts` branch
+ artifacts/*.tar.gz
Project.toml ADDED
@@ -0,0 +1,14 @@
+ [deps]
+ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
+ Inflate = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9"
+ SHA = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+ TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
+ Tar = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
+
+ [compat]
+ Dates = "1"
+ Inflate = "0.1"
+ SHA = "0.7"
+ TOML = "1.0"
+ Tar = "1.10"
+ julia = "1.11"
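
For reference, a minimal sketch (not part of this commit) of how the environment declared above could be instantiated before running the script; the exact invocation is an assumption:

using Pkg
Pkg.activate(".")   # activate the project defined by this Project.toml (run from the repo root)
Pkg.instantiate()   # resolve and install Dates, Inflate, SHA, TOML, Tar within the [compat] bounds
# make_artifacts.jl defines its entry point as `function (@main)(args)`, a Julia 1.11
# feature (hence `julia = "1.11"` above), so it is meant to be invoked as a script:
#     julia --project=. make_artifacts.jl
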
make_artifacts.jl CHANGED
@@ -9,93 +9,69 @@
  # https://pkgdocs.julialang.org/v1/artifacts/
  #
  using Tar, Inflate, SHA, TOML
+ using Dates: unix2datetime

- # let Artifacts.toml point to local tarballs, otherwise point to GitHub releases
- LOCAL = false
- # if on local machine, I assume all the 7z files are already decompressed
- DECOMPRESS_7Z = true
-
- # check if we are running in github actions
- if isnothing(get(ENV, "GITHUB_ACTIONS", nothing))
-     LOCAL = true
-     DECOMPRESS_7Z = false
- else
-     LOCAL = false
-     DECOMPRESS_7Z = true
- end
+ """
+ folder: e.g. `"Si2"`
+ local_url: use local file path for the url, for testing only
+ """
+ function create_artifact(folder::AbstractString; local_url::Bool=false)
+     artifacts_dir = joinpath(@__DIR__, "artifacts")
+     datasets_dir = joinpath(@__DIR__, "datasets")

- if DECOMPRESS_7Z
-     PY_SCRIPT = joinpath(@__DIR__, "util/GitHub-ForceLargeFiles/src/reverse.py")
-     run(
-         Cmd([
-             "python",
-             PY_SCRIPT,
-             # reverse.py will delete the 7z files by default
-             "--delete_partitions",
-             # workaround for python argparse: only empty string "" -> false
-             "",
-             "--root_dir",
-             joinpath(@__DIR__, "datasets"),
-         ]),
-     )
- end
+     fullpath = joinpath(datasets_dir, folder)
+     isdir(fullpath) || return

- artifacts = Dict()
-
- const datasets_dir = joinpath(@__DIR__, "datasets")
- const artifacts_dir = joinpath(@__DIR__, "artifacts")
- # exclude the generator folder which contains the inputs for generating the datasets
- const tar_excludes = ["generator", ".gitignore", "README.md", "*.7z.*"]
-
- # gzip compression level, highest
- const GZIP = "-9"
- # By default, use gzip
- compress_prog = "gzip $GZIP"
- # Try to use pigz for parallel compression.
- # However, it is not available in github workflow (ubuntu-latest)
- try
-     run(`which pigz`)
-     # -k: keep original files
-     global compress_prog = "pigz $GZIP -k"
- catch
-     # pigz is not available
- end
+     mkpath(artifacts_dir)

- TAR_CMD = [
-     "tar",
-     "--exclude-vcs",
-     "--exclude-vcs-ignores",
-     "--use-compress-program=$compress_prog",
- ]
- append!(TAR_CMD, ["--exclude=" * f for f in tar_excludes])
+     # exclude the inputs folder which contains the input files for generating the datasets
+     tar_excludes = ["inputs", ".gitignore", "README.md"]

- mkpath(artifacts_dir)
+     # gzip compression level, highest
+     GZIP = "-9"
+     # By default, use gzip
+     compress_prog = "gzip $GZIP"
+     # Try to use pigz for parallel compression.
+     # However, it is not available in github workflow (ubuntu-latest)
+     try
+         run(`which pigz`)
+         # -k: keep original files
+         global compress_prog = "pigz $GZIP -k"
+     catch
+         # pigz is not available
+     end

- for data in readdir(datasets_dir)
-     startswith(data, "_") && continue
-     fullpath = joinpath(datasets_dir, data)
-     isdir(fullpath) || continue
+     tar_cmd = [
+         Sys.isapple() ? "gtar" : "tar",
+         "--exclude-vcs",
+         "--exclude-vcs-ignores",
+         "--use-compress-program=$compress_prog",
+     ]
+     append!(tar_cmd, ["--exclude=" * f for f in tar_excludes])

-     tar_name = "$(data).tar.gz"
+     tar_name = "$(folder).tar.gz"
      outpath = joinpath(artifacts_dir, tar_name)
+     prev_dir = pwd()
      cd(fullpath) do
          files = readdir()
-         run(Cmd(vcat(TAR_CMD, ["-cvf", outpath], files)))
+         # check w90 prefix is the same as the folder name, otherwise
+         # `Wannier.Datasets.load_dataset(folder)` won't work
+         "$(folder).win" in files || @warn "No $folder.win file in the folder $folder?"
+
+         run(Cmd(vcat(tar_cmd, ["-cvf", outpath], files)))
      end
+     cd(prev_dir)

-     if LOCAL
+     if local_url
          # if you want to test locally
          url = "file://$(outpath)"
      else
-         # use github release to host the artifacts
-         # https://docs.github.com/en/repositories/releasing-projects-on-github/linking-to-releases
-         # 2GB limit per file, no limit on total size, no bandwidth limit
-         # https://docs.github.com/en/repositories/releasing-projects-on-github/about-releases
-         url = "https://github.com/qiaojunfeng/WannierDatasets/releases/latest/download/$(tar_name)"
+         # use huggingface `artifacts` branch to host the artifacts
+         url = "https://huggingface.co/datasets/atomology/WannierDatasets/blob/artifacts/$(tar_name)"
      end

-     artifact_name = data
-     artifacts[artifact_name] = Dict(
+     artifact_name = folder
+     return artifact_name => Dict(
          "git-tree-sha1" => Tar.tree_hash(IOBuffer(inflate_gzip(outpath))),
          "lazy" => true,
          "download" =>
@@ -103,6 +79,80 @@ for data in readdir(datasets_dir)
      )
  end

- open("Artifacts.toml", "w") do io
-     TOML.print(io, artifacts)
+ function git_hash(path::AbstractString; branch::AbstractString="HEAD")
+     return read(`git log -n 1 --pretty=format:"%H" $branch -- "$path"`, String)
+ end
+
+ function git_date(githash::AbstractString)
+     # get the date of the commit, unix timestamp
+     g = strip(read(`git show -s --format=%ct $githash`, String))
+     return unix2datetime(parse(Float64, g))
+ end
+
+ function list_previous_artifacts()
+     branch = "artifacts"
+     files = split(read(`git ls-tree -r --name-only $branch`, String))
+     filter!(f -> endswith(f, ".tar.gz"), files)
+     return Dict(f => git_hash(f; branch) for f in files)
+ end
+
+ function list_new_folders()
+     datasets_dir = joinpath(@__DIR__, "datasets")
+
+     prev_artifacts = list_previous_artifacts()
+
+     new_folders = String[]
+     for data in readdir(datasets_dir)
+         startswith(data, "_") && continue
+         startswith(data, ".") && continue
+         isdir(joinpath(datasets_dir, data)) || continue
+
+         dir_hash = git_hash(joinpath(basename(datasets_dir), data))
+         artifact_hash = get(prev_artifacts, "$(data).tar.gz", nothing)
+         # @info "dir_hash: $dir_hash, artifact_hash: $artifact_hash"
+         if !isnothing(artifact_hash)
+             # if date of dir_hash is older than artifact_hash
+             # then we do not need to update the artifact
+             (git_date(dir_hash) <= git_date(artifact_hash)) && continue
+         end
+         push!(new_folders, data)
+     end
+     return new_folders
+ end
+
+ function upload_artifacts(names::AbstractVector)
+     paths = map(names) do name
+         joinpath(@__DIR__, "artifacts", "$(name).tar.gz")
+     end
+     paths
+ end
+
+ function (@main)(args)
+     new_folders = list_new_folders()
+
+     toml_path = joinpath(@__DIR__, "Artifacts.toml")
+     if isfile(toml_path)
+         artifacts = TOML.parsefile(toml_path)
+     else
+         artifacts = Dict{String, Any}()
+     end
+
+     for folder in new_folders
+         artifact = create_artifact(folder)
+
+         # print the result as toml
+         buf = IOBuffer()
+         TOML.print(buf, Dict(artifact))
+         s = String(take!(buf))
+         @info "New artifact:\n$s"
+
+         push!(artifacts, artifact)
+         break
+     end
+
+     open(toml_path, "w") do io
+         TOML.print(io, artifacts)
+     end
+
+     upload_artifacts(new_folders)
  end
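
Once the script has run, Artifacts.toml carries one entry per new dataset, built from the `git-tree-sha1`, `lazy`, and `download` fields assembled in `create_artifact` (the `download` payload is truncated in this diff). A minimal sketch, assuming a downstream package ships this Artifacts.toml (e.g. the Wannier.Datasets module referenced in the comments), of how such a lazy artifact might be consumed; the dataset name "Si2" is only the docstring's example:

# Lazy artifacts are fetched on first use; this requires the consuming package to
# depend on LazyArtifacts and to keep the generated Artifacts.toml next to its Project.toml.
using LazyArtifacts
dataset_dir = artifact"Si2"   # downloads and unpacks Si2.tar.gz, returns the install path
readdir(dataset_dir)          # expected to contain Si2.win (checked in create_artifact), etc.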