-
-
Notifications
You must be signed in to change notification settings - Fork 270
/
API.jl
2379 lines (2190 loc) · 98 KB
/
API.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# This file is a part of Julia. License is MIT: https://julialang.org/license
module API
using UUIDs
using Printf
import Random
using Dates
import LibGit2
import Logging
using Serialization
using REPL.TerminalMenus
import FileWatching
import Base: StaleCacheKey
import ..depots, ..depots1, ..logdir, ..devdir, ..printpkgstyle
import ..Operations, ..GitTools, ..Pkg, ..Registry
import ..can_fancyprint, ..pathrepr, ..isurl, ..PREV_ENV_PATH
using ..Types, ..TOML
using ..Types: VersionTypes
using Base.BinaryPlatforms
import ..stderr_f, ..stdout_f
using ..Artifacts: artifact_paths
using ..MiniProgressBars
import ..Resolve: ResolverError
include("generate.jl")
# Immutable snapshot of one package's state in the active environment,
# produced by `package_info` from the project + manifest data.
Base.@kwdef struct PackageInfo
    name::String
    version::Union{Nothing,VersionNumber}   # `nothing` when no concrete version is recorded
    tree_hash::Union{Nothing,String}
    is_direct_dep::Bool                     # true when listed in the project's deps
    is_pinned::Bool
    is_tracking_path::Bool                  # true when `path` is set (developed package)
    is_tracking_repo::Bool                  # true when a repo source or rev is set
    is_tracking_registry::Bool
    git_revision::Union{Nothing,String}
    git_source::Union{Nothing,String}
    source::String                          # resolved source path (see `package_info`)
    dependencies::Dict{String,UUID}
end
"""
    ==(a::PackageInfo, b::PackageInfo)

Structural equality: two `PackageInfo`s are equal iff every field compares `==`.
"""
function Base.:(==)(a::PackageInfo, b::PackageInfo)
    return all(getfield(a, f) == getfield(b, f) for f in fieldnames(PackageInfo))
end

# `==` is customized field-wise above, so `hash` must agree with it: values
# that are `==` must hash equally, or `PackageInfo`s misbehave as `Dict` keys
# and `Set` elements. (The default `hash` is identity-based for structs with
# heap-allocated fields such as `String`/`Dict`.)
function Base.hash(a::PackageInfo, h::UInt)
    for f in fieldnames(PackageInfo)
        h = hash(getfield(a, f), h)
    end
    return h
end
# Look up `pkg`'s manifest entry and build its `PackageInfo`.
# Raises an error via `pkgerror` when the package is not in the manifest.
function package_info(env::EnvCache, pkg::PackageSpec)::PackageInfo
    manifest_entry = manifest_info(env.manifest, pkg.uuid)
    manifest_entry === nothing && pkgerror(
        "expected package $(err_rep(pkg)) to exist in the manifest",
        " (use `resolve` to populate the manifest)")
    return package_info(env, pkg, manifest_entry)
end
# Assemble a `PackageInfo` for `pkg` from its spec and manifest `entry`.
function package_info(env::EnvCache, pkg::PackageSpec, entry::PackageEntry)::PackageInfo
    # Resolve the recorded repo source: URLs pass through as-is, anything else
    # goes through `Operations.project_rel_path`.
    repo_source = pkg.repo.source
    if repo_source === nothing
        git_source = nothing
    elseif isurl(repo_source::String)
        git_source = repo_source::String
    else
        git_source = Operations.project_rel_path(env, repo_source::String)
    end
    return PackageInfo(
        name = pkg.name,
        version = pkg.version != VersionSpec() ? pkg.version : nothing,
        tree_hash = pkg.tree_hash === nothing ? nothing : string(pkg.tree_hash), # TODO or should it just be a SHA?
        is_direct_dep = pkg.uuid in values(env.project.deps),
        is_pinned = pkg.pinned,
        is_tracking_path = pkg.path !== nothing,
        is_tracking_repo = pkg.repo.rev !== nothing || pkg.repo.source !== nothing,
        is_tracking_registry = Operations.is_tracking_registry(pkg),
        git_revision = pkg.repo.rev,
        git_source = git_source,
        source = Operations.project_rel_path(env, Operations.source_path(env.manifest_file, pkg)),
        dependencies = copy(entry.deps), #TODO is copy needed?
    )
end
"""
    dependencies()
    dependencies(env::EnvCache)

Return a dictionary mapping each dependency's `UUID` to its `PackageInfo`,
for every package loaded from the environment's manifest.
"""
dependencies() = dependencies(EnvCache())
function dependencies(env::EnvCache)
    loaded = Operations.load_all_deps(env)
    return Dict(p.uuid::UUID => package_info(env, p) for p in loaded)
end
# Apply `fn` to the `PackageInfo` for `uuid`, erroring when no dependency
# with that UUID exists in the current environment.
function dependencies(fn::Function, uuid::UUID)
    info = get(dependencies(), uuid, nothing)
    info === nothing && pkgerror("dependency with UUID `$uuid` does not exist")
    return fn(info)
end
# Summary of the active project, produced by `project`. The first three
# fields come from the project's own package entry and are `nothing` when
# the environment is not itself a named package.
Base.@kwdef struct ProjectInfo
    name::Union{Nothing,String}
    uuid::Union{Nothing,UUID}
    version::Union{Nothing,VersionNumber}
    ispackage::Bool                 # does the environment have a package entry?
    dependencies::Dict{String,UUID} # direct dependencies (project deps table)
    path::String                    # path of the Project.toml file
end
"""
    project()
    project(env::EnvCache)

Summarize the active project as a `ProjectInfo`. `name`, `uuid` and `version`
are `nothing` when the environment has no package entry of its own.
"""
project() = project(EnvCache())
function project(env::EnvCache)::ProjectInfo
    this = env.pkg
    ispkg = this !== nothing
    return ProjectInfo(
        name = ispkg ? this.name : nothing,
        uuid = ispkg ? this.uuid : nothing,
        version = ispkg ? this.version::VersionNumber : nothing,
        ispackage = ispkg,
        dependencies = env.project.deps,
        path = env.project_file
    )
end
"""
    check_package_name(x, mode=nothing)

Validate that `x` is a legal package name (a valid Julia identifier) and throw
a descriptive error otherwise. When `mode` is given (e.g. `"add"`/`"develop"`)
and the argument contains slashes, the error suggests the `url=`/`path=`
keyword forms instead. Passing `nothing` as the name is a no-op.
"""
function check_package_name(x::AbstractString, mode::Union{Nothing,String,Symbol}=nothing)
    # Valid identifier: nothing to report.
    Base.isidentifier(x) && return
    msg = sprint() do io
        print(io, "`$x` is not a valid package name")
        # Common mistake: passing "Name.jl" — suggest the trimmed name.
        if endswith(lowercase(x), ".jl")
            print(io, ". Perhaps you meant `$(chop(x; tail=3))`")
        end
        # Slashes hint that the argument is a URL or a filesystem path.
        if mode !== nothing && any(occursin.(['\\','/'], x))
            print(io, "\nThe argument appears to be a URL or path, perhaps you meant ",
                  "`Pkg.$mode(url=\"...\")` or `Pkg.$mode(path=\"...\")`.")
        end
    end
    pkgerror(msg)
end
check_package_name(::Nothing, ::Any) = nothing
# Throw (via `pkgerror`) when `pkgs` is empty; `f` names the calling operation
# for the error message. The short-circuit form returns `false` when `pkgs`
# is non-empty; callers ignore the return value.
function require_not_empty(pkgs, f::Symbol)
    isempty(pkgs) && pkgerror("$f requires at least one package")
end
# Provide some convenience calls
# For each user-facing verb, `@eval` generates a family of entry points:
#   f(pkg)                        -- single package (string or PackageSpec)
#   f(pkgs::Vector{String})       -- names converted to PackageSpecs
#   f(pkgs::Vector{PackageSpec})  -- main driver: builds a Context, records an
#                                    undo snapshot, then calls f(ctx, pkgs; ...)
#   f(ctx::Context)               -- no packages
#   f(; name=..., uuid=..., ...)  -- keyword form building one PackageSpec
#   f(pkgs::Vector{<:NamedTuple}) -- namedtuple form
# Note the `$(...)` splices evaluate at generation time, specializing each
# verb's method bodies (e.g. which verbs run auto-precompile afterwards).
for f in (:develop, :add, :rm, :up, :pin, :free, :test, :build, :status, :why, :precompile)
    @eval begin
        $f(pkg::Union{AbstractString, PackageSpec}; kwargs...) = $f([pkg]; kwargs...)
        $f(pkgs::Vector{<:AbstractString}; kwargs...) = $f([PackageSpec(pkg) for pkg in pkgs]; kwargs...)
        # `status` reports to stdout; every other verb logs to stderr.
        function $f(pkgs::Vector{PackageSpec}; io::IO=$(f === :status ? :stdout_f : :stderr_f)(), kwargs...)
            # All verbs except `precompile` ensure default registries exist.
            $(f != :precompile) && Registry.download_default_registries(io)
            ctx = Context()
            # Save initial environment for undo/redo functionality
            if $(f != :precompile) && !saved_initial_snapshot[]
                add_snapshot_to_undo(ctx.env)
                saved_initial_snapshot[] = true
            end
            kwargs = merge((;kwargs...), (:io => io,))
            pkgs = deepcopy(pkgs) # don't mutate input
            foreach(handle_package_input!, pkgs)
            ret = $f(ctx, pkgs; kwargs...)
            # Selected verbs additionally run auto-precompile / auto-GC afterwards.
            $(f in (:add, :up, :pin, :free, :build)) && Pkg._auto_precompile(ctx)
            $(f in (:up, :pin, :free, :rm)) && Pkg._auto_gc(ctx)
            return ret
        end
        $f(ctx::Context; kwargs...) = $f(ctx, PackageSpec[]; kwargs...)
        function $f(; name::Union{Nothing,AbstractString}=nothing, uuid::Union{Nothing,String,UUID}=nothing,
                      version::Union{VersionNumber, String, VersionSpec, Nothing}=nothing,
                      url=nothing, rev=nothing, path=nothing, mode=PKGMODE_PROJECT, subdir=nothing, kwargs...)
            pkg = PackageSpec(; name, uuid, version, url, rev, path, subdir)
            # Only these verbs accept a `mode` keyword; forward it for them.
            if $f === status || $f === rm || $f === up
                kwargs = merge((;kwargs...), (:mode => mode,))
            end
            # Handle $f() case
            if all(isnothing, [name,uuid,version,url,rev,path,subdir])
                $f(PackageSpec[]; kwargs...)
            else
                $f(pkg; kwargs...)
            end
        end
        function $f(pkgs::Vector{<:NamedTuple}; kwargs...)
            $f([PackageSpec(;pkg...) for pkg in pkgs]; kwargs...)
        end
    end
end
# Develop (track by local path) the given packages. Validates every spec
# before touching anything; each failed check raises via `pkgerror`.
function develop(ctx::Context, pkgs::Vector{PackageSpec}; shared::Bool=true,
                 preserve::PreserveLevel=Operations.default_preserve(), platform::AbstractPlatform=HostPlatform(), kwargs...)
    require_not_empty(pkgs, :develop)
    Context!(ctx; kwargs...)
    for pkg in pkgs
        check_package_name(pkg.name, "develop")
        if pkg.name == "julia" # if julia is passed as a package the solver gets tricked
            pkgerror("`julia` is not a valid package name")
        end
        if pkg.name === nothing && pkg.uuid === nothing && pkg.repo.source === nothing
            pkgerror("name, UUID, URL, or filesystem path specification required when calling `develop`")
        end
        # `develop` always tracks a path, never a git revision.
        if pkg.repo.rev !== nothing
            pkgerror("rev argument not supported by `develop`; consider using `add` instead")
        end
        if pkg.version != VersionSpec()
            pkgerror("version specification invalid when calling `develop`:",
                     " `$(pkg.version)` specified for package $(err_rep(pkg))")
        end
        # not strictly necessary to check these fields early, but it is more efficient
        if pkg.name !== nothing && count(x -> x.name == pkg.name, pkgs) > 1
            pkgerror("it is invalid to specify multiple packages with the same name: $(err_rep(pkg))")
        end
        if pkg.uuid !== nothing && count(x -> x.uuid == pkg.uuid, pkgs) > 1
            pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
        end
    end
    new_git = handle_repos_develop!(ctx, pkgs, shared)
    # Re-validate after repo resolution.
    for pkg in pkgs
        if Types.collides_with_project(ctx.env, pkg)
            pkgerror("package $(err_rep(pkg)) has the same name or UUID as the active project")
        end
        if count(x -> x.uuid == pkg.uuid, pkgs) > 1
            pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
        end
    end
    Operations.develop(ctx, pkgs, new_git; preserve=preserve, platform=platform)
    return
end
# Add the given packages to the project. Validates every spec first, resolves
# repo-tracked packages, updates registries, then hands off to `Operations.add`.
function add(ctx::Context, pkgs::Vector{PackageSpec}; preserve::PreserveLevel=Operations.default_preserve(),
             platform::AbstractPlatform=HostPlatform(), kwargs...)
    require_not_empty(pkgs, :add)
    Context!(ctx; kwargs...)
    for pkg in pkgs
        check_package_name(pkg.name, "add")
        if pkg.name == "julia" # if julia is passed as a package the solver gets tricked
            pkgerror("`julia` is not a valid package name")
        end
        if pkg.name === nothing && pkg.uuid === nothing && pkg.repo.source === nothing
            pkgerror("name, UUID, URL, or filesystem path specification required when calling `add`")
        end
        # A version cannot be combined with a repo source/rev.
        if (pkg.repo.source !== nothing || pkg.repo.rev !== nothing) && pkg.version != VersionSpec()
            pkgerror("version specification invalid when tracking a repository:",
                     " `$(pkg.version)` specified for package $(err_rep(pkg))")
        end
        # not strictly necessary to check these fields early, but it is more efficient
        if pkg.name !== nothing && count(x -> x.name == pkg.name, pkgs) > 1
            pkgerror("it is invalid to specify multiple packages with the same name: $(err_rep(pkg))")
        end
        if pkg.uuid !== nothing && count(x -> x.uuid == pkg.uuid, pkgs) > 1
            pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
        end
    end
    repo_pkgs = filter(p -> p.repo.source !== nothing || p.repo.rev !== nothing, pkgs)
    new_git = handle_repos_add!(ctx, repo_pkgs)
    # repo + unpinned -> name, uuid, repo.rev, repo.source, tree_hash
    # repo + pinned -> name, uuid, tree_hash
    Operations.update_registries(ctx; force=false, update_cooldown=Day(1))
    project_deps_resolve!(ctx.env, pkgs)
    registry_resolve!(ctx.registries, pkgs)
    stdlib_resolve!(pkgs)
    ensure_resolved(ctx, ctx.env.manifest, pkgs, registry=true)
    for pkg in pkgs
        if Types.collides_with_project(ctx.env, pkg)
            pkgerror("package $(err_rep(pkg)) has same name or UUID as the active project")
        end
        if count(x -> x.uuid == pkg.uuid, pkgs) > 1
            pkgerror("it is invalid to specify multiple packages with the same UUID: $(err_rep(pkg))")
        end
    end
    Operations.add(ctx, pkgs, new_git; preserve, platform)
    return
end
# Remove packages from the project (or manifest, per `mode`). With
# `all_pkgs=true` the package list must be empty and is filled automatically.
function rm(ctx::Context, pkgs::Vector{PackageSpec}; mode=PKGMODE_PROJECT, all_pkgs::Bool=false, kwargs...)
    Context!(ctx; kwargs...)
    if all_pkgs
        !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
        append_all_pkgs!(pkgs, ctx, mode)
    else
        require_not_empty(pkgs, :rm)
    end
    # A spec is "name/UUID only" when every other field still holds its default.
    is_name_or_uuid_only(p) = p.version == VersionSpec() && p.pinned == false &&
        p.tree_hash === nothing && p.repo.source === nothing &&
        p.repo.rev === nothing && p.path === nothing
    for pkg in pkgs
        if pkg.name === nothing && pkg.uuid === nothing
            pkgerror("name or UUID specification required when calling `rm`")
        end
        if !is_name_or_uuid_only(pkg)
            pkgerror("packages may only be specified by name or UUID when calling `rm`")
        end
    end
    mode == PKGMODE_PROJECT && project_deps_resolve!(ctx.env, pkgs)
    mode == PKGMODE_MANIFEST && manifest_resolve!(ctx.env.manifest, pkgs)
    ensure_resolved(ctx, ctx.env.manifest, pkgs)
    Operations.rm(ctx, pkgs; mode)
    return
end
# Push a `PackageSpec` onto `pkgs` for every package selected by `mode`:
# project direct deps, manifest entries, or both for PKGMODE_COMBINED.
function append_all_pkgs!(pkgs, ctx, mode)
    if mode in (PKGMODE_PROJECT, PKGMODE_COMBINED)
        for (name::String, uuid::UUID) in ctx.env.project.deps
            push!(pkgs, PackageSpec(name=name, uuid=uuid))
        end
    end
    if mode in (PKGMODE_MANIFEST, PKGMODE_COMBINED)
        for (uuid, entry) in ctx.env.manifest
            push!(pkgs, PackageSpec(name=entry.name, uuid=uuid))
        end
    end
    return
end
# Upgrade packages to `level` within `mode`'s scope. With no packages given,
# operates on everything selected by `mode`; otherwise only the named packages
# are upgraded (preserving others per `preserve`).
function up(ctx::Context, pkgs::Vector{PackageSpec};
            level::UpgradeLevel=UPLEVEL_MAJOR, mode::PackageMode=PKGMODE_PROJECT,
            preserve::Union{Nothing,PreserveLevel}= isempty(pkgs) ? nothing : PRESERVE_ALL,
            update_registry::Bool=true,
            skip_writing_project::Bool=false,
            kwargs...)
    Context!(ctx; kwargs...)
    # Nothing can move when every dependency is pinned; report and bail early.
    if Operations.is_fully_pinned(ctx)
        printpkgstyle(ctx.io, :Update, "All dependencies are pinned - nothing to update.", color = Base.info_color())
        return
    end
    if update_registry
        Registry.download_default_registries(ctx.io)
        Operations.update_registries(ctx; force=true)
    end
    Operations.prune_manifest(ctx.env)
    if isempty(pkgs)
        append_all_pkgs!(pkgs, ctx, mode)
    else
        # NOTE(review): the two mode-conditional calls below are immediately
        # repeated unconditionally — the conditional pair looks redundant;
        # confirm the resolve! functions are idempotent before simplifying.
        mode == PKGMODE_PROJECT && project_deps_resolve!(ctx.env, pkgs)
        mode == PKGMODE_MANIFEST && manifest_resolve!(ctx.env.manifest, pkgs)
        project_deps_resolve!(ctx.env, pkgs)
        manifest_resolve!(ctx.env.manifest, pkgs)
        ensure_resolved(ctx, ctx.env.manifest, pkgs)
    end
    Operations.up(ctx, pkgs, level; skip_writing_project, preserve)
    return
end
# Re-resolve the environment without upgrading anything or touching
# registries: implemented as `up` at UPLEVEL_FIXED over the manifest.
resolve(; io::IO=stderr_f(), kwargs...) = resolve(Context(;io); kwargs...)
function resolve(ctx::Context; skip_writing_project::Bool=false, kwargs...)
    up(ctx; level=UPLEVEL_FIXED, mode=PKGMODE_MANIFEST, update_registry=false, skip_writing_project, kwargs...)
    return nothing
end
# Pin the given packages (or all manifest packages with `all_pkgs=true`).
# Specs may carry at most a single concrete version; repo sources/revisions
# are rejected. Each failed check raises via `pkgerror`.
function pin(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...)
    Context!(ctx; kwargs...)
    if all_pkgs
        !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
        append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST)
    else
        require_not_empty(pkgs, :pin)
    end
    for pkg in pkgs
        if pkg.name === nothing && pkg.uuid === nothing
            pkgerror("name or UUID specification required when calling `pin`")
        end
        if pkg.repo.source !== nothing
            pkgerror("repository specification invalid when calling `pin`:",
                     " `$(pkg.repo.source)` specified for package $(err_rep(pkg))")
        end
        if pkg.repo.rev !== nothing
            pkgerror("git revision specification invalid when calling `pin`:",
                     " `$(pkg.repo.rev)` specified for package $(err_rep(pkg))")
        end
        version = pkg.version
        if version isa VersionSpec
            # Pinning requires the spec to denote exactly one version: a single
            # range whose bounds coincide. Checking only `ranges[1]` let
            # multi-range specs such as "1.2, 1.5" slip through, and an empty
            # spec would throw a BoundsError instead of a PkgError. (The
            # default `VersionSpec()` still passes, meaning "pin current".)
            is_single_version = length(version.ranges) == 1 &&
                version.ranges[1].lower == version.ranges[1].upper
            if !is_single_version
                pkgerror("pinning a package requires a single version, not a versionrange")
            end
        end
    end
    project_deps_resolve!(ctx.env, pkgs)
    ensure_resolved(ctx, ctx.env.manifest, pkgs)
    Operations.pin(ctx, pkgs)
    return
end
# Free (unpin / stop tracking path or repo) the given packages, or every
# manifest package with `all_pkgs=true`.
function free(ctx::Context, pkgs::Vector{PackageSpec}; all_pkgs::Bool=false, kwargs...)
    Context!(ctx; kwargs...)
    if all_pkgs
        !isempty(pkgs) && pkgerror("cannot specify packages when operating on all packages")
        append_all_pkgs!(pkgs, ctx, PKGMODE_MANIFEST)
    else
        require_not_empty(pkgs, :free)
    end
    # A spec is "name/UUID only" when every other field still holds its default.
    is_name_or_uuid_only(p) = p.version == VersionSpec() && p.pinned == false &&
        p.tree_hash === nothing && p.repo.source === nothing &&
        p.repo.rev === nothing && p.path === nothing
    for pkg in pkgs
        if pkg.name === nothing && pkg.uuid === nothing
            pkgerror("name or UUID specification required when calling `free`")
        end
        if !is_name_or_uuid_only(pkg)
            pkgerror("packages may only be specified by name or UUID when calling `free`")
        end
    end
    manifest_resolve!(ctx.env.manifest, pkgs)
    ensure_resolved(ctx, ctx.env.manifest, pkgs)
    Operations.free(ctx, pkgs; err_if_free = !all_pkgs)
    return
end
# Run the test suites of the given packages; with no packages, test the
# active project itself.
function test(ctx::Context, pkgs::Vector{PackageSpec};
              coverage=false, test_fn=nothing,
              julia_args::Union{Cmd, AbstractVector{<:AbstractString}}=``,
              test_args::Union{Cmd, AbstractVector{<:AbstractString}}=``,
              force_latest_compatible_version::Bool=false,
              allow_earlier_backwards_compatible_versions::Bool=true,
              allow_reresolve::Bool=true,
              kwargs...)
    # Normalize both extra-argument lists to `Cmd` up front.
    julia_args = Cmd(julia_args)
    test_args = Cmd(test_args)
    Context!(ctx; kwargs...)
    if isempty(pkgs)
        ctx.env.pkg === nothing && pkgerror("trying to test unnamed project") #TODO Allow this?
        push!(pkgs, ctx.env.pkg)
    else
        project_resolve!(ctx.env, pkgs)
        project_deps_resolve!(ctx.env, pkgs)
        manifest_resolve!(ctx.env.manifest, pkgs)
        ensure_resolved(ctx, ctx.env.manifest, pkgs)
    end
    Operations.test(ctx, pkgs;
                    coverage, test_fn, julia_args, test_args,
                    force_latest_compatible_version,
                    allow_earlier_backwards_compatible_versions,
                    allow_reresolve)
    return
end
# Forward to `Operations.is_manifest_current` for the given (or default) context.
is_manifest_current(ctx::Context = Context()) = Operations.is_manifest_current(ctx.env)
# Maps a file path to the time it was last known to be used.
const UsageDict = Dict{String,DateTime}
# Per-depot usage dictionaries, keyed by depot path.
const UsageByDepotDict = Dict{String,UsageDict}
"""
gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, kwargs...)
Garbage-collect package and artifact installations by sweeping over all known
`Manifest.toml` and `Artifacts.toml` files, noting those that have been deleted, and then
finding artifacts and packages that are thereafter not used by any other projects,
marking them as "orphaned". This method will only remove orphaned objects (package
versions, artifacts, and scratch spaces) that have been continually un-used for a period
of `collect_delay`; which defaults to seven days.
Garbage collection is only applied to the "user depot", e.g. the first entry in the
depot path. If you want to run `gc` on all depots set `force=true` (this might require
admin privileges depending on the setup).
Use verbose mode (`verbose=true`) for detailed output.
"""
function gc(ctx::Context=Context(); collect_delay::Period=Day(7), verbose=false, force=false, kwargs...)
Context!(ctx; kwargs...)
env = ctx.env
# Only look at user-depot unless force=true
gc_depots = force ? depots() : [depots1()]
# First, we load in our `manifest_usage.toml` files which will tell us when our
# "index files" (`Manifest.toml`, `Artifacts.toml`) were last used. We will combine
# this knowledge across depots, condensing it all down to a single entry per extant
# index file, to manage index file growth with would otherwise continue unbounded. We
# keep the lists of index files separated by depot so that we can write back condensed
# versions that are only ever subsets of what we read out of them in the first place.
# Collect last known usage dates of manifest and artifacts toml files, split by depot
manifest_usage_by_depot = UsageByDepotDict()
artifact_usage_by_depot = UsageByDepotDict()
# Collect both last known usage dates, as well as parent projects for each scratch space
scratch_usage_by_depot = UsageByDepotDict()
scratch_parents_by_depot = Dict{String, Dict{String, Set{String}}}()
# Load manifest files from all depots
for depot in gc_depots
# When a manifest/artifact.toml is installed/used, we log it within the
# `manifest_usage.toml` files within `write_env_usage()` and `bind_artifact!()`
function reduce_usage!(f::Function, usage_filepath)
if !isfile(usage_filepath)
return
end
for (filename, infos) in parse_toml(usage_filepath)
f.(Ref(filename), infos)
end
end
# Extract usage data from this depot, (taking only the latest state for each
# tracked manifest/artifact.toml), then merge the usage values from each file
# into the overall list across depots to create a single, coherent view across
# all depots.
usage = UsageDict()
let usage=usage
reduce_usage!(joinpath(logdir(depot), "manifest_usage.toml")) do filename, info
# For Manifest usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
end
end
manifest_usage_by_depot[depot] = usage
usage = UsageDict()
let usage=usage
reduce_usage!(joinpath(logdir(depot), "artifact_usage.toml")) do filename, info
# For Artifact usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
end
end
artifact_usage_by_depot[depot] = usage
# track last-used
usage = UsageDict()
parents = Dict{String, Set{String}}()
let usage=usage
reduce_usage!(joinpath(logdir(depot), "scratch_usage.toml")) do filename, info
# For Artifact usage, store only the last DateTime for each filename found
usage[filename] = max(get(usage, filename, DateTime(0)), DateTime(info["time"])::DateTime)
if !haskey(parents, filename)
parents[filename] = Set{String}()
end
for parent in info["parent_projects"]
push!(parents[filename], parent)
end
end
end
scratch_usage_by_depot[depot] = usage
scratch_parents_by_depot[depot] = parents
end
# Next, figure out which files are still existent
all_manifest_tomls = unique(f for (_, files) in manifest_usage_by_depot for f in keys(files))
all_artifact_tomls = unique(f for (_, files) in artifact_usage_by_depot for f in keys(files))
all_scratch_dirs = unique(f for (_, dirs) in scratch_usage_by_depot for f in keys(dirs))
all_scratch_parents = Set{String}()
for (depot, parents) in scratch_parents_by_depot
for parent in values(parents)
union!(all_scratch_parents, parent)
end
end
all_manifest_tomls = Set(filter(Pkg.isfile_nothrow, all_manifest_tomls))
all_artifact_tomls = Set(filter(Pkg.isfile_nothrow, all_artifact_tomls))
all_scratch_dirs = Set(filter(Pkg.isdir_nothrow, all_scratch_dirs))
all_scratch_parents = Set(filter(Pkg.isfile_nothrow, all_scratch_parents))
# Immediately write these back as condensed toml files
function write_condensed_toml(f::Function, usage_by_depot, fname)
for (depot, usage) in usage_by_depot
# Run through user-provided filter/condenser
usage = f(depot, usage)
# Write out the TOML file for this depot
usage_path = joinpath(logdir(depot), fname)
if !(isempty(usage)::Bool) || isfile(usage_path)
let usage=usage
open(usage_path, "w") do io
TOML.print(io, usage, sorted=true)
end
end
end
end
end
# Write condensed Manifest usage
let all_manifest_tomls=all_manifest_tomls
write_condensed_toml(manifest_usage_by_depot, "manifest_usage.toml") do depot, usage
# Keep only manifest usage markers that are still existent
let usage=usage
filter!(((k,v),) -> k in all_manifest_tomls, usage)
# Expand it back into a dict-of-dicts
return Dict(k => [Dict("time" => v)] for (k, v) in usage)
end
end
end
# Write condensed Artifact usage
let all_artifact_tomls=all_artifact_tomls
write_condensed_toml(artifact_usage_by_depot, "artifact_usage.toml") do depot, usage
let usage = usage
filter!(((k,v),) -> k in all_artifact_tomls, usage)
return Dict(k => [Dict("time" => v)] for (k, v) in usage)
end
end
end
# Write condensed scratch space usage
let all_scratch_parents=all_scratch_parents, all_scratch_dirs=all_scratch_dirs
write_condensed_toml(scratch_usage_by_depot, "scratch_usage.toml") do depot, usage
# Keep only scratch directories that still exist
filter!(((k,v),) -> k in all_scratch_dirs, usage)
# Expand it back into a dict-of-dicts
expanded_usage = Dict{String,Vector{Dict}}()
for (k, v) in usage
# Drop scratch spaces whose parents are all non-existent
parents = scratch_parents_by_depot[depot][k]
filter!(p -> p in all_scratch_parents, parents)
if isempty(parents)
continue
end
expanded_usage[k] = [Dict(
"time" => v,
"parent_projects" => collect(parents),
)]
end
return expanded_usage
end
end
function process_manifest_pkgs(path)
# Read the manifest in
manifest = try
read_manifest(path)
catch e
@warn "Reading manifest file at $path failed with error" exception = e
return nothing
end
# Collect the locations of every package referred to in this manifest
pkg_dir(uuid, entry) = Operations.find_installed(entry.name, uuid, entry.tree_hash)
return [pkg_dir(u, e) for (u, e) in manifest if e.tree_hash !== nothing]
end
# TODO: Merge with function above to not read manifest twice?
function process_manifest_repos(path)
# Read the manifest in
manifest = try
read_manifest(path)
catch e
# Do not warn here, assume that `process_manifest_pkgs` has already warned
return nothing
end
# Collect the locations of every repo referred to in this manifest
return [Types.add_repo_cache_path(e.repo.source) for (u, e) in manifest if e.repo.source !== nothing]
end
function process_artifacts_toml(path, pkgs_to_delete)
# Not only do we need to check if this file doesn't exist, we also need to check
# to see if it this artifact is contained within a package that is going to go
# away. This places an implicit ordering between marking packages and marking
# artifacts; the package marking must be done first so that we can ensure that
# all artifacts that are solely bound within such packages also get reaped.
if any(startswith(path, package_dir) for package_dir in pkgs_to_delete)
return nothing
end
artifact_dict = try
parse_toml(path)
catch e
@warn "Reading artifacts file at $path failed with error" exception = e
return nothing
end
artifact_path_list = String[]
for name in keys(artifact_dict)
getpaths(meta) = artifact_paths(SHA1(hex2bytes(meta["git-tree-sha1"])))
if isa(artifact_dict[name], Vector)
for platform_meta in artifact_dict[name]
append!(artifact_path_list, getpaths(platform_meta))
end
else
append!(artifact_path_list, getpaths(artifact_dict[name]))
end
end
return artifact_path_list
end
function process_scratchspace(path, pkgs_to_delete)
# Find all parents of this path
parents = String[]
# It is slightly awkward that we need to reach out to our `*_by_depot`
# datastructures here; that's because unlike Artifacts and Manifests we're not
# parsing a TOML file to find paths within it here, we're actually doing the
# inverse, finding files that point to this directory.
for (depot, parent_map) in scratch_parents_by_depot
if haskey(parent_map, path)
append!(parents, parent_map[path])
end
end
# Look to see if all parents are packages that will be removed, if so, filter
# this scratchspace out by returning `nothing`
if all(any(startswith(p, dir) for dir in pkgs_to_delete) for p in parents)
return nothing
end
return [path]
end
# Mark packages/artifacts as active or not by calling the appropriate user function
function mark(process_func::Function, index_files, ctx::Context; do_print=true, verbose=false, file_str=nothing)
marked_paths = String[]
active_index_files = Set{String}()
for index_file in index_files
# Check to see if it's still alive
paths = process_func(index_file)
if paths !== nothing
# Mark found paths, and record the index_file for printing
push!(active_index_files, index_file)
append!(marked_paths, paths)
end
end
if do_print
@assert file_str !== nothing
n = length(active_index_files)
printpkgstyle(ctx.io, :Active, "$(file_str): $(n) found")
if verbose
foreach(active_index_files) do f
println(ctx.io, " $(pathrepr(f))")
end
end
end
# Return the list of marked paths
return Set(marked_paths)
end
gc_time = now()
function merge_orphanages!(new_orphanage, paths, deletion_list, old_orphanage = UsageDict())
    for orphan in paths
        # Reuse a previously-recorded orphaning time when one exists;
        # otherwise this path was just orphaned, so stamp it with `gc_time`.
        free_time = something(get(old_orphanage, orphan, nothing), gc_time)
        # Always record the time in the new orphanage. Even if something goes
        # terribly wrong while deleting the artifact/package, it will still be
        # retried on the next `gc` run. The only way an entry leaves the
        # orphanage is by not being orphaned when the `gc` run starts.
        new_orphanage[orphan] = free_time
        # Orphaned long enough? Schedule it for deletion. Younger paths merely
        # carry their orphaning date forward: a future `gc` either reaps them
        # or, if they become used again, drops them from the orphanage.
        gc_time - free_time >= collect_delay && push!(deletion_list, orphan)
    end
end
# Scan manifests, parse them, read in all UUIDs listed and mark those as active
# printpkgstyle(ctx.io, :Active, "manifests:")
packages_to_keep = mark(process_manifest_pkgs, all_manifest_tomls, ctx,
    verbose=verbose, file_str="manifest files")
# Do an initial scan of our depots to get a preliminary `packages_to_delete`.
# NOTE: with an empty orphanage and `free_time == gc_time`, `merge_orphanages!`
# only pushes entries when `collect_delay` is zero (i.e. an immediate gc);
# otherwise this list stays empty at this point.
packages_to_delete = String[]
for depot in gc_depots
    depot_orphaned_packages = String[]
    # Packages live at `<depot>/packages/<name>/<slug>`; anything not marked
    # as reachable above is provisionally orphaned.
    packagedir = abspath(depot, "packages")
    if isdir(packagedir)
        for name in readdir(packagedir)
            !isdir(joinpath(packagedir, name)) && continue
            for slug in readdir(joinpath(packagedir, name))
                pkg_dir = joinpath(packagedir, name, slug)
                !isdir(pkg_dir) && continue
                if !(pkg_dir in packages_to_keep)
                    push!(depot_orphaned_packages, pkg_dir)
                end
            end
        end
    end
    merge_orphanages!(UsageDict(), depot_orphaned_packages, packages_to_delete)
end
# Next, do the same for artifacts. Note that we MUST do this after calculating
# `packages_to_delete`, as `process_artifacts_toml()` uses it internally to discount
# `Artifacts.toml` files that will be deleted by the future culling operation.
# printpkgstyle(ctx.io, :Active, "artifacts:")
# The `let` captures the *current* binding of `packages_to_delete`, since the
# variable is rebound further below.
artifacts_to_keep = let packages_to_delete=packages_to_delete
    mark(x -> process_artifacts_toml(x, packages_to_delete),
        all_artifact_tomls, ctx; verbose=verbose, file_str="artifact files")
end
repos_to_keep = mark(process_manifest_repos, all_manifest_tomls, ctx; do_print=false)
# printpkgstyle(ctx.io, :Active, "scratchspaces:")
spaces_to_keep = let packages_to_delete=packages_to_delete
    mark(x -> process_scratchspace(x, packages_to_delete),
        all_scratch_dirs, ctx; verbose=verbose, file_str="scratchspaces")
end
# Collect all orphaned paths (packages, artifacts and repos that are not reachable). These
# are implicitly defined in that we walk all packages/artifacts installed, then if
# they were not marked in the above steps, we reap them.
packages_to_delete = String[]
artifacts_to_delete = String[]
repos_to_delete = String[]
spaces_to_delete = String[]
for depot in gc_depots
    # We track orphaned objects on a per-depot basis, writing out our `orphaned.toml`
    # tracking file immediately, only pushing onto the overall `*_to_delete` lists if
    # the package has been orphaned for at least a period of `collect_delay`
    depot_orphaned_packages = String[]
    depot_orphaned_artifacts = String[]
    depot_orphaned_repos = String[]
    depot_orphaned_scratchspaces = String[]
    # Packages live at `<depot>/packages/<name>/<slug>`
    packagedir = abspath(depot, "packages")
    if isdir(packagedir)
        for name in readdir(packagedir)
            !isdir(joinpath(packagedir, name)) && continue
            for slug in readdir(joinpath(packagedir, name))
                pkg_dir = joinpath(packagedir, name, slug)
                !isdir(pkg_dir) && continue
                if !(pkg_dir in packages_to_keep)
                    push!(depot_orphaned_packages, pkg_dir)
                end
            end
        end
    end
    # Cloned git repositories live at `<depot>/clones/<repo>`
    reposdir = abspath(depot, "clones")
    if isdir(reposdir)
        for repo in readdir(reposdir)
            repo_dir = joinpath(reposdir, repo)
            !isdir(repo_dir) && continue
            if !(repo_dir in repos_to_keep)
                push!(depot_orphaned_repos, repo_dir)
            end
        end
    end
    # Artifacts live at `<depot>/artifacts/<hash>`
    artifactsdir = abspath(depot, "artifacts")
    if isdir(artifactsdir)
        for hash in readdir(artifactsdir)
            artifact_path = joinpath(artifactsdir, hash)
            !isdir(artifact_path) && continue
            if !(artifact_path in artifacts_to_keep)
                push!(depot_orphaned_artifacts, artifact_path)
            end
        end
    end
    # Scratchspaces live at `<depot>/scratchspaces/<uuid>/<space>`
    scratchdir = abspath(depot, "scratchspaces")
    if isdir(scratchdir)
        for uuid in readdir(scratchdir)
            uuid_dir = joinpath(scratchdir, uuid)
            !isdir(uuid_dir) && continue
            for space in readdir(uuid_dir)
                space_dir_or_file = joinpath(uuid_dir, space)
                if isdir(space_dir_or_file)
                    if !(space_dir_or_file in spaces_to_keep)
                        push!(depot_orphaned_scratchspaces, space_dir_or_file)
                    end
                elseif uuid == Operations.PkgUUID && isfile(space_dir_or_file)
                    # special cleanup for the precompile cache files that Pkg saves
                    # ("suspend_cache_*"/"pending_cache_*" marker files older
                    # than 24 hours are considered stale)
                    if any(prefix->startswith(basename(space_dir_or_file), prefix), ("suspend_cache_", "pending_cache_"))
                        if mtime(space_dir_or_file) < (time() - (24*60*60))
                            push!(depot_orphaned_scratchspaces, space_dir_or_file)
                        end
                    end
                end
            end
        end
    end
    # Read in this depot's `orphaned.toml` file:
    orphanage_file = joinpath(logdir(depot), "orphaned.toml")
    new_orphanage = UsageDict()
    old_orphanage = try
        TOML.parse(String(read(orphanage_file)))
    catch
        # Missing or unparsable orphanage file: start from an empty one.
        UsageDict()
    end
    # Update the package and artifact lists of things to delete, and
    # create the `new_orphanage` list for this depot.
    merge_orphanages!(new_orphanage, depot_orphaned_packages, packages_to_delete, old_orphanage)
    merge_orphanages!(new_orphanage, depot_orphaned_artifacts, artifacts_to_delete, old_orphanage)
    merge_orphanages!(new_orphanage, depot_orphaned_repos, repos_to_delete, old_orphanage)
    merge_orphanages!(new_orphanage, depot_orphaned_scratchspaces, spaces_to_delete, old_orphanage)
    # Write out the `new_orphanage` for this depot
    mkpath(dirname(orphanage_file))
    open(orphanage_file, "w") do io
        TOML.print(io, new_orphanage, sorted=true)
    end
end
# Next, we calculate the space savings we're about to gain!
pretty_byte_str = (sz) -> begin
    # Scale `sz` to the largest power-of-1024 unit it fits in and render it
    # with three decimals, e.g. "123.456 MiB".
    val, unit = Base.prettyprint_getunits(sz, length(Base._mem_units), Int64(1024))
    return @sprintf("%.3f %s", val, Base._mem_units[unit])
end
# Total on-disk size (in bytes) of all regular files beneath `path`, via
# `lstat` so symlinks are counted as the links themselves, not their targets.
# Unreadable files/directories are logged and skipped, contributing 0 bytes.
function recursive_dir_size(path)
    total = 0
    try
        for (root, dirs, files) in walkdir(path)
            for file in files
                # NOTE: use a distinct local here — the original reassigned
                # `path`, which made the outer catch below report the last
                # file visited instead of the directory being sized.
                filepath = joinpath(root, file)
                try
                    total += lstat(filepath).size
                catch ex
                    @error("Failed to calculate size of $filepath", exception=ex)
                end
            end
        end
    catch ex
        @error("Failed to calculate size of $path", exception=ex)
    end
    return total
end
# Delete paths for unreachable package versions and artifacts, and computing size saved
function delete_path(path)
    # Measure first: once the path is gone we can no longer size it.
    path_size = if isfile(path)
        try
            lstat(path).size
        catch ex
            @error("Failed to calculate size of $path", exception=ex)
            0
        end
    else
        recursive_dir_size(path)
    end
    try
        # Clear read-only bits etc. so the recursive removal can succeed.
        Base.Filesystem.prepare_for_deletion(path)
        Base.rm(path; recursive=true, force=true)
    catch e
        # Report zero bytes freed; the path remains in the orphanage and
        # will be retried on a future `gc` run.
        @warn("Failed to delete $path", exception=e)
        return 0
    end
    verbose && printpkgstyle(ctx.io, :Deleted, pathrepr(path) * " (" *
        pretty_byte_str(path_size) * ")")
    return path_size
end
# Sweep phase: delete everything scheduled for removal, tallying the bytes
# reclaimed in each category for the summary reporting that follows.
package_space_freed = sum(delete_path, packages_to_delete; init=0)
repo_space_freed = sum(delete_path, repos_to_delete; init=0)
artifact_space_freed = sum(delete_path, artifacts_to_delete; init=0)
scratch_space_freed = sum(delete_path, spaces_to_delete; init=0)
# Prune package paths that are now empty
for depot in gc_depots
    packagedir = abspath(depot, "packages")
    isdir(packagedir) || continue
    for name in readdir(packagedir)
        # Remove `packages/<name>` only when it is a directory and every
        # versioned slug beneath it has already been deleted.
        name_path = joinpath(packagedir, name)
        if isdir(name_path) && isempty(readdir(name_path))
            Base.rm(name_path)
        end
    end
end