Merge pull request #9116 from iwita/ls-volumes-before-pruning
List volumes before pruning
openshift-merge-robot authored Feb 2, 2021
2 parents d66a18c + 9b5b03d commit 628b0d7
Showing 2 changed files with 61 additions and 6 deletions.
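In short: when --force is not set, podman volume prune now lists the dangling volumes that match the given --filter arguments before asking for confirmation, instead of printing only the generic warning. A minimal illustration of the new prompt, with hypothetical volume names:

$ podman volume prune
WARNING! This will remove all volumes not used by at least one container. The following volumes will be removed:
myvol4
myvol5
myvol6
Are you sure you want to continue? [y/N] n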
52 changes: 46 additions & 6 deletions cmd/podman/volumes/prune.go
@@ -49,16 +49,46 @@ func init() {
 
 func prune(cmd *cobra.Command, args []string) error {
     var (
-        pruneOptions = entities.VolumePruneOptions{}
+        pruneOptions  = entities.VolumePruneOptions{}
+        listOptions   = entities.VolumeListOptions{}
+        unusedOptions = entities.VolumeListOptions{}
     )
     // Prompt for confirmation if --force is not set
     force, err := cmd.Flags().GetBool("force")
     if err != nil {
         return err
     }
+    pruneOptions.Filters, err = filters.ParseFilterArgumentsIntoFilters(filter)
     if !force {
         reader := bufio.NewReader(os.Stdin)
-        fmt.Println("WARNING! This will remove all volumes not used by at least one container.")
+        fmt.Println("WARNING! This will remove all volumes not used by at least one container. The following volumes will be removed:")
+        if err != nil {
+            return err
+        }
+        listOptions.Filter, err = filters.ParseFilterArgumentsIntoFilters(filter)
+        if err != nil {
+            return err
+        }
+        // filter all the dangling volumes
+        unusedOptions.Filter = make(map[string][]string, 1)
+        unusedOptions.Filter["dangling"] = []string{"true"}
+        unusedVolumes, err := registry.ContainerEngine().VolumeList(context.Background(), unusedOptions)
+        if err != nil {
+            return err
+        }
+        // filter volumes based on user input
+        filteredVolumes, err := registry.ContainerEngine().VolumeList(context.Background(), listOptions)
+        if err != nil {
+            return err
+        }
+        finalVolumes := getIntersection(unusedVolumes, filteredVolumes)
+        if len(finalVolumes) < 1 {
+            fmt.Println("No dangling volumes found")
+            return nil
+        }
+        for _, fv := range finalVolumes {
+            fmt.Println(fv.Name)
+        }
         fmt.Print("Are you sure you want to continue? [y/N] ")
         answer, err := reader.ReadString('\n')
         if err != nil {
@@ -68,13 +98,23 @@ func prune(cmd *cobra.Command, args []string) error {
             return nil
         }
     }
-    pruneOptions.Filters, err = filters.ParseFilterArgumentsIntoFilters(filter)
-    if err != nil {
-        return err
-    }
     responses, err := registry.ContainerEngine().VolumePrune(context.Background(), pruneOptions)
     if err != nil {
         return err
     }
     return utils.PrintVolumePruneResults(responses, false)
 }
+
+func getIntersection(a, b []*entities.VolumeListReport) []*entities.VolumeListReport {
+    var intersection []*entities.VolumeListReport
+    hash := make(map[string]bool, len(a))
+    for _, aa := range a {
+        hash[aa.Name] = true
+    }
+    for _, bb := range b {
+        if hash[bb.Name] {
+            intersection = append(intersection, bb)
+        }
+    }
+    return intersection
+}
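The getIntersection helper keeps only the volumes present in both listings (dangling volumes on one side, volumes matching the user's filters on the other), keyed by name. Below is a standalone sketch of the same idea; volumeReport and intersectByName are local stand-ins used for illustration, not podman's entities.VolumeListReport or the helper above.

package main

import "fmt"

// volumeReport is a stand-in for entities.VolumeListReport; only the
// Name field matters for the intersection.
type volumeReport struct {
    Name string
}

// intersectByName mirrors getIntersection in prune.go: record the names
// from the first slice in a set, then keep the entries of the second
// slice whose names appear in that set.
func intersectByName(a, b []*volumeReport) []*volumeReport {
    seen := make(map[string]bool, len(a))
    for _, v := range a {
        seen[v.Name] = true
    }
    var out []*volumeReport
    for _, v := range b {
        if seen[v.Name] {
            out = append(out, v)
        }
    }
    return out
}

func main() {
    dangling := []*volumeReport{{Name: "vol4"}, {Name: "vol5"}, {Name: "vol6"}}
    labeled := []*volumeReport{{Name: "vol5"}, {Name: "vol6"}}
    for _, v := range intersectByName(dangling, labeled) {
        fmt.Println(v.Name) // prints vol5 then vol6
    }
}

Building a set from the first slice keeps the intersection linear in the total number of volumes, and the result preserves the order of the second listing.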
15 changes: 15 additions & 0 deletions test/system/160-volumes.bats
@@ -214,6 +214,13 @@ EOF
         run_podman volume create $vol
     done
 
+    # Create two additional labeled volumes
+    for i in 5 6; do
+        vol=myvol${i}$(random_string)
+        v[$i]=$vol
+        run_podman volume create $vol --label "mylabel"
+    done
+
     # (Assert that output is formatted, not a one-line blob: #8011)
     run_podman volume inspect ${v[1]}
     if [[ "${#lines[*]}" -lt 10 ]]; then
@@ -225,6 +232,14 @@ EOF
     run_podman run --name c2 --volume ${v[2]}:/vol2 -v ${v[3]}:/vol3 \
                $IMAGE date
 
+    # List available volumes for pruning after using 1,2,3
+    run_podman volume prune <<< N
+    is "$(echo $(sort <<<${lines[@]:1:3}))" "${v[4]} ${v[5]} ${v[6]}" "volume prune, with 1,2,3 in use, lists 4,5,6"
+
+    # List available volumes for pruning after using 1,2,3 and filtering; see #8913
+    run_podman volume prune --filter label=mylabel <<< N
+    is "$(echo $(sort <<<${lines[@]:1:2}))" "${v[5]} ${v[6]}" "volume prune, with 1,2,3 in use and 4 filtered out, lists 5,6"
+
     # prune should remove v4
     run_podman volume prune --force
     is "$output" "${v[4]}" "volume prune, with 1, 2, 3 in use, deletes only 4"
