Commit

Update main.yml
idabblewith committed Sep 26, 2024
1 parent af5473c commit 8954494
Showing 1 changed file with 60 additions and 60 deletions.
120 changes: 60 additions & 60 deletions .github/workflows/main.yml
@@ -59,63 +59,63 @@ jobs:
          docker tag ghcr.io/dbca-wa/science-projects-client:${{ steps.tag.outputs.TAG_NAME }} ghcr.io/dbca-wa/science-projects-client:latest
          docker push ghcr.io/dbca-wa/science-projects-client:latest

  cleanup:
    name: Cleanup old tags (keep latest 16)
    runs-on: ubuntu-latest
    needs: [build_and_push_production, build_and_push_test]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Run tag cleanup script
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo "Fetching list of tags..."
          page=1
          tags=""
          while :; do
            result=$(curl -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/tags?page=$page&per_page=100" | jq -r '.[] | .name + " " + (.commit.committer.date)')
            [[ -z "$result" ]] && break
            tags+="$result"$'\n'
            ((page++))
          done
          # Prepare a list to hold all tags
          tag_list=()
          while IFS= read -r line; do
            tag_list+=("$line")
          done <<< "$tags"
          # Sort tags by push date (latest first)
          IFS=$'\n' sorted_tags=($(sort -r -t ' ' -k 2 <<< "${tag_list[*]}"))
          # Keep the latest 16 tags
          keep_tags=("${sorted_tags[@]:0:16}")
          keep_tags_names=()
          for tag in "${keep_tags[@]}"; do
            tag_name=$(echo "$tag" | cut -d ' ' -f 1)
            keep_tags_names+=("$tag_name")
          done
          echo "Keeping latest 16 tags: ${keep_tags_names[*]}"
          # Loop through each tag and delete if not in keep list
          for line in "${sorted_tags[@]}"; do
            tag_name=$(echo "$line" | cut -d ' ' -f 1)
            if ! [[ " ${keep_tags_names[*]} " =~ " $tag_name " ]]; then
              if [ "$DRY_RUN" == "true" ]; then
                echo "Dry run: would delete tag $tag_name"
              else
                echo "Deleting tag $tag_name..."
                # Uncomment below to actually delete the tags
                # curl -X DELETE -H "Authorization: token $GITHUB_TOKEN" \
                #   https://api.github.com/repos/${{ github.repository }}/git/refs/tags/$tag_name
              fi
            else
              echo "Keeping tag $tag_name"
            fi
          done
  # cleanup:
  #   name: Cleanup old tags (keep latest 16)
  #   runs-on: ubuntu-latest
  #   needs: [build_and_push_production, build_and_push_test]
  #   steps:
  #     - name: Checkout repository
  #       uses: actions/checkout@v4
  #       with:
  #         fetch-depth: 0

  #     - name: Run tag cleanup script
  #       env:
  #         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  #       run: |
  #         echo "Fetching list of tags..."
  #         page=1
  #         tags=""
  #         while :; do
  #           result=$(curl -H "Authorization: token $GITHUB_TOKEN" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/tags?page=$page&per_page=100" | jq -r '.[] | .name + " " + (.commit.committer.date)')
  #           [[ -z "$result" ]] && break
  #           tags+="$result"$'\n'
  #           ((page++))
  #         done

  #         # Prepare a list to hold all tags
  #         tag_list=()
  #         while IFS= read -r line; do
  #           tag_list+=("$line")
  #         done <<< "$tags"

  #         # Sort tags by push date (latest first)
  #         IFS=$'\n' sorted_tags=($(sort -r -t ' ' -k 2 <<< "${tag_list[*]}"))

  #         # Keep the latest 16 tags
  #         keep_tags=("${sorted_tags[@]:0:16}")
  #         keep_tags_names=()
  #         for tag in "${keep_tags[@]}"; do
  #           tag_name=$(echo "$tag" | cut -d ' ' -f 1)
  #           keep_tags_names+=("$tag_name")
  #         done

  #         echo "Keeping latest 16 tags: ${keep_tags_names[*]}"

  #         # Loop through each tag and delete if not in keep list
  #         for line in "${sorted_tags[@]}"; do
  #           tag_name=$(echo "$line" | cut -d ' ' -f 1)

  #           if ! [[ " ${keep_tags_names[*]} " =~ " $tag_name " ]]; then
  #             if [ "$DRY_RUN" == "true" ]; then
  #               echo "Dry run: would delete tag $tag_name"
  #             else
  #               echo "Deleting tag $tag_name..."
  #               # Uncomment below to actually delete the tags
  #               # curl -X DELETE -H "Authorization: token $GITHUB_TOKEN" \
  #               #   https://api.github.com/repos/${{ github.repository }}/git/refs/tags/$tag_name
  #             fi
  #           else
  #             echo "Keeping tag $tag_name"
  #           fi
  #         done
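
A note for anyone reusing the cleanup job in this diff: the script branches on "$DRY_RUN", but the step's env block only exports GITHUB_TOKEN, so the variable is empty at runtime and execution falls through to the delete branch (which is itself still commented out). A minimal sketch of one way to wire it up, assuming DRY_RUN is meant to be step-level configuration rather than a value injected from outside the workflow:

      - name: Run tag cleanup script
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DRY_RUN: "true" # hypothetical default; set to "false" once the curl DELETE is re-enabled
        run: |
          # ...same tag-listing and cleanup script as in the diff above...
          echo "DRY_RUN is set to $DRY_RUN"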
