# Purge the URLs contained in purge-list.txt from the Cloudflare cache
#
# We delay this job to give the pages:deploy job time to finish. If we don't
# delay, then the cache might refill with old pages before the new pages are
# finished deploying.
purge-cache:
  tags:
    - docker
  # Only run when the Cloudflare credentials are configured; delayed so the
  # deploy has time to finish before we purge (see header comment).
  rules:
    - if: $CF_PURGE_CACHE_ZONE && $CF_PURGE_CACHE_TOKEN
      when: delayed
      start_in: 3 minutes
  # purge-list.txt is produced by the create-purge-list job in the parent
  # pipeline; 'needs' pulls its artifacts into this job.
  needs:
    - pipeline: $PARENT_PIPELINE_ID
      job: create-purge-list
  before_script:
    # Make sure the purge-list.txt file is readable, else exit
    - test -r purge-list.txt || { echo "purge-list.txt not found" ; exit 1 ; }
    - echo "Purge list has $(wc -l < purge-list.txt) URLs"
    # Default to chunks of 30 URLs because Cloudflare only allows 30 URLs per
    # purge request on free accounts. ':=' assigns the default in this shell,
    # so the script section below sees the same value.
    - echo "Chunk size of ${CF_PURGE_CACHE_CHUNK_SIZE:=30}"
  script:
    # Split the purge list into chunks named 'purge-chunk-[aaa,aab,...]'
    - split -l $CF_PURGE_CACHE_CHUNK_SIZE -a 3 purge-list.txt purge-chunk-
    # -maxdepth 1 keeps the count in step with the flat purge-chunk-* glob
    # used by the loop below (no recursion into subdirectories).
    - chunks=$(find . -maxdepth 1 -type f -name 'purge-chunk-*' | wc -l)
    # Loop over the chunks, creating a purge request for each
    - |-
      counter=0
      for chunk in purge-chunk-* ; do
        # POSIX-portable increment ($((++counter)) is a bashism)
        counter=$((counter + 1))
        echo -e "\nChunk $counter/$chunks"
        # Create the purge request body
        echo -en "{\n \"files\": [" > purge.json
        # comma must be empty for the first line in each chunk, then ','
        # before every subsequent entry, so the JSON array is well-formed
        unset comma
        # read -r: don't let backslashes in URLs be interpreted as escapes
        while read -r path; do
          echo -en "$comma\n \"$path\"" >> purge.json
          comma=','
        done < "$chunk"
        echo -e "\n ]\n}" >> purge.json
        cat purge.json
        # Make the API request to Cloudflare to purge the URLs from cache
        # (--post-file makes this a POST with purge.json as the body)
        wget -qO- "https://api.cloudflare.com/client/v4/zones/$CF_PURGE_CACHE_ZONE/purge_cache" \
          --header "Content-Type: application/json" \
          --header "Authorization: Bearer $CF_PURGE_CACHE_TOKEN" \
          --post-file purge.json
        # Rate limit ourselves to 1 request per second
        sleep 1
      done

# vi: set ts=2 sw=2 et ft=yaml: