Merge pull request #10 from gynzy/PF-2687-pull-jsonnet-20241105145546
chore: update jsonnet [PF-2687]
fhp authored Nov 5, 2024
2 parents 109b642 + bafb45d commit b481ab6
Showing 26 changed files with 1,392 additions and 506 deletions.
2 changes: 1 addition & 1 deletion .github/jsonnet/GIT_VERSION
@@ -1 +1 @@
4187fef119638c2f8453bf4fd3d6da5641e4ffee
84ec8f29f0c97ca93ebcb74c1e338d3cb6c03302
2 changes: 1 addition & 1 deletion .github/jsonnet/README.md
@@ -1,2 +1,2 @@
These files come from https://www.github.com/gynzy/lib-jsonnet/
Do not update here, but extend the libraries upstream.
Do not update here, but extend the libraries upstream.
14 changes: 9 additions & 5 deletions .github/jsonnet/base.jsonnet
@@ -1,3 +1,6 @@
local images = import 'images.jsonnet';
local misc = import 'misc.jsonnet';

{
pipeline(name, jobs, event=['pull_request'], permissions=null, concurrency=null):: {
[name + '.yml']:
@@ -14,7 +17,7 @@
name,
timeoutMinutes=30,
runsOn=null,
image=$.default_job_image,
image=images.default_job_image,
steps=[],
ifClause=null,
needs=null,
@@ -36,7 +39,7 @@
{
container: {
image: image,
} + (if useCredentials then { credentials: { username: '_json_key', password: $.secret('docker_gcr_io') } } else {}),
} + (if useCredentials then { credentials: { username: '_json_key', password: misc.secret('docker_gcr_io') } } else {}),
}
) +
{
@@ -49,7 +52,7 @@
(if permissions == null then {} else { permissions: permissions }) +
(if concurrency == null then {} else { concurrency: concurrency }) +
(if continueOnError == null then {} else { 'continue-on-error': continueOnError }) +
(if env == null then {} else { env: env })
(if env == null then {} else { env: env }),
},

ghExternalJob(
@@ -65,7 +68,7 @@
} else {}),
},

step(name, run, env=null, workingDirectory=null, ifClause=null, id=null, continueOnError=null)::
step(name, run, env=null, workingDirectory=null, ifClause=null, id=null, continueOnError=null, shell=null)::
[
{
name: name,
@@ -74,7 +77,8 @@
+ (if env != null then { env: env } else {})
+ (if ifClause != null then { 'if': ifClause } else {})
+ (if id != null then { id: id } else {})
+ (if continueOnError == null then {} else { 'continue-on-error': continueOnError }),
+ (if continueOnError == null then {} else { 'continue-on-error': continueOnError })
+ (if shell == null then {} else { 'shell': shell }),
],

action(name, uses, env=null, with=null, id=null, ifClause=null, continueOnError=null)::
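
The diff above threads a new optional shell parameter through step(). A minimal sketch of a call site, assuming the library is vendored at .github/jsonnet; the step name and run command are illustrative, not from this commit:

local base = import 'base.jsonnet';

// Hypothetical step: pin the step to bash so pipefail is available.
base.step(
  'compress assets',
  run='set -o pipefail; ./scripts/compress.sh | tee compress.log',
  shell='bash',
)
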
152 changes: 152 additions & 0 deletions .github/jsonnet/buckets.jsonnet
@@ -0,0 +1,152 @@
{
// Uploads all files in the source folder to the destination bucket, including compression and TTL headers.
//
// Warnings:
// - remote/destination files not included in the source will be DELETED recursively if pruneRemote is true!
// - the files in the source directory will be modified. Do not attempt to use this directory after running this command.
// - must be run with the bash shell.
//
// Parameters:
// sourcePath: The source directory to upload. Can be a local folder or a path in a bucket, depending on sourceBucket. Required.
// sourceBucket: The source bucket. If null, the sourcePath is a local directory. Defaults to null.
// destinationBucket: The destination bucket. Required.
// destinationPath: The destination directory in the bucket. Required.
//
// pruneRemote: If true, all files in the destination bucket that are not in the source will be deleted. Can only be used when destinationPath contains '/pr-'.
//
// compressFileExtentions: A list of file extensions that will be compressed. Set to an empty list to disable compression.
// compressJobs: The number of parallel gzip compression jobs. Use 4 for arc-runner-2 and 16 for arc-runner-16. Defaults to 4.
//
// lowTTLfiles: A list of files, or a single regex, that will be uploaded with a low TTL. Use this for files that are not fingerprinted.
//
// lowTTL: The TTL for lowTTLfiles. Defaults to 60 seconds.
// lowTTLStaleWhileRevalidate: The stale-while-revalidate value for lowTTLfiles. Defaults to 60 seconds.
// lowTTLHeader: The Cache-Control header for lowTTLfiles. This is generated from lowTTL and lowTTLStaleWhileRevalidate.
//
// highTTL: The TTL for all other files. Defaults to 1 week.
// highTTLStaleWhileRevalidate: The stale-while-revalidate value for all other files. Defaults to 1 day.
// highTTLHeader: The Cache-Control header for all other files. This is generated from highTTL and highTTLStaleWhileRevalidate.
//
// additionalHeaders: Additional headers to add to all uploaded files. This should be an array of strings.
uploadFilesToBucketCommand(
sourcePath,
sourceBucket=null,
destinationBucket,
destinationPath,
pruneRemote=false,
compressFileExtentions=['css', 'svg', 'html', 'json', 'js', 'xml', 'txt', 'map'],
compressJobs=4,
lowTTLfiles=[],
lowTTL=60,
lowTTLStaleWhileRevalidate=60,
lowTTLHeader='Cache-Control: public, max-age=' + lowTTL + (if lowTTLStaleWhileRevalidate == 0 then '' else ', stale-while-revalidate=' + lowTTLStaleWhileRevalidate),
highTTL=604800, // 1 week
highTTLStaleWhileRevalidate=86400, // 1 day
highTTLHeader='Cache-Control: public, max-age=' + highTTL + (if highTTLStaleWhileRevalidate == 0 then '' else ', stale-while-revalidate=' + highTTLStaleWhileRevalidate),
additionalHeaders=[],
)::
// if this function is called with remote pruning, destination must contain pr-
assert !pruneRemote || std.length(std.findSubstr('/pr-', destinationPath)) > 0;

local hasLowTTLfiles = (std.isArray(lowTTLfiles) && std.length(lowTTLfiles) > 0) || (std.isString(lowTTLfiles) && lowTTLfiles != '');
local lowTTLfilesRegex = if std.isArray(lowTTLfiles) then '(' + std.strReplace(std.join('|', lowTTLfiles), '.', '\\.') + ')' else lowTTLfiles;
local highTTLfilesRegex = '(?!' + lowTTLfilesRegex + ').*';

local hasCompressedFiles = (std.isArray(compressFileExtentions) && std.length(compressFileExtentions) > 0) || (std.isString(compressFileExtentions) && compressFileExtentions != '');
local compressedFilesRegex = '(' + std.join('|', std.map(function(ext) '((.*(\\.|/))?' + ext + ')', compressFileExtentions)) + ')';
local uncompressedFilesRegex = '(?!' + compressedFilesRegex + ').*';

local compressionHeader = 'Content-Encoding: gzip';


local rsyncCommand = function(name, excludeRegexes, headers)
local excludeRegex = if std.length(excludeRegexes) == 0 then null else '^((' + std.join(')|(', excludeRegexes) + '))$';

'echo "Uploading ' + name + ' files"\n' +
'gsutil -m ' + std.join(' ', std.map(function(header) '-h "' + header + '" ', headers + additionalHeaders)) + 'rsync -r -c' +
(if excludeRegex == null then '' else ' -x "' + excludeRegex + '"') +
(if pruneRemote then ' -d' else '') +
(if sourceBucket == null then ' ./' else ' gs://' + sourceBucket + '/' + sourcePath + '/') +
' gs://' + destinationBucket + '/' + destinationPath + '/;\n' +
'echo "Uploading ' + name + ' files completed"; echo\n' +
'\n';

'set -e -o pipefail;\n' +
(if sourceBucket == null then 'cd ' + sourcePath + ';\n' else '') +
'\n' +


if hasCompressedFiles then
(
if sourceBucket == null then
'echo "Compressing files in parallel before uploading"\n' +
'{\n' +
" for file in `find . -type f -regextype posix-egrep -regex '" + compressedFilesRegex + "' | sed --expression 's/\\.\\///g'`; do\n" +
' echo "gzip -9 $file; mv $file.gz $file"\n' +
' done\n' +
'} | parallel --halt now,fail=1 -j ' + compressJobs + '\n' +
'echo "Compressing files in parallel completed"\n' +
'\n'
else ''
) +

if hasLowTTLfiles then
rsyncCommand(
'highTTL compressed',
excludeRegexes=[lowTTLfilesRegex, uncompressedFilesRegex],
headers=[highTTLHeader, compressionHeader],
) +
rsyncCommand(
'highTTL uncompressed',
excludeRegexes=[lowTTLfilesRegex, compressedFilesRegex],
headers=[highTTLHeader],
) +

rsyncCommand(
'lowTTL compressed',
excludeRegexes=[highTTLfilesRegex, uncompressedFilesRegex],
headers=[lowTTLHeader, compressionHeader],
) +
rsyncCommand(
'lowTTL uncompressed',
excludeRegexes=[highTTLfilesRegex, compressedFilesRegex],
headers=[lowTTLHeader],
)


else // no lowTTL files, with compression
rsyncCommand(
'compressed',
excludeRegexes=[uncompressedFilesRegex],
headers=[highTTLHeader, compressionHeader],
) +
rsyncCommand(
'uncompressed',
excludeRegexes=[compressedFilesRegex],
headers=[highTTLHeader],
)


else // no compression
if hasLowTTLfiles then
rsyncCommand(
'highTTL',
excludeRegexes=[lowTTLfilesRegex],
headers=[highTTLHeader],
) +

rsyncCommand(
'lowTTL',
excludeRegexes=[highTTLfilesRegex],
headers=[lowTTLHeader],
)


else // no lowTTL files, no compression
rsyncCommand(
'all',
excludeRegexes=[],
headers=[highTTLHeader],
),

}
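
A sketch of how uploadFilesToBucketCommand might back a deploy step; the bucket and path names here are hypothetical. With the defaults, lowTTLHeader evaluates to 'Cache-Control: public, max-age=60, stale-while-revalidate=60' and highTTLHeader to 'Cache-Control: public, max-age=604800, stale-while-revalidate=86400'.

local base = import 'base.jsonnet';
local buckets = import 'buckets.jsonnet';

// Hypothetical PR-preview upload. pruneRemote passes the assert only
// because destinationPath contains '/pr-'.
base.step(
  'upload assets',
  run=buckets.uploadFilesToBucketCommand(
    sourcePath='dist',
    destinationBucket='my-assets-bucket',
    destinationPath='previews/pr-123',
    pruneRemote=true,
    lowTTLfiles=['index.html'],  // not fingerprinted, so served with a low TTL
  ),
  shell='bash',  // the generated script must be run with bash (see warnings above)
)
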
128 changes: 128 additions & 0 deletions .github/jsonnet/cache.jsonnet
@@ -0,0 +1,128 @@
local base = import 'base.jsonnet';

{
// Fetch a cache from the cache server.
// This is a generic function that can be used to fetch any cache. It is advised to wrap this function
// in a more specific function that fetches a specific cache, setting the cacheName and folders parameters.
//
// To be paired with the uploadCache function.
//
// Parameters:
// cacheName: The name of the cache to fetch. The name of the repository is usually a good option. Required.
// backupCacheName: The name of a backup cache to fetch if the main cache fails. Default is null.
// folders: A list of folders that are in the cache. These will be deleted if the download fails. Can be an empty list if additionalCleanupCommands are used.
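// version: The version suffix of the cache archive. Default is 'v1'.
// backupCacheVersion: The version of the backup cache. Defaults to version.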
// additionalCleanupCommands: A list of additional commands to run if the download fails. Default is an empty list.
// ifClause: An optional if clause to conditionally run this step. Default is null.
// workingDirectory: The working directory for this step. Default is null.
// retry: Whether to retry the download if it fails. Default is true.
// continueWithoutCache: Whether to continue if the cache is not found. Default is true.
fetchCache(
cacheName,
backupCacheName=null,
folders=[],
version='v1',
backupCacheVersion=version,
additionalCleanupCommands=[],
ifClause=null,
workingDirectory=null,
retry=true,
continueWithoutCache=true,
)::
assert std.length(folders) > 0 || std.length(additionalCleanupCommands) > 0;

local downloadCommand(cacheName, version, nextSteps, indent = '') =
indent + 'wget -q -O - "https://storage.googleapis.com/files-gynzy-com-test/ci-cache/' + cacheName + '-' + version + '.tar.zst" | tar --extract --zstd \n' +
indent + 'if [ $? -ne 0 ]; then\n' +
indent + ' echo "Cache download failed, cleanup up partial downloads"\n' +
(if std.length(folders) > 0 then indent + ' rm -rf ' + std.join(' ', folders) + '\n' else '') +
std.join(' ', std.map(function(cmd) indent + ' ' + cmd + '\n', additionalCleanupCommands)) +
indent + ' echo "Cleanup complete"; echo\n\n' +
nextSteps +
indent + 'fi\n';

local downloadCommandWithRetry(cacheName, version, nextSteps, indent = '') =
downloadCommand(
cacheName,
version,
if retry then
indent + ' echo "Retrying download..."\n' +
downloadCommand(cacheName, version, nextSteps, indent + ' ')
else
nextSteps,
indent,
);

local backupIndent = (if retry then ' ' else ' ');

local downloadFailedCommand = backupIndent + 'echo "Cache download failed :( ' + (if continueWithoutCache then 'Continuing without cache"' else 'Aborting"; exit 1') + '\n';

base.step(
'download ' + cacheName + ' cache',
run=
'set +e;\n' +
'command -v zstd || { apt update && apt install -y zstd; }\n' +
'echo "Downloading cache"\n' +
downloadCommandWithRetry(
cacheName,
version,
if backupCacheName != null then
backupIndent + 'echo "Downloading backup cache"\n' +
downloadCommandWithRetry(backupCacheName, backupCacheVersion, backupIndent + downloadFailedCommand, indent=backupIndent)
else
downloadFailedCommand,
),
ifClause=ifClause,
workingDirectory=workingDirectory,
),

// Uploads a cache to the cache server.
// This is a generic function that can be used to upload any cache. It is advised to wrap this function
// in a more specific function that uploads a specific cache, setting the cacheName and folders parameters.
//
// To be paired with the fetchCache function.
//
// Parameters:
// cacheName: The name of the cache to upload. The name of the repository is usually a good option. Required.
// folders: A list of folders to include in the cache. Required unless tarCommand is given.
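// version: The version suffix of the cache archive. Default is 'v1'.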
// compressionLevel: The compression level to use for zstd. Default is 10.
// tarCommand: The command to run to create the tar file. Default is 'tar -c ' + std.join(' ', folders).
uploadCache(
cacheName,
folders=null,
version='v1',
compressionLevel=10,
tarCommand='tar -c ' + std.join(' ', folders),
)::
local cacheBucketPath = function(temp=false)
'gs://files-gynzy-com-test/ci-cache/' + cacheName + '-' + version + '.tar.zst' + (if temp then '.tmp' else '');

base.step(
'upload ' + cacheName + ' cache',
run=
'set -e\n' +
'\n' +
'command -v zstd || { apt update && apt install -y zstd; }\n' +
'\n' +
'echo "Create and upload cache"\n' +
tarCommand + ' | zstdmt -' + compressionLevel + ' | gsutil cp - "' + cacheBucketPath(temp=true) + '"\n' +
'gsutil mv "' + cacheBucketPath(temp=true) + '" "' + cacheBucketPath(temp=false) + '"\n' +

'echo "Upload finished"\n'
),

// Removes a cache from the cache server.
// This is a generic function that can be used to remove any cache. It is advised to wrap this function
// in a more specific function that removes a specific cache, setting the cacheName parameter.
//
// Parameters:
// cacheName: The name of the cache to remove. The name of the repository is usually a good option. Required.
// version: The version of the cache to remove. Default is 'v1'.
removeCache(cacheName, version='v1')::
base.step(
'remove ' + cacheName + ' cache',
run=
'set +e;\n' +
'gsutil rm "gs://files-gynzy-com-test/ci-cache/' + cacheName + '-' + version + '.tar.zst"\n' +
'echo "Cache removed"\n'
),
}
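
As the comments suggest, fetchCache and uploadCache are meant to be paired around a build. A sketch under assumed names (the cache name, folder, and install command are hypothetical):

local base = import 'base.jsonnet';
local cache = import 'cache.jsonnet';

// Steps concatenate as arrays: restore the cache, build, then refresh it.
cache.fetchCache(
  cacheName='my-repo',             // the repository name, per the advice above
  backupCacheName='my-repo-main',  // optional fallback cache
  folders=['node_modules'],        // wiped again if the download fails midway
) +
base.step('install', run='yarn install --frozen-lockfile') +
cache.uploadCache(
  cacheName='my-repo',
  folders=['node_modules'],
)
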
41 changes: 22 additions & 19 deletions .github/jsonnet/clusters.jsonnet
@@ -1,24 +1,27 @@
local misc = import 'misc.jsonnet';

{
clusters: {
test: {
project: 'gynzy-test-project',
name: 'test',
zone: 'europe-west4-b',
secret: '${{ secrets.GCE_NEW_TEST_JSON }}',
},
test: {
project: 'gynzy-test-project',
name: 'test',
zone: 'europe-west4-b',
secret: misc.secret('GCE_NEW_TEST_JSON'),
jobNodeSelectorType: 'preemptible',
},

prod: {
project: 'unicorn-985',
name: 'prod-europe-west4',
zone: 'europe-west4',
secret: '${{ secrets.GCE_JSON }}',
},
prod: {
project: 'unicorn-985',
name: 'prod-europe-west4',
zone: 'europe-west4',
secret: misc.secret('GCE_JSON'),
jobNodeSelectorType: 'worker',
},

'gynzy-intern': {
project: 'gynzy-intern',
name: 'gynzy-intern',
zone: 'europe-west4',
secret: '${{ secrets.CONTINUOUS_DEPLOYMENT_GCE_JSON }}',
},
'gynzy-intern': {
project: 'gynzy-intern',
name: 'gynzy-intern',
zone: 'europe-west4',
secret: misc.secret('CONTINUOUS_DEPLOYMENT_GCE_JSON'),
jobNodeSelectorType: 'worker',
},
}
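
Judging by the literals it replaces, misc.secret(name) expands to the '${{ secrets.NAME }}' workflow expression. A sketch of consuming a cluster entry, including the new jobNodeSelectorType field (the surrounding object is hypothetical):

local clusters = import 'clusters.jsonnet';

local target = clusters.clusters.test;
{
  project: target.project,                          // 'gynzy-test-project'
  zone: target.zone,                                // 'europe-west4-b'
  secret: target.secret,                            // '${{ secrets.GCE_NEW_TEST_JSON }}'
  jobNodeSelectorType: target.jobNodeSelectorType,  // 'preemptible'
}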