feat: enable the build of the verifier docker image in the workflows #650

Merged: 9 commits, Aug 2, 2024
26 changes: 26 additions & 0 deletions .github/workflows/build_and_deploy_generic.yml
@@ -92,6 +92,31 @@ jobs:
dockerfile_name: ./indexer/Dockerfile
build_dir: ./indexer/

docker-verifier:
name: Build and push docker image for verifier
runs-on: ubuntu-latest
steps:
- name: Load secret
id: op-load-secret
uses: 1password/load-secrets-action@v1
with:
export-env: true
env:
OP_SERVICE_ACCOUNT_TOKEN: ${{ secrets.OP_SERVICE_ACCOUNT_TOKEN }}
AWS_ACCESS_KEY_ID: op://DevOps/passport-scorer-${{ inputs.environment }}-secrets/ci/AWS_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY: op://DevOps/passport-scorer-${{ inputs.environment }}-secrets/ci/AWS_SECRET_ACCESS_KEY
- id: build_and_push_docker_image
uses: passportxyz/gh-workflows/.github/actions/build_and_push@v1
with:
refspec: ${{ inputs.refspec }}
docker_tag: ${{ inputs.docker_tag }}
ecr_repository_name: passport-verifier
aws_region: us-west-2
aws_access_key_id: ${{ env.AWS_ACCESS_KEY_ID }}
aws_secret_access_key: ${{ env.AWS_SECRET_ACCESS_KEY }}
dockerfile_name: ./verifier/Dockerfile
build_dir: ./verifier/

deploy_preview:
name: Preview - Deploying AWS Infra
runs-on: ubuntu-latest
@@ -149,6 +174,7 @@ jobs:
docker-ecs,
docker-indexer,
docker-lambda,
docker-verifier,
deploy_preview,
deploy_confirm,
]
5 changes: 1 addition & 4 deletions .github/workflows/test_generic.yml
@@ -11,13 +11,10 @@ on:
type: string
required: true
uptime-robot-monitor-dry-run:
type: choice
type: string
description: "Set to '--dry-run' to not actually create monitors"
default: ""
required: false
options:
- ""
- "--dry-run"

jobs:
test:
63 changes: 36 additions & 27 deletions api/ceramic_cache/api/v1.py
@@ -4,10 +4,7 @@
from datetime import timedelta
from typing import Any, Dict, List, Optional, Type

import api_logging as logging
import tos.api
import tos.schema
from account.models import Account, Community, Nonce
import requests
from asgiref.sync import async_to_sync
from django.conf import settings
from django.contrib.auth import get_user_model
@@ -24,6 +21,12 @@
# from ninja_jwt.schema import RefreshToken
from ninja_jwt.settings import api_settings
from ninja_jwt.tokens import RefreshToken, Token, TokenError

import api_logging as logging
import tos.api
import tos.schema
from account.models import Account, Community, Nonce
from ceramic_cache.utils import get_utc_time
from registry.api.utils import (
is_valid_address,
)
@@ -42,15 +45,13 @@
from stake.api import handle_get_gtc_stake
from stake.schema import GetSchemaResponse

from ceramic_cache.utils import get_utc_time

from ..exceptions import (
InternalServerException,
InvalidDeleteCacheRequestException,
TooManyStampsException,
)
from ..models import CeramicCache
from ..utils import validate_dag_jws_payload, verify_jws
from ..utils import validate_dag_jws_payload
from .schema import (
AccessTokenResponse,
CacaoVerifySubmit,
@@ -548,42 +549,50 @@ def handle_authenticate(payload: CacaoVerifySubmit) -> AccessTokenResponse:
if not validate_dag_jws_payload({"nonce": payload.nonce}, payload.payload):
log.error("Failed to validate nonce: '%s'", payload.nonce)
raise FailedVerificationException(detail="Invalid nonce or payload!")

except Exception as exc:
log.error("Failed authenticate request: '%s'", payload.dict(), exc_info=True)
raise FailedVerificationException(detail="Invalid nonce or payload!") from exc

try:
try:
verify_jws(payload.dict())

except Exception as exc:
log.error(
"Failed to authenticate request (verify_jws failed): '%s'",
payload.dict(),
exc_info=True,
)
raise FailedVerificationException(
detail=f"Failed to authenticate request: {str(exc)}"
) from exc
res = requests.post(
settings.VERIFIER_URL,
json={
"signatures": payload.signatures,
"payload": payload.payload,
"cid": payload.cid,
"cacao": payload.cacao,
"issuer": payload.issuer,
},
)

token = DbCacheToken()
token["did"] = payload.issuer
if res.status_code == 200:
data = res.json()
if data.get("status") == "ok":
token = DbCacheToken()
token["did"] = payload.issuer
return {
"access": str(token.access_token),
}

return {
"access": str(token.access_token),
}
log.error(
"Failed to validate authentication payload (jws)! Response: %s\n%s",
res,
res.json(),
)
raise FailedVerificationException(detail=f"JWS validation failed: {res.json()}")

except APIException:
# re-raise API exceptions
raise
except Exception as esc:
log.error(
"Failed authenticate request (verify_jws failed): '%s'",
"Failed to authenticate request (verify_jws failed): '%s'",
payload.dict(),
exc_info=True,
)
raise APIException(detail=f"Failed authenticate request: {str(esc)}") from esc
raise APIException(
detail=f"Failed to authenticate request: {str(esc)}"
) from esc


def get_detailed_score_response_for_address(
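Reviewer note on the v1.py change above: the in-process `verify_jws` call is replaced by an HTTP round trip to the new verifier service. Below is a minimal sketch of the contract the Django side now assumes; the request fields and the `{"status": "ok"}` success shape come straight from this diff, and the URL default comes from the `VERIFIER_URL` setting added in base.py. The explicit timeout and the helper name are assumptions for illustration, not part of the PR.

```python
# Sketch of the verifier contract assumed by handle_authenticate (illustrative only).
import requests

VERIFIER_URL = "http://localhost:8001/verifier/verify"  # default from api/scorer/settings/base.py


def verify_with_service(payload: dict) -> bool:
    """Return True if the external verifier accepts the JWS, False otherwise."""
    res = requests.post(
        VERIFIER_URL,
        json={
            "signatures": payload["signatures"],
            "payload": payload["payload"],
            "cid": payload["cid"],
            "cacao": payload["cacao"],
            "issuer": payload["issuer"],
        },
        timeout=10,  # assumption: the PR code does not set an explicit timeout
    )
    # Success is a 200 response whose body is {"status": "ok"}; anything else
    # is treated as a failed verification and surfaces as a 400 to the client.
    return res.status_code == 200 and res.json().get("status") == "ok"
```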
40 changes: 28 additions & 12 deletions api/ceramic_cache/test/test_authenticate_v1.py
@@ -3,10 +3,11 @@
from collections import namedtuple

import pytest
from account.models import Nonce
from django.test import Client
from ninja_jwt.tokens import AccessToken

from account.models import Nonce

pytestmark = pytest.mark.django_db

client = Client()
@@ -39,14 +40,19 @@ def test_authenticate_validates_payload(self, mocker):
"""
We expect that the authenticate request:
1. validates the payload against the nonce
2. makes a validation request for checkting the jws to verify_jws, and verify_jws does not throw
2. makes a validation request to the verifier, which must return success

If both are ok, the test should succeed
"""
MockedRequestResponse = namedtuple("MockedRequestResponse", "status_code")

class MockedRequestResponse:
status_code = 200

def json(self):
return {"status": "ok"}

with mocker.patch(
"ceramic_cache.api.v1.verify_jws",
return_value=None,
"ceramic_cache.api.v1.requests.post", return_value=MockedRequestResponse()
):
with mocker.patch(
"ceramic_cache.api.v1.validate_dag_jws_payload", return_value=True
@@ -122,14 +128,19 @@ def test_authenticate_fails_when_validating_jws_fails(self, mocker):
"""
We expect that the authenticate request:
1. validates the payload against the nonce
2. validates the jws with verify_jws
2. validates the jws with verifier

The test should fail at step 2 if the verifier reports a failure status
"""

class MockedRequestResponse:
status_code = 200

def json(self):
return {"status": "failure", "error": "something went wrong"}

with mocker.patch(
"ceramic_cache.api.v1.verify_jws",
side_effect=Exception("JWS validation failed"),
"ceramic_cache.api.v1.requests.post", return_value=MockedRequestResponse()
):
with mocker.patch(
"ceramic_cache.api.v1.validate_dag_jws_payload", return_value=True
@@ -153,14 +164,19 @@ def test_authenticate_fails_when_validating_jws_throws(self, mocker):
"""
We expect that the authenticate request:
1. validates the payload against the nonce
2. validates the jws with verify_jws
2. validates the jws with the verifier

The test should fail at step 2 if the verifier reports a failure
"""

class MockedRequestResponse:
status_code = 200

def json(self):
return {"status": "failure", "error": "something went wrong"}

with mocker.patch(
"ceramic_cache.api.v1.verify_jws",
side_effect=Exception("this is broken"),
"ceramic_cache.api.v1.requests.post", return_value=MockedRequestResponse()
):
with mocker.patch(
"ceramic_cache.api.v1.validate_dag_jws_payload", return_value=True
@@ -177,4 +193,4 @@ def test_authenticate_fails_when_validating_jws_throws(self, mocker):
assert auth_response.status_code == 400

assert "detail" in json_data
assert json_data["detail"].startswith("Failed to authenticate request")
assert json_data["detail"].startswith("JWS validation failed")
2 changes: 2 additions & 0 deletions api/scorer/settings/base.py
@@ -453,3 +453,5 @@
"127.0.0.1",
# ...
]

VERIFIER_URL = env("VERIFIER_URL", default="http://localhost:8001/verifier/verify")
47 changes: 47 additions & 0 deletions infra/aws/index.ts
@@ -21,6 +21,7 @@ import { createScheduledTask } from "../lib/scorer/scheduledTasks";
import { secretsManager } from "infra-libs";

import * as op from "@1password/op-js";
import { createVerifierService } from "./verifier";

// The following vars are not allowed to be undefined, hence the `${...}` magic

@@ -57,6 +58,7 @@ const publicDataDomain =

const current = aws.getCallerIdentity({});
const regionData = aws.getRegion({});

export const dockerGtcPassportScorerImage = pulumi
.all([current, regionData])
.apply(
@@ -78,6 +80,13 @@ export const dockerGtcStakingIndexerImage = pulumi
`${acc.accountId}.dkr.ecr.${region.id}.amazonaws.com/passport-indexer:${DOCKER_IMAGE_TAG}`
);

export const verifierDockerImage = pulumi
.all([current, regionData])
.apply(
([acc, region]) =>
`${acc.accountId}.dkr.ecr.${region.id}.amazonaws.com/passport-verifier:${DOCKER_IMAGE_TAG}`
);

const redashDbUsername = op.read.parse(
`op://DevOps/passport-scorer-${stack}-env/ci/REDASH_DB_USER`
);
@@ -618,6 +627,10 @@ const apiEnvironment = [
name: "ALLOWED_HOSTS",
value: JSON.stringify([domain, "*"]),
},
{
name: "VERIFIER_URL",
value: "http://core-alb.private.gitcoin.co/verifier/verify",
},
].sort(secretsManager.sortByName);

const apiSecrets = secretsManager.syncSecretsAndGetRefs({
@@ -1478,20 +1491,26 @@ const lambdaSettings = {
name: "SCORER_SERVER_SSM_ARN",
value: scorerSecret.arn,
},
{
name: "VERIFIER_URL",
value: "http://core-alb.private.gitcoin.co/verifier/verify",
},
].sort(secretsManager.sortByName),
roleAttachments: httpRoleAttachments,
role: httpLambdaRole,
alertTopic: pagerdutyTopic,
alb: alb,
};

// Create alarms for the load balancer
createLoadBalancerAlarms(
"scorer-service",
alb.arnSuffix,
alarmConfigurations,
pagerdutyTopic
);

// Manage Lambda services
buildHttpLambdaFn(
{
...lambdaSettings,
@@ -1633,3 +1652,31 @@ buildQueueLambdaFn({
role: queueLambdaRole,
queue: rescoreQueue,
});

// VERIFIER
const privateAlbHttpListenerArn = coreInfraStack.getOutput(
"privateAlbHttpListenerArn"
);
const privateAlbArnSuffix = coreInfraStack.getOutput(
"privateAlbArnSuffix"
);

const verifier = pulumi
.all([verifierDockerImage])
.apply(([_verifierDockerImage]) =>
createVerifierService({
vpcId: vpcID as pulumi.Output<string>,
albListenerArn: privateAlbHttpListenerArn as pulumi.Output<string>,
privateAlbArnSuffix: privateAlbArnSuffix as pulumi.Output<string>,
albPriorityRule: 1011,
pathPatterns: ["/verifier/*"],
clusterArn: cluster.arn,
clusterName: cluster.name,
dockerImage: _verifierDockerImage,
vpcPrivateSubnets: vpcPrivateSubnetIds as pulumi.Output<string[]>,
snsTopicArn: pagerdutyTopic.arn,
})
);

export const verifierTaskArn = verifier.task.arn;