From e3c572b7b9983237b919d02a4f85e08f6be48b97 Mon Sep 17 00:00:00 2001
From: N-o-Z
Date: Mon, 16 Sep 2024 12:05:20 -0400
Subject: [PATCH] Use ACL server for CI (#8155)

---
 .github/workflows/esti.yaml              | 150 ++++++++++++++++-------
 .gitignore                               |   1 +
 esti/auth_test.go                        |  45 +++----
 esti/lakectl_test.go                     |   3 +-
 esti/lakefs_test.go                      |  11 +-
 esti/ops/docker-compose-dynamodb.yaml    |  52 ++------
 esti/ops/docker-compose-external-db.yaml |  58 +--------
 esti/ops/docker-compose-fluffy.yaml      | 122 ------------------
 esti/ops/docker-compose.yaml             |   7 +-
 esti/system_test.go                      |  23 +++-
 10 files changed, 178 insertions(+), 294 deletions(-)
 delete mode 100644 esti/ops/docker-compose-fluffy.yaml

diff --git a/.github/workflows/esti.yaml b/.github/workflows/esti.yaml
index d200122f2c4..70c23e6f817 100644
--- a/.github/workflows/esti.yaml
+++ b/.github/workflows/esti.yaml
@@ -43,12 +43,11 @@ jobs:
         run: echo "tag=sha-$(git rev-parse --short HEAD | sed s/^v//g)" >> $GITHUB_OUTPUT

       - name: Restore cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: restore-cache
         with:
           path: /tmp/generated.tar.gz
-          key: ${{ runner.os }}-go-${{ hashFiles('./pkg/**', './api/**', './webui/**', './auth/**') }}
-          restore-keys: ${{ runner.os }}-go-
+          key: ${{ runner.os }}-go-${{ hashFiles('./pkg/**', './api/**', './webui/**', './auth/**', './acl') }}

       - name: Setup Go
         if: steps.restore-cache.outputs.cache-hit != 'true'
@@ -69,7 +68,8 @@ jobs:
         if: steps.restore-cache.outputs.cache-hit != 'true'
         run: |
           make -j3 gen-api gen-code gen-ui VERSION=${{ steps.version.outputs.tag }}
-          tar -czf /tmp/generated.tar.gz ./webui/dist ./pkg/auth/{client,service_wrapper,service_inviter_wrapper}.gen.go ./pkg/authentication/apiclient/client.gen.go ./pkg/permissions/actions.gen.go ./pkg/api/apigen/lakefs.gen.go
+          go build ./contrib/auth/acl/cmd/acl
+          tar -czf /tmp/generated.tar.gz ./webui/dist ./pkg/auth/{client,service_wrapper,service_inviter_wrapper}.gen.go ./pkg/authentication/apiclient/client.gen.go ./pkg/permissions/actions.gen.go ./pkg/api/apigen/lakefs.gen.go ./acl

       # must upload artifact in order to download generated later
       - name: Store generated code
@@ -219,7 +219,7 @@ jobs:
         run: tar -xzvf /tmp/generated.tar.gz

       - name: Restore cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: restore-cache
         with:
           path: ${{ github.workspace }}/test/spark/metaclient
@@ -659,7 +659,7 @@ jobs:
       - name: Check-out code
         uses: actions/checkout@v4
       - name: Restore cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: restore-cache
         with:
           path: ${{ github.workspace }}/test/spark/metaclient
@@ -700,7 +700,7 @@ jobs:
       - name: Check-out code
         uses: actions/checkout@v4
       - name: Restore cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: restore-cache
         with:
           path: ${{ github.workspace }}/test/spark/metaclient
@@ -765,12 +765,22 @@ jobs:
         id: unique
         run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

-      # Required for pulling fluffy image
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      # Retrieve ACL server binary from cache
+      - name: Retrieve generated code
+        uses: actions/download-artifact@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USER }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+          name: generated-code
+          path: /tmp/
+
+      - name: Unpack generated code
+        run: tar -xzvf /tmp/generated.tar.gz
+
+      # Run ACL server
+      - name: Run ACL server
+        env:
+          ACLSERVER_ENCRYPT_SECRET_KEY: some random secret string # Must be the same as lakeFS
+          ACLSERVER_DATABASE_TYPE: local
+        run: ./acl run &

       - name: Test lakeFS with S3 tests KV
         uses: ./.github/actions/bootstrap-test-lakefs
@@ -778,6 +788,8 @@ jobs:
           compose-file: esti/ops/docker-compose-dynamodb.yaml
           compose-flags: "--quiet-pull --exit-code-from=esti"
         env:
+          LAKEFS_AUTH_API_ENDPOINT: http://host.docker.internal:8001/api/v1
+          LAKEFS_AUTH_UI_CONFIG_RBAC: simplified
           LAKEFS_BLOCKSTORE_TYPE: s3
           LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
           LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
@@ -826,19 +838,31 @@ jobs:
         id: unique
         run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

-      # Required for pulling fluffy image
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      # Retrieve ACL server binary from cache
+      - name: Retrieve generated code
+        uses: actions/download-artifact@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USER }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+          name: generated-code
+          path: /tmp/
+
+      - name: Unpack generated code
+        run: tar -xzvf /tmp/generated.tar.gz
+
+      # Run ACL server
+      - name: Run ACL server
+        env:
+          ACLSERVER_ENCRYPT_SECRET_KEY: some random secret string # Must be the same as lakeFS
+          ACLSERVER_DATABASE_TYPE: local
+        run: ./acl run &

       - name: Test lakeFS with S3 tests
         uses: ./.github/actions/bootstrap-test-lakefs
         with:
-          compose-file: esti/ops/docker-compose-fluffy.yaml
+          compose-file: esti/ops/docker-compose.yaml
           compose-flags: "--quiet-pull --exit-code-from=esti"
         env:
+          LAKEFS_AUTH_API_ENDPOINT: http://host.docker.internal:8001/api/v1
+          LAKEFS_AUTH_UI_CONFIG_RBAC: simplified
           LAKEFS_BLOCKSTORE_TYPE: s3
           LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
           LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
@@ -860,7 +884,7 @@ jobs:
       - name: lakeFS Logs on s3 failure
         if: ${{ failure() }}
         continue-on-error: true
-        run: docker compose -f esti/ops/docker-compose-fluffy.yaml logs --tail=1000 lakefs
+        run: docker compose -f esti/ops/docker-compose.yaml logs --tail=1000 lakefs

       - name: Export DB
         if: ${{ always() }}
@@ -887,20 +911,32 @@ jobs:
         id: unique
         run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

-      # Required for pulling fluffy image
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      # Retrieve ACL server binary from cache
+      - name: Retrieve generated code
+        uses: actions/download-artifact@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USER }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+          name: generated-code
+          path: /tmp/
+
+      - name: Unpack generated code
+        run: tar -xzvf /tmp/generated.tar.gz
+
+      # Run ACL server
+      - name: Run ACL server
+        env:
+          ACLSERVER_ENCRYPT_SECRET_KEY: some random secret string # Must be the same as lakeFS
+          ACLSERVER_DATABASE_TYPE: local
+        run: ./acl run &

       - name: Start lakeFS with GS tests
         uses: ./.github/actions/bootstrap-test-lakefs
         with:
-          compose-file: esti/ops/docker-compose-fluffy.yaml
+          compose-file: esti/ops/docker-compose.yaml
           compose-flags: "--quiet-pull --exit-code-from=esti"
         env:
           DOCKER_REG: ${{ needs.login-to-amazon-ecr.outputs.registry }}
+          LAKEFS_AUTH_API_ENDPOINT: http://host.docker.internal:8001/api/v1
+          LAKEFS_AUTH_UI_CONFIG_RBAC: simplified
           LAKEFS_BLOCKSTORE_TYPE: gs
           LAKEFS_DATABASE_TYPE: postgres
           LAKEFS_BLOCKSTORE_GS_CREDENTIALS_JSON: ${{ secrets.LAKEFS_BLOCKSTORE_GS_CREDENTIALS_JSON }}
@@ -910,7 +946,7 @@ jobs:
       - name: lakeFS Logs on GS failure
         if: ${{ failure() }}
         continue-on-error: true
-        run: docker compose -f esti/ops/docker-compose-fluffy.yaml logs --tail=1000 lakefs
+        run: docker compose -f esti/ops/docker-compose.yaml logs --tail=1000 lakefs

   run-system-azure-abfs:
     name: Run latest lakeFS app on Azure with Azure blobstore
@@ -932,20 +968,32 @@ jobs:
         id: unique
         run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

-      # Required for pulling fluffy image
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      # Retrieve ACL server binary from cache
+      - name: Retrieve generated code
+        uses: actions/download-artifact@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USER }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+          name: generated-code
+          path: /tmp/
+
+      - name: Unpack generated code
+        run: tar -xzvf /tmp/generated.tar.gz
+
+      # Run ACL server
+      - name: Run ACL server
+        env:
+          ACLSERVER_ENCRYPT_SECRET_KEY: some random secret string # Must be the same as lakeFS
+          ACLSERVER_DATABASE_TYPE: local
+        run: ./acl run &

       - name: Start lakeFS with Azure tests
         uses: ./.github/actions/bootstrap-test-lakefs
         with:
-          compose-file: esti/ops/docker-compose-fluffy.yaml
+          compose-file: esti/ops/docker-compose.yaml
           compose-flags: "--quiet-pull --exit-code-from=esti"
         env:
           DOCKER_REG: ${{ needs.login-to-amazon-ecr.outputs.registry }}
+          LAKEFS_AUTH_API_ENDPOINT: http://host.docker.internal:8001/api/v1
+          LAKEFS_AUTH_UI_CONFIG_RBAC: simplified
           LAKEFS_DATABASE_TYPE: postgres
           LAKEFS_BLOCKSTORE_TYPE: azure
           ESTI_AZURE_STORAGE_ACCOUNT: esti
@@ -956,7 +1004,7 @@ jobs:
       - name: lakeFS Logs on Azure failure
         if: ${{ failure() }}
         continue-on-error: true
-        run: docker compose -f esti/ops/docker-compose-fluffy.yaml logs --tail=1000 lakefs
+        run: docker compose -f esti/ops/docker-compose.yaml logs --tail=1000 lakefs

       - name: See the env when we would have tried to publish coverage
         run: env
@@ -984,12 +1032,22 @@ jobs:
         id: unique
         run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

-      # Required for pulling fluffy image
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
+      # Retrieve ACL server binary from cache
+      - name: Retrieve generated code
+        uses: actions/download-artifact@v3
         with:
-          username: ${{ secrets.DOCKERHUB_USER }}
-          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+          name: generated-code
+          path: /tmp/
+
+      - name: Unpack generated code
+        run: tar -xzvf /tmp/generated.tar.gz
+
+      # Run ACL server
+      - name: Run ACL server
+        env:
+          ACLSERVER_ENCRYPT_SECRET_KEY: some random secret string # Must be the same as lakeFS
+          ACLSERVER_DATABASE_TYPE: local
+        run: ./acl run &

       - name: Start lakeFS with Azure tests
         uses: ./.github/actions/bootstrap-test-lakefs
@@ -998,6 +1056,8 @@ jobs:
           compose-flags: "--quiet-pull --exit-code-from=esti"
         env:
           DOCKER_REG: ${{ needs.login-to-amazon-ecr.outputs.registry }}
+          LAKEFS_AUTH_API_ENDPOINT: http://host.docker.internal:8001/api/v1
+          LAKEFS_AUTH_UI_CONFIG_RBAC: simplified
           LAKEFS_DATABASE_TYPE: cosmosdb
           LAKEFS_DATABASE_COSMOSDB_ENDPOINT: "https://${{ env.COSMOSDB_ACCOUNT }}.documents.azure.com"
           LAKEFS_DATABASE_COSMOSDB_DATABASE: ${{ env.COSMOSDB_DATABASE }}
@@ -1026,7 +1086,7 @@ jobs:
       - name: lakeFS Logs on Azure failure
         if: ${{ failure() }}
         continue-on-error: true
-        run: docker compose -f esti/ops/docker-compose-fluffy.yaml logs --tail=1000 lakefs
+        run: docker compose -f esti/ops/docker-compose.yaml logs --tail=1000 lakefs

   python-wrapper:
     name: Test lakeFS against the python wrapper client
@@ -1073,7 +1133,7 @@ jobs:
       - name: Generate uniquifying value
         id: unique
         run: echo "value=$RANDOM" >> $GITHUB_OUTPUT
-      
+
       - name: Run Python Wrapper Tests
         uses: pavelzw/pytest-action@v2
         env:
@@ -1132,7 +1192,7 @@ jobs:
         working-directory: webui
         run: echo "PLAYWRIGHT_VERSION=$(node -e "console.log(require('./package-lock.json').dependencies['@playwright/test'].version)")" >> $GITHUB_ENV
"console.log(require('./package-lock.json').dependencies['@playwright/test'].version)")" >> $GITHUB_ENV - name: Cache Playwright binaries - uses: actions/cache@v3 + uses: actions/cache@v4 id: playwright-cache with: path: | @@ -1188,7 +1248,7 @@ jobs: // 3. Prepare format of the comment const output = ` # E2E Test Results - ${process.env.PLAYWRIGHT_PROJECT} - + ${content} `; @@ -1251,7 +1311,7 @@ jobs: working-directory: webui run: echo "PLAYWRIGHT_VERSION=$(node -e "console.log(require('./package-lock.json').dependencies['@playwright/test'].version)")" >> $GITHUB_ENV - name: Cache Playwright binaries - uses: actions/cache@v3 + uses: actions/cache@v4 id: playwright-cache with: path: | @@ -1307,7 +1367,7 @@ jobs: // 3. Prepare format of the comment const output = ` # E2E Test Results - ${process.env.PLAYWRIGHT_PROJECT} - + ${content} `; @@ -1361,7 +1421,6 @@ jobs: ESTI_AWS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} ESTI_AWS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }} ESTI_VERSION: ${{ needs.deploy-image.outputs.tag }} - ESTI_AUTH_BASIC: true - name: Check files in S3 bucket run: | @@ -1380,4 +1439,3 @@ jobs: if docker compose ps -q postgres; then docker compose exec -T postgres pg_dumpall --username=lakefs | gzip | aws s3 cp - s3://esti-system-testing/${{ github.run_number }}/${{ steps.unique.outputs.value }}/dump.gz fi - \ No newline at end of file diff --git a/.gitignore b/.gitignore index 1b420ea7bcc..0a09d520e34 100644 --- a/.gitignore +++ b/.gitignore @@ -42,6 +42,7 @@ /cmd/lakectl/lakectl /dist /delta/ +/acl # client excludes /webui/node_modules diff --git a/esti/auth_test.go b/esti/auth_test.go index 3a46acbea6c..dcbc7bdccca 100644 --- a/esti/auth_test.go +++ b/esti/auth_test.go @@ -26,7 +26,7 @@ func TestAdminPermissions(t *testing.T) { resCreateGroup, err := client.CreateGroupWithResponse(ctx, apigen.CreateGroupJSONRequestBody{ Id: gname, }) - if isBasicAuth() { + if isBasicAuth(t, ctx) { require.NoError(t, err, "Admin failed while creating group") require.Equal(t, http.StatusNotImplemented, resCreateGroup.StatusCode()) return @@ -65,16 +65,16 @@ func TestAdminPermissions(t *testing.T) { // Test Super Permissions: AuthManageOwnCredentials, FSFullAccess, RepoManagementReadAll func TestSuperPermissions(t *testing.T) { - if isBasicAuth() { + ctx, log, repo := setupTest(t) + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx, logger, repo := setupTest(t) groups := []string{"Supers", "SuperUsers"} // map group names to IDs mapGroupNameToID, groupIDs := mapGroupNamesToIDs(t, ctx, groups) // generate the Super client - superClient := newClientFromGroup(t, ctx, logger, "super", groupIDs) + superClient := newClientFromGroup(t, ctx, log, "super", groupIDs) // listing the available branches should succeed resListBranches, err := superClient.ListBranchesWithResponse(ctx, repo, &apigen.ListBranchesParams{}) @@ -120,16 +120,17 @@ func TestSuperPermissions(t *testing.T) { // Test Writer Permissions: AuthManageOwnCredentials, FSFullAccess, RepoManagementReadAll func TestWriterPermissions(t *testing.T) { - if isBasicAuth() { + ctx, log, repo := setupTest(t) + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx, logger, repo := setupTest(t) + groups := []string{"Writers", "Developers"} // map group names to IDs _, groupIDs := mapGroupNamesToIDs(t, ctx, groups) // generate the Writer client - writerClient := newClientFromGroup(t, ctx, logger, "writer", groupIDs) + writerClient := newClientFromGroup(t, ctx, log, 
"writer", groupIDs) // listing the available branches should succeed resListBranches, err := writerClient.ListBranchesWithResponse(ctx, repo, &apigen.ListBranchesParams{}) @@ -170,10 +171,10 @@ func TestWriterPermissions(t *testing.T) { // Test Reader Permissions: AuthManageOwnCredentials, FSReadAll func TestReaderPermissions(t *testing.T) { - if isBasicAuth() { + ctx, log, repo := setupTest(t) + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx, log, repo := setupTest(t) groups := []string{"Readers", "Viewers"} // map group names to IDs @@ -210,10 +211,11 @@ func TestReaderPermissions(t *testing.T) { } func TestCreateRepo_Unauthorized(t *testing.T) { - if isBasicAuth() { + ctx := context.Background() + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx := context.Background() + name := generateUniqueRepositoryName() storageNamespace := generateUniqueStorageNamespace(name) name = makeRepositoryName(name) @@ -238,10 +240,10 @@ func TestCreateRepo_Unauthorized(t *testing.T) { } func TestRepoMetadata_Unauthorized(t *testing.T) { - if isBasicAuth() { + ctx, log, repo := setupTest(t) + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx, log, repo := setupTest(t) // generate client with no group association clt := newClientFromGroup(t, ctx, log, "none", nil) @@ -277,11 +279,10 @@ func TestRepoMetadata_Unauthorized(t *testing.T) { } func TestCreatePolicy(t *testing.T) { - if isBasicAuth() { + ctx := context.Background() + if !isAdvancedAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx := context.Background() - //ctx, log, repo := setupTest(t) t.Run("valid_policy", func(t *testing.T) { resp, err := client.CreatePolicyWithResponse(ctx, apigen.CreatePolicyJSONRequestBody{ @@ -319,10 +320,10 @@ func TestCreatePolicy(t *testing.T) { } func TestBranchProtectionRules_Unauthorized(t *testing.T) { - if isBasicAuth() { + ctx, log, repo := setupTest(t) + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx, log, repo := setupTest(t) // generate client with no group association clt := newClientFromGroup(t, ctx, log, "none", nil) @@ -339,10 +340,10 @@ func TestBranchProtectionRules_Unauthorized(t *testing.T) { } func TestGarbageCollectionRules_Unauthorized(t *testing.T) { - if isBasicAuth() { + ctx, log, repo := setupTest(t) + if isBasicAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx, log, repo := setupTest(t) // generate client with no group association clt := newClientFromGroup(t, ctx, log, "none", nil) @@ -388,10 +389,10 @@ func newClientFromGroup(t *testing.T, context context.Context, logger logging.Lo } func TestUpdatePolicy(t *testing.T) { - if isBasicAuth() { + ctx := context.Background() + if !isAdvancedAuth(t, ctx) { t.Skip("Unsupported in basic auth configuration") } - ctx := context.Background() // test policy now := apiutil.Ptr(time.Now().Unix()) diff --git a/esti/lakectl_test.go b/esti/lakectl_test.go index 5cb093bf13d..70f97d59dd8 100644 --- a/esti/lakectl_test.go +++ b/esti/lakectl_test.go @@ -407,11 +407,12 @@ func TestLakectlAnnotate(t *testing.T) { } func TestLakectlAuthUsers(t *testing.T) { + ctx := context.Background() userName := "test_user" vars := map[string]string{ "ID": userName, } - isSupported := !isBasicAuth() + isSupported := !isBasicAuth(t, ctx) // Not Found RunCmdAndVerifyFailure(t, Lakectl()+" auth users delete --id "+userName, false, "user not found\n404 Not Found\n", vars) diff --git 
index 84c49f2ee18..bc1446c1ef6 100644
--- a/esti/lakefs_test.go
+++ b/esti/lakefs_test.go
@@ -1,6 +1,9 @@
 package esti

-import "testing"
+import (
+	"context"
+	"testing"
+)

 func TestLakefsHelp(t *testing.T) {
 	RunCmdAndVerifySuccessWithFile(t, Lakefs(), false, "lakefs/help", emptyVars)
@@ -15,7 +18,8 @@ func TestLakefsSuperuser_basic(t *testing.T) {
 	outputString := "credentials:\n access_key_id: \n secret_access_key: \n"
 	username := t.Name()
 	expectFailure := false
-	if isBasicAuth() {
+	ctx := context.Background()
+	if isBasicAuth(t, ctx) {
 		lakefsCmd = LakefsWithBasicAuth()
 		outputString = "already exists"
 		expectFailure = true
@@ -26,7 +30,8 @@
 func TestLakefsSuperuser_alreadyExists(t *testing.T) {
 	RequirePostgresDB(t)
 	lakefsCmd := Lakefs()
-	if isBasicAuth() {
+	ctx := context.Background()
+	if isBasicAuth(t, ctx) {
 		lakefsCmd = LakefsWithBasicAuth()
 	}
 	// On init - the AdminUsername is already created and expected error should be "already exist" (also in basic auth mode)
diff --git a/esti/ops/docker-compose-dynamodb.yaml b/esti/ops/docker-compose-dynamodb.yaml
index 6cd25de7e90..f5e8199c9f5 100644
--- a/esti/ops/docker-compose-dynamodb.yaml
+++ b/esti/ops/docker-compose-dynamodb.yaml
@@ -6,22 +6,13 @@ services:
     ports:
       - "8000:8000"
     depends_on:
-      - "fluffy"
+      - "dynamodb"
     volumes:
       - lakefs-app:/app:ro
     environment:
       - LAKEFS_AUTH_ENCRYPT_SECRET_KEY="some random secret string"
-      - LAKEFS_AUTH_API_ENDPOINT=http://fluffy:9000/api/v1
-      - LAKEFS_AUTH_API_SUPPORTS_INVITES=true
-      - LAKEFS_AUTH_LOGOUT_REDIRECT_URL=http://fluffy:8000/oidc/logout
-      - LAKEFS_AUTH_UI_CONFIG_LOGIN_URL=http://fluffy:8000/oidc/login
-      - LAKEFS_AUTH_UI_CONFIG_LOGOUT_URL=http://fluffy:8000/oidc/logout
-      - LAKEFS_AUTH_UI_CONFIG_RBAC=internal
-      - LAKEFS_AUTH_UI_CONFIG_LOGIN_COOKIE_NAMES=[internal_auth_session,oidc_auth_session]
-      - LAKEFS_AUTH_OIDC_FRIENDLY_NAME_CLAIM_NAME="nickname"
-      - LAKEFS_AUTH_OIDC_DEFAULT_INITIAL_GROUPS=["Admins"]
-      - LAKEFS_AUTH_AUTHENTICATION_API_ENDPOINT=http://fluffy:8000/api/v1
-      - LAKEFS_AUTH_AUTHENTICATION_API_EXTERNAL_PRINCIPALS_ENABLED=true
+      - LAKEFS_AUTH_UI_CONFIG_RBAC=${LAKEFS_AUTH_UI_CONFIG_RBAC:-simplified}
+      - LAKEFS_AUTH_API_ENDPOINT=${LAKEFS_AUTH_API_ENDPOINT:-http://localhost:8001/api/v1}
       - LAKEFS_BLOCKSTORE_TYPE=${LAKEFS_BLOCKSTORE_TYPE:-local}
       - LAKEFS_BLOCKSTORE_LOCAL_PATH=/home/lakefs
       - LAKEFS_BLOCKSTORE_LOCAL_IMPORT_ENABLED=true
@@ -37,40 +28,11 @@ services:
       - LAKEFS_DATABASE_DYNAMODB_AWS_REGION=us-east-1
       - LAKEFS_DATABASE_DYNAMODB_AWS_ACCESS_KEY_ID=AKIAIO5FODNN7EXAMPLE
       - LAKEFS_DATABASE_DYNAMODB_AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K3MDENG/bPxRfiCYEXAMPLEKEY
-    entrypoint: ["/app/wait-for", "fluffy:8000", "--", "/app/lakefs", "run"]
-
-  fluffy:
-    image: treeverse/fluffy:0.7.1
-    command: "${COMMAND:-run}"
-    ports:
-      - "8001:8000"
-      - "9000:9000"
-    depends_on:
-      - "dynamodb"
-    environment:
-      - FLUFFY_LOGGING_LEVEL=DEBUG
-      - FLUFFY_DATABASE_TYPE=dynamodb
-      - FLUFFY_DATABASE_DYNAMODB_ENDPOINT=http://dynamodb:8000
-      - FLUFFY_DATABASE_DYNAMODB_AWS_REGION=us-east-1
-      - FLUFFY_DATABASE_DYNAMODB_AWS_ACCESS_KEY_ID=AKIAIO5FODNN7EXAMPLE
-      - FLUFFY_DATABASE_DYNAMODB_AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K3MDENG/bPxRfiCYEXAMPLEKEY
-      - FLUFFY_AUTH_ENCRYPT_SECRET_KEY="some random secret string"
-      - FLUFFY_AUTH_SERVE_LISTEN_ADDRESS=0.0.0.0:9000
-      - FLUFFY_LISTEN_ADDRESS=0.0.0.0:8000
-      - FLUFFY_AUTH_SERVE_DISABLE_AUTHENTICATION=true
-      - FLUFFY_AUTH_LOGOUT_REDIRECT_URL=https://lakefs-cloud-dev.us.auth0.com/v2/logout
-      - FLUFFY_AUTH_POST_LOGIN_REDIRECT_URL=http://lakefs:8000/
-      - FLUFFY_AUTH_OIDC_ENABLED=true
-      - FLUFFY_AUTH_OIDC_URL=https://lakefs-cloud-dev.us.auth0.com/
-      - FLUFFY_AUTH_OIDC_CLIENT_ID=${AUTH0_CLIENT_ID}
-      - FLUFFY_AUTH_OIDC_CLIENT_SECRET=${AUTH0_CLIENT_SECRET}
-      - FLUFFY_AUTH_OIDC_CALLBACK_BASE_URL=http://fluffy:8000
-      - FLUFFY_AUTH_OIDC_IS_DEFAULT_LOGIN=true
-      - FLUFFY_AUTH_OIDC_LOGOUT_CLIENT_ID_QUERY_PARAMETER=client_id
-      - FLUFFY_AUTH_EXTERNAL_AWS_AUTH_ENABLED=true
-    entrypoint: ["/app/fluffy"]
+    entrypoint: ["/app/lakefs", "run"]
+    extra_hosts:
+      - "host.docker.internal:host-gateway"

-  dynamodb:
+  dynamodb:
     image: "amazon/dynamodb-local:2.5.2"
     ports:
       - "6432:8000"
diff --git a/esti/ops/docker-compose-external-db.yaml b/esti/ops/docker-compose-external-db.yaml
index 3fea5ef7ecd..a63cd65f2bc 100644
--- a/esti/ops/docker-compose-external-db.yaml
+++ b/esti/ops/docker-compose-external-db.yaml
@@ -5,23 +5,12 @@ services:
     command: "${COMMAND:-run}"
     ports:
       - "8000:8000"
-    depends_on:
-      - "fluffy"
     volumes:
       - lakefs-app:/app:ro
     environment:
       - LAKEFS_AUTH_ENCRYPT_SECRET_KEY="some random secret string"
-      - LAKEFS_AUTH_API_ENDPOINT=http://fluffy:9000/api/v1
-      - LAKEFS_AUTH_API_SUPPORTS_INVITES=true
-      - LAKEFS_AUTH_LOGOUT_REDIRECT_URL=http://fluffy:8000/oidc/logout
-      - LAKEFS_AUTH_UI_CONFIG_LOGIN_URL=http://fluffy:8000/oidc/login
-      - LAKEFS_AUTH_UI_CONFIG_LOGOUT_URL=http://fluffy:8000/oidc/logout
-      - LAKEFS_AUTH_UI_CONFIG_RBAC=internal
-      - LAKEFS_AUTH_UI_CONFIG_LOGIN_COOKIE_NAMES=[internal_auth_session,oidc_auth_session]
-      - LAKEFS_AUTH_OIDC_FRIENDLY_NAME_CLAIM_NAME="nickname"
-      - LAKEFS_AUTH_OIDC_DEFAULT_INITIAL_GROUPS=["Admins"]
-      - LAKEFS_AUTH_AUTHENTICATION_API_ENDPOINT=http://fluffy:8000/api/v1
-      - LAKEFS_AUTH_AUTHENTICATION_API_EXTERNAL_PRINCIPALS_ENABLED=true
+      - LAKEFS_AUTH_UI_CONFIG_RBAC=${LAKEFS_AUTH_UI_CONFIG_RBAC:-simplified}
+      - LAKEFS_AUTH_API_ENDPOINT=${LAKEFS_AUTH_API_ENDPOINT:-http://localhost:8001/api/v1}
       - LAKEFS_BLOCKSTORE_TYPE=${LAKEFS_BLOCKSTORE_TYPE:-local}
       - LAKEFS_BLOCKSTORE_LOCAL_PATH=/home/lakefs
       - LAKEFS_BLOCKSTORE_LOCAL_IMPORT_ENABLED=true
@@ -39,45 +28,10 @@ services:
       - AZURE_CLIENT_ID
       - AZURE_CLIENT_SECRET
       - AZURE_TENANT_ID
-    entrypoint: ["/app/wait-for", "fluffy:8000", "--", "/app/lakefs", "run"]
-
-  fluffy:
-    image: treeverse/fluffy:0.7.1
-    command: "${COMMAND:-run}"
-    ports:
-      - "8001:8000"
-      - "9000:9000"
-    depends_on:
-      - "dynamodb"
-    environment:
-      - FLUFFY_LOGGING_LEVEL=DEBUG
-      - FLUFFY_DATABASE_TYPE=dynamodb
-      - FLUFFY_DATABASE_DYNAMODB_ENDPOINT=http://dynamodb:8000
-      - FLUFFY_DATABASE_DYNAMODB_AWS_REGION=us-east-1
-      - FLUFFY_DATABASE_DYNAMODB_AWS_ACCESS_KEY_ID=AKIAIO5FODNN7EXAMPLE
-      - FLUFFY_DATABASE_DYNAMODB_AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K3MDENG/bPxRfiCYEXAMPLEKEY
-      - FLUFFY_AUTH_ENCRYPT_SECRET_KEY="some random secret string"
-      - FLUFFY_AUTH_SERVE_LISTEN_ADDRESS=0.0.0.0:9000
-      - FLUFFY_LISTEN_ADDRESS=0.0.0.0:8000
-      - FLUFFY_AUTH_SERVE_DISABLE_AUTHENTICATION=true
-      - FLUFFY_AUTH_LOGOUT_REDIRECT_URL=https://lakefs-cloud-dev.us.auth0.com/v2/logout
-      - FLUFFY_AUTH_POST_LOGIN_REDIRECT_URL=http://lakefs:8000/
-      - FLUFFY_AUTH_OIDC_ENABLED=true
-      - FLUFFY_AUTH_OIDC_URL=https://lakefs-cloud-dev.us.auth0.com/
-      - FLUFFY_AUTH_OIDC_CLIENT_ID=${AUTH0_CLIENT_ID}
-      - FLUFFY_AUTH_OIDC_CLIENT_SECRET=${AUTH0_CLIENT_SECRET}
-      - FLUFFY_AUTH_OIDC_CALLBACK_BASE_URL=http://fluffy:8000
-      - FLUFFY_AUTH_OIDC_IS_DEFAULT_LOGIN=true
-      - FLUFFY_AUTH_OIDC_LOGOUT_CLIENT_ID_QUERY_PARAMETER=client_id
-      - FLUFFY_AUTH_EXTERNAL_AWS_AUTH_ENABLED=true
-    entrypoint: ["/app/fluffy"]
-
-  # Fluffy is using dynamodb local container because we don't have an integration with fluffy and cosmosdb (which is ok)
-  dynamodb:
-    image: "amazon/dynamodb-local:2.5.2"
-    ports:
-      - "6432:8000"
-
+    entrypoint: ["/app/lakefs", "run"]
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+
   esti:
     image: "golang:1.22.6-alpine3.20"
     links:
diff --git a/esti/ops/docker-compose-fluffy.yaml b/esti/ops/docker-compose-fluffy.yaml
deleted file mode 100644
index 2486ed310dd..00000000000
--- a/esti/ops/docker-compose-fluffy.yaml
+++ /dev/null
@@ -1,122 +0,0 @@
-version: "3"
-services:
-  lakefs:
-    image: "${REPO:-treeverse}/lakefs:${TAG:-dev}"
-    command: "${COMMAND:-run}"
-    ports:
-      - "8000:8000"
-    depends_on:
-      - "fluffy"
-    volumes:
-      - lakefs-app:/app:ro
-    environment:
-      - LAKEFS_AUTH_ENCRYPT_SECRET_KEY="some random secret string"
-      - LAKEFS_AUTH_API_ENDPOINT=http://fluffy:9000/api/v1
-      - LAKEFS_AUTH_API_SUPPORTS_INVITES=true
-      - LAKEFS_AUTH_LOGOUT_REDIRECT_URL=http://fluffy:8000/oidc/logout
-      - LAKEFS_AUTH_UI_CONFIG_LOGIN_URL=http://fluffy:8000/oidc/login
-      - LAKEFS_AUTH_UI_CONFIG_LOGOUT_URL=http://fluffy:8000/oidc/logout
-      - LAKEFS_AUTH_UI_CONFIG_RBAC=internal
-      - LAKEFS_AUTH_UI_CONFIG_LOGIN_COOKIE_NAMES=[internal_auth_session,oidc_auth_session]
-      - LAKEFS_AUTH_OIDC_FRIENDLY_NAME_CLAIM_NAME="nickname"
-      - LAKEFS_AUTH_OIDC_DEFAULT_INITIAL_GROUPS=["Admins"]
-      - LAKEFS_AUTH_AUTHENTICATION_API_ENDPOINT=http://fluffy:8000/api/v1
-      - LAKEFS_AUTH_AUTHENTICATION_API_EXTERNAL_PRINCIPALS_ENABLED=true
-      - LAKEFS_DATABASE_TYPE=${LAKEFS_DATABASE_TYPE:-postgres}
-      - LAKEFS_DATABASE_POSTGRES_CONNECTION_STRING=postgres://lakefs:lakefs@postgres/postgres?sslmode=disable
-      - LAKEFS_BLOCKSTORE_TYPE=${LAKEFS_BLOCKSTORE_TYPE:-local}
-      - LAKEFS_BLOCKSTORE_LOCAL_PATH=/home/lakefs
-      - LAKEFS_BLOCKSTORE_LOCAL_IMPORT_ENABLED=true
-      - LAKEFS_BLOCKSTORE_LOCAL_ALLOWED_EXTERNAL_PREFIXES=/tmp
-      - LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
-      - LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
-      - LAKEFS_LOGGING_LEVEL=DEBUG
-      - LAKEFS_BLOCKSTORE_GS_CREDENTIALS_JSON
-      - LAKEFS_STATS_ENABLED
-      - AZURE_CLIENT_ID
-      - AZURE_CLIENT_SECRET
-      - AZURE_TENANT_ID
-      - LAKEFSACTION_VAR=this_is_actions_var
-    entrypoint: ["/app/wait-for", "fluffy:8000", "--", "/app/lakefs", "run"]
-
-  fluffy:
-    image: treeverse/fluffy:0.7.1
-    command: "${COMMAND:-run}"
-    ports:
-      - "8001:8000"
-      - "9000:9000"
-    depends_on:
-      - "postgres"
-    environment:
-      - FLUFFY_LOGGING_LEVEL=DEBUG
-      - FLUFFY_DATABASE_TYPE=${LAKEFS_DATABASE_TYPE:-postgres}
-      - FLUFFY_DATABASE_POSTGRES_CONNECTION_STRING=postgres://lakefs:lakefs@postgres/postgres?sslmode=disable
-      - FLUFFY_AUTH_ENCRYPT_SECRET_KEY="some random secret string"
-      - FLUFFY_AUTH_SERVE_LISTEN_ADDRESS=0.0.0.0:9000
-      - FLUFFY_LISTEN_ADDRESS=0.0.0.0:8000
-      - FLUFFY_AUTH_SERVE_DISABLE_AUTHENTICATION=true
-      - FLUFFY_AUTH_LOGOUT_REDIRECT_URL=https://lakefs-cloud-dev.us.auth0.com/v2/logout
-      - FLUFFY_AUTH_POST_LOGIN_REDIRECT_URL=http://lakefs:8000/
-      - FLUFFY_AUTH_OIDC_ENABLED=true
-      - FLUFFY_AUTH_OIDC_URL=https://lakefs-cloud-dev.us.auth0.com/
-      - FLUFFY_AUTH_OIDC_CLIENT_ID=${AUTH0_CLIENT_ID}
-      - FLUFFY_AUTH_OIDC_CLIENT_SECRET=${AUTH0_CLIENT_SECRET}
-      - FLUFFY_AUTH_OIDC_CALLBACK_BASE_URL=http://fluffy:8000
-      - FLUFFY_AUTH_OIDC_IS_DEFAULT_LOGIN=true
-      - FLUFFY_AUTH_OIDC_LOGOUT_CLIENT_ID_QUERY_PARAMETER=client_id
-      - FLUFFY_AUTH_EXTERNAL_AWS_AUTH_ENABLED=true
-    entrypoint: ["/app/wait-for", "postgres:5432", "--", "/app/fluffy"]
"postgres:5432", "--", "/app/fluffy"] - - postgres: - image: "postgres:11" - ports: - - "5433:5432" - environment: - POSTGRES_USER: lakefs - POSTGRES_PASSWORD: lakefs - esti: - image: "golang:1.22.6-alpine3.20" - links: - - lakefs:s3.local.lakefs.io - - lakefs:testmultipartupload.s3.local.lakefs.io - - lakefs:testmultipartuploadabort.s3.local.lakefs.io - - lakefs:testdeleteobjects.s3.local.lakefs.io - - lakefs:testmigrate-testpremigratemultipart.s3.local.lakefs.io - - lakefs:migrate.s3.local.lakefs.io - environment: - - CGO_ENABLED=0 - - AWS_ACCESS_KEY_ID - - AWS_SECRET_ACCESS_KEY - - AWS_REGION=us-east-1 - - ESTI_STORAGE_NAMESPACE - - ESTI_BLOCKSTORE_TYPE - - ESTI_AWS_ACCESS_KEY_ID - - ESTI_SETUP_LAKEFS - - ESTI_AWS_SECRET_ACCESS_KEY - - ESTI_ENDPOINT_URL=http://lakefs:8000 - - ESTI_BINARIES_DIR=/app - - ESTI_DATABASE_CONNECTION_STRING=postgres://lakefs:lakefs@postgres/postgres?sslmode=disable - - ESTI_GOTEST_FLAGS - - ESTI_FLAGS - - ESTI_FORCE_PATH_STYLE - - ESTI_AZURE_STORAGE_ACCOUNT - - ESTI_AZURE_STORAGE_ACCESS_KEY - working_dir: /lakefs - command: - - /bin/sh - - -c - - | - apk add --no-cache util-linux - go test -v $ESTI_GOTEST_FLAGS ./esti --system-tests $ESTI_FLAGS --skip=".*GC" - volumes: - - lakefs-code:/lakefs - - lakefs-app:/app:ro - -volumes: - lakefs-code: - driver: local - driver_opts: - o: bind - type: none - device: ${LAKEFS_ROOT:-.} - lakefs-app: diff --git a/esti/ops/docker-compose.yaml b/esti/ops/docker-compose.yaml index 5e45533b205..71bada4ee12 100644 --- a/esti/ops/docker-compose.yaml +++ b/esti/ops/docker-compose.yaml @@ -11,6 +11,8 @@ services: - lakefs-app:/app:ro environment: - LAKEFS_AUTH_ENCRYPT_SECRET_KEY=some random secret string + - LAKEFS_AUTH_UI_CONFIG_RBAC=${LAKEFS_AUTH_UI_CONFIG_RBAC:-none} + - LAKEFS_AUTH_API_ENDPOINT=${LAKEFS_AUTH_API_ENDPOINT:-} - LAKEFS_DATABASE_TYPE=${LAKEFS_DATABASE_TYPE:-postgres} - LAKEFS_DATABASE_POSTGRES_CONNECTION_STRING=postgres://lakefs:lakefs@postgres/postgres?sslmode=disable - LAKEFS_BLOCKSTORE_TYPE=${LAKEFS_BLOCKSTORE_TYPE:-local} @@ -27,6 +29,9 @@ services: - AZURE_TENANT_ID - LAKEFSACTION_VAR=this_is_actions_var entrypoint: ["/app/wait-for", "postgres:5432", "--", "/app/lakefs", "run"] + extra_hosts: + - "host.docker.internal:host-gateway" + postgres: image: "postgres:11" ports: @@ -34,6 +39,7 @@ services: environment: POSTGRES_USER: lakefs POSTGRES_PASSWORD: lakefs + esti: image: "golang:1.22.6-alpine3.20" links: @@ -61,7 +67,6 @@ services: - ESTI_FORCE_PATH_STYLE - ESTI_AZURE_STORAGE_ACCOUNT - ESTI_AZURE_STORAGE_ACCESS_KEY - - ESTI_AUTH_BASIC working_dir: /lakefs command: - /bin/sh diff --git a/esti/system_test.go b/esti/system_test.go index 7d6311fa11d..8ceb57c855f 100644 --- a/esti/system_test.go +++ b/esti/system_test.go @@ -14,6 +14,7 @@ import ( "strings" "testing" + "github.com/go-openapi/swag" "github.com/rs/xid" "github.com/spf13/viper" "github.com/stretchr/testify/require" @@ -344,6 +345,24 @@ func requireBlockstoreType(t testing.TB, requiredTypes ...string) { } } -func isBasicAuth() bool { - return viper.GetBool("auth.basic") +func isBasicAuth(t testing.TB, ctx context.Context) bool { + t.Helper() + return getRBACState(t, ctx) == "none" +} + +func isAdvancedAuth(t testing.TB, ctx context.Context) bool { + return slices.Contains([]string{"external", "internal"}, getRBACState(t, ctx)) +} + +func getRBACState(t testing.TB, ctx context.Context) string { + setupState := getServerConfig(t, ctx) + return swag.StringValue(setupState.LoginConfig.RBAC) +} + +func getServerConfig(t testing.TB, ctx context.Context) 
+	t.Helper()
+	resp, err := client.GetSetupStateWithResponse(ctx)
+	require.NoError(t, err)
+	require.NotNil(t, resp.JSON200)
+	return resp.JSON200
 }