Add table gcp_aiplatform_endpoints Closes #470 #513

Merged · 9 commits · Nov 16, 2023
68 changes: 68 additions & 0 deletions docs/tables/gcp_vertex_ai_endpoint.md
@@ -0,0 +1,68 @@
# Table: gcp_vertex_ai_endpoint

Vertex AI (formerly AI Platform) is a managed service that enables you to easily build machine learning models that work on any type of data, of any size. An endpoint is the Vertex AI resource to which models are deployed in order to serve online predictions.

### Basic info

```sql
select
  name,
  display_name,
  create_time,
  network
from
  gcp_vertex_ai_endpoint;
```

### List endpoints that are exposed via private service connect

```sql
select
  name,
  display_name,
  create_time,
  enable_private_service_connect
from
  gcp_vertex_ai_endpoint
where
  enable_private_service_connect;
```

### List endpoints created in the last 30 days

```sql
select
  name,
  display_name,
  network,
  create_time,
  update_time
from
  gcp_vertex_ai_endpoint
where
  create_time >= now() - interval '30' day;
```

### Get customer-managed key details of endpoints

```sql
select
  name,
  create_time,
  encryption_spec ->> 'KmsKeyName' as kms_key_name
from
  gcp_vertex_ai_endpoint;
```

### Get prediction request response config of endpoints

```sql
select
  name,
  network,
  predict_request_response_logging_config ->> 'Enabled' as enabled,
  predict_request_response_logging_config ->> 'SamplingRate' as sampling_rate,
  predict_request_response_logging_config ->> 'BigqueryDestination' as bigquery_destination
from
  gcp_vertex_ai_endpoint;
```
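
### Count models deployed to each endpoint

A further illustrative query; it assumes the table also exposes a `deployed_models` JSON array column (mirroring the `deployedModels` field of the AI Platform Endpoints API), so adjust the column name if the table schema differs:

```sql
select
  name,
  display_name,
  jsonb_array_length(deployed_models) as deployed_model_count
from
  gcp_vertex_ai_endpoint;
```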
1 change: 1 addition & 0 deletions gcp/plugin.go
@@ -126,6 +126,7 @@ func Plugin(ctx context.Context) *plugin.Plugin {
"gcp_sql_database_instance_metric_cpu_utilization_hourly": tableGcpSQLDatabaseInstanceMetricCpuUtilizationHourly(ctx),
"gcp_storage_bucket": tableGcpStorageBucket(ctx),
"gcp_storage_object": tableGcpStorageObject(ctx),
"gcp_vertex_ai_endpoint": tableGcpVertexAIEndpoint(ctx),
/*
https://github.com/turbot/steampipe/issues/108
"gcp_compute_route": tableGcpComputeRoute(ctx),
Expand Down
66 changes: 65 additions & 1 deletion gcp/service.go
@@ -3,7 +3,8 @@ package gcp
import (
	"context"

	"cloud.google.com/go/redis/apiv1"
	aiplatform "cloud.google.com/go/aiplatform/apiv1"
	redis "cloud.google.com/go/redis/apiv1"
	"github.com/turbot/steampipe-plugin-sdk/v5/plugin"
	"google.golang.org/api/accessapproval/v1"
	"google.golang.org/api/apikeys/v2"
@@ -24,6 +25,7 @@ import (
"google.golang.org/api/iam/v1"
"google.golang.org/api/logging/v2"
"google.golang.org/api/monitoring/v3"
"google.golang.org/api/option"
"google.golang.org/api/pubsub/v1"
"google.golang.org/api/run/v2"
"google.golang.org/api/serviceusage/v1"
@@ -54,6 +56,68 @@ func AccessApprovalService(ctx context.Context, d *plugin.QueryData) (*accessapp
	return svc, nil
}

type AIplatformServiceClients struct {
	Endpoint *aiplatform.EndpointClient
	Dataset  *aiplatform.DatasetClient
	Index    *aiplatform.IndexClient
	Job      *aiplatform.JobClient
}

func AIService(ctx context.Context, d *plugin.QueryData, clientType string) (*AIplatformServiceClients, error) {
	matrixLocation := d.EqualsQualString(matrixKeyLocation)

	// Default to us-central1 when no location qualifier is set; it supports the
	// AI Platform resources used here (Endpoint, Dataset, Index, Job, etc.).
	if matrixLocation == "" {
		matrixLocation = "us-central1"
	}

	// Have we already created and cached the clients for this location and client type?
	serviceCacheKey := "AIService" + matrixLocation + clientType
	if cachedData, ok := d.ConnectionManager.Cache.Get(serviceCacheKey); ok {
		return cachedData.(*AIplatformServiceClients), nil
	}

	// Build client options from the plugin connection config and target the
	// regional AI Platform API endpoint.
	opts := setSessionConfig(ctx, d.Connection)
	opts = append(opts, option.WithEndpoint(matrixLocation+"-aiplatform.googleapis.com:443"))

	clients := &AIplatformServiceClients{}

	switch clientType {
	case "Endpoint":
		svc, err := aiplatform.NewEndpointClient(ctx, opts...)
		if err != nil {
			return nil, err
		}
		clients.Endpoint = svc
	case "Dataset":
		svc, err := aiplatform.NewDatasetClient(ctx, opts...)
		if err != nil {
			return nil, err
		}
		clients.Dataset = svc
	case "Index":
		svc, err := aiplatform.NewIndexClient(ctx, opts...)
		if err != nil {
			return nil, err
		}
		clients.Index = svc
	case "Job":
		svc, err := aiplatform.NewJobClient(ctx, opts...)
		if err != nil {
			return nil, err
		}
		clients.Job = svc
	}

	// Cache the clients so subsequent calls for the same location and client type reuse them.
	d.ConnectionManager.Cache.Set(serviceCacheKey, clients)
	return clients, nil
}
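
For context, here is a minimal, hypothetical sketch of how a table's list hydrate function might consume `AIService` to stream Vertex AI endpoints. The function name `listAIPlatformEndpoints`, the placeholder project ID, and the `aiplatformpb` import path (which varies with the `cloud.google.com/go/aiplatform` module version) are assumptions for illustration, not part of this diff:

```go
package gcp

import (
	"context"

	aiplatformpb "cloud.google.com/go/aiplatform/apiv1/aiplatformpb"
	"github.com/turbot/steampipe-plugin-sdk/v5/plugin"
	"google.golang.org/api/iterator"
)

// listAIPlatformEndpoints is a hypothetical list hydrate function showing how the
// cached Endpoint client returned by AIService could be used.
func listAIPlatformEndpoints(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) {
	clients, err := AIService(ctx, d, "Endpoint")
	if err != nil {
		return nil, err
	}

	// Assumption: the real table resolves the project ID from the connection
	// config; a placeholder literal keeps this sketch short. The location comes
	// from the plugin's location matrix.
	project := "my-gcp-project"
	location := d.EqualsQualString(matrixKeyLocation)

	it := clients.Endpoint.ListEndpoints(ctx, &aiplatformpb.ListEndpointsRequest{
		Parent: "projects/" + project + "/locations/" + location,
	})
	for {
		endpoint, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, err
		}
		// Stream each endpoint row back to Steampipe.
		d.StreamListItem(ctx, endpoint)
	}
	return nil, nil
}
```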

func APIKeysService(ctx context.Context, d *plugin.QueryData) (*apikeys.Service, error) {
	// have we already created and cached the service?
	serviceCacheKey := "APIKeysService"