diff --git a/.prow/scripts/test-end-to-end-batch.sh b/.prow/scripts/test-end-to-end-batch.sh index 4ae7ae1e54..ac282a0c33 100755 --- a/.prow/scripts/test-end-to-end-batch.sh +++ b/.prow/scripts/test-end-to-end-batch.sh @@ -123,7 +123,9 @@ feast: spring: jpa: - properties.hibernate.format_sql: true + properties.hibernate: + format_sql: true + event.merge.entity_copy_observer: allow hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl hibernate.ddl-auto: update datasource: @@ -167,7 +169,8 @@ bigquery_config: datasetId: $DATASET_NAME subscriptions: - name: "*" - version: ">0" + version: "*" + project: "*" EOF cat < /tmp/serving.warehouse.application.yml diff --git a/.prow/scripts/test-end-to-end.sh b/.prow/scripts/test-end-to-end.sh index 0206472aae..c7ff461ec3 100755 --- a/.prow/scripts/test-end-to-end.sh +++ b/.prow/scripts/test-end-to-end.sh @@ -115,9 +115,12 @@ feast: spring: jpa: - properties.hibernate.format_sql: true + properties.hibernate: + format_sql: true + event.merge.entity_copy_observer: allow hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl hibernate.ddl-auto: update + datasource: url: jdbc:postgresql://localhost:5432/postgres username: postgres @@ -153,7 +156,8 @@ redis_config: port: 6379 subscriptions: - name: "*" - version: ">0" + version: "*" + project: "*" EOF cat < /tmp/serving.online.application.yml @@ -182,6 +186,7 @@ grpc: spring: main: web-environment: false + EOF nohup java -jar serving/target/feast-serving-$REVISION.jar \ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 06476c0156..eb38db3008 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -64,8 +64,9 @@ store { name: "SERVING" type: REDIS subscriptions { + project: "*" name: "*" - version: ">0" + version: "*" } redis_config { host: "localhost" @@ -76,8 +77,9 @@ store { name: "WAREHOUSE" type: BIGQUERY subscriptions { + project: "*" name: "*" - version: ">0" + version: "*" } 
bigquery_config { project_id: "my-google-project-id" diff --git a/core/src/main/java/feast/core/dao/FeatureSetRepository.java b/core/src/main/java/feast/core/dao/FeatureSetRepository.java index fd996b331c..900f112190 100644 --- a/core/src/main/java/feast/core/dao/FeatureSetRepository.java +++ b/core/src/main/java/feast/core/dao/FeatureSetRepository.java @@ -19,28 +19,33 @@ import feast.core.model.FeatureSet; import java.util.List; import org.springframework.data.jpa.repository.JpaRepository; -import org.springframework.data.jpa.repository.Query; -/** JPA repository supplying FeatureSet objects keyed by id. */ +/** + * JPA repository supplying FeatureSet objects keyed by id. + */ public interface FeatureSetRepository extends JpaRepository { long count(); - // Find feature set by name and version - FeatureSet findFeatureSetByNameAndVersion(String name, Integer version); - - // Find latest version of a feature set by name - FeatureSet findFirstFeatureSetByNameOrderByVersionDesc(String name); - - // find all versions of featureSets matching the given name. 
- List findByName(String name); + // Find single feature set by project, name, and version + FeatureSet findFeatureSetByNameAndProject_NameAndVersion(String name, String project, + Integer version); - // find all versions of featureSets with names matching the regex - @Query( - nativeQuery = true, - value = "SELECT * FROM feature_sets " + "WHERE name LIKE ?1 ORDER BY name ASC, version ASC") - List findByNameWithWildcardOrderByNameAscVersionAsc(String name); + // Find single latest version of a feature set by project and name (LIKE) + FeatureSet findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(String name, + String project); // find all feature sets and order by name and version List findAllByOrderByNameAscVersionAsc(); + + // find all feature sets within a project and order by name and version + List findAllByProject_NameOrderByNameAscVersionAsc(String project_name); + + // find all versions of feature sets matching the given name pattern with a specific project. + List findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(String name, + String project_name); + + // find all versions of feature sets matching the given name pattern and project pattern + List findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc(String name, + String project_name); } diff --git a/core/src/main/java/feast/core/dao/ProjectRepository.java b/core/src/main/java/feast/core/dao/ProjectRepository.java new file mode 100644 index 0000000000..5549bf71a1 --- /dev/null +++ b/core/src/main/java/feast/core/dao/ProjectRepository.java @@ -0,0 +1,28 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package feast.core.dao; + +import feast.core.model.Project; +import java.util.List; +import org.springframework.data.jpa.repository.JpaRepository; + +/** JPA repository supplying Project objects keyed by id. */ +public interface ProjectRepository extends JpaRepository { + + List findAllByArchivedIsFalse(); +} diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java index 1d42cfb355..2137aa314f 100644 --- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java +++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java @@ -16,39 +16,53 @@ */ package feast.core.grpc; -import com.google.protobuf.InvalidProtocolBufferException; import feast.core.CoreServiceGrpc.CoreServiceImplBase; import feast.core.CoreServiceProto.ApplyFeatureSetRequest; import feast.core.CoreServiceProto.ApplyFeatureSetResponse; +import feast.core.CoreServiceProto.ArchiveProjectRequest; +import feast.core.CoreServiceProto.ArchiveProjectResponse; +import feast.core.CoreServiceProto.CreateProjectRequest; +import feast.core.CoreServiceProto.CreateProjectResponse; import feast.core.CoreServiceProto.GetFeastCoreVersionRequest; import feast.core.CoreServiceProto.GetFeastCoreVersionResponse; import feast.core.CoreServiceProto.GetFeatureSetRequest; import feast.core.CoreServiceProto.GetFeatureSetResponse; import feast.core.CoreServiceProto.ListFeatureSetsRequest; import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.ListProjectsRequest; +import 
feast.core.CoreServiceProto.ListProjectsResponse; import feast.core.CoreServiceProto.ListStoresRequest; import feast.core.CoreServiceProto.ListStoresResponse; import feast.core.CoreServiceProto.UpdateStoreRequest; import feast.core.CoreServiceProto.UpdateStoreResponse; import feast.core.exception.RetrievalException; import feast.core.grpc.interceptors.MonitoringInterceptor; +import feast.core.model.Project; +import feast.core.service.AccessManagementService; import feast.core.service.SpecService; +import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; +import java.util.List; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.lognet.springboot.grpc.GRpcService; import org.springframework.beans.factory.annotation.Autowired; -/** Implementation of the feast core GRPC service. */ +/** + * Implementation of the feast core GRPC service. + */ @Slf4j @GRpcService(interceptors = {MonitoringInterceptor.class}) public class CoreServiceImpl extends CoreServiceImplBase { private SpecService specService; + private AccessManagementService accessManagementService; @Autowired - public CoreServiceImpl(SpecService specService) { + public CoreServiceImpl(SpecService specService, AccessManagementService accessManagementService) { this.specService = specService; + this.accessManagementService = accessManagementService; } @Override @@ -65,9 +79,12 @@ public void getFeatureSet( GetFeatureSetResponse response = specService.getFeatureSet(request); responseObserver.onNext(response); responseObserver.onCompleted(); - } catch (RetrievalException | InvalidProtocolBufferException | StatusRuntimeException e) { + } catch (RetrievalException | StatusRuntimeException e) { log.error("Exception has occurred in GetFeatureSet method: ", e); - responseObserver.onError(e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); } } @@ -78,9 +95,12 @@ public void 
listFeatureSets( ListFeatureSetsResponse response = specService.listFeatureSets(request.getFilter()); responseObserver.onNext(response); responseObserver.onCompleted(); - } catch (RetrievalException | InvalidProtocolBufferException e) { + } catch (RetrievalException | IllegalArgumentException e) { log.error("Exception has occurred in ListFeatureSet method: ", e); - responseObserver.onError(e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); } } @@ -93,7 +113,10 @@ public void listStores( responseObserver.onCompleted(); } catch (RetrievalException e) { log.error("Exception has occurred in ListStores method: ", e); - responseObserver.onError(e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); } } @@ -104,9 +127,21 @@ public void applyFeatureSet( ApplyFeatureSetResponse response = specService.applyFeatureSet(request.getFeatureSet()); responseObserver.onNext(response); responseObserver.onCompleted(); + } catch (org.hibernate.exception.ConstraintViolationException e) { + log.error( + "Unable to persist this feature set due to a constraint violation. 
Please ensure that" + " field names are unique within the project namespace: ", + e); + responseObserver.onError(Status.ALREADY_EXISTS + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); } catch (Exception e) { log.error("Exception has occurred in ApplyFeatureSet method: ", e); - responseObserver.onError(e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); } } @@ -119,7 +154,61 @@ public void updateStore( responseObserver.onCompleted(); } catch (Exception e) { log.error("Exception has occurred in UpdateStore method: ", e); - responseObserver.onError(e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); + } + } + + @Override + public void createProject( + CreateProjectRequest request, StreamObserver responseObserver) { + try { + accessManagementService.createProject(request.getName()); + responseObserver.onNext(CreateProjectResponse.getDefaultInstance()); + responseObserver.onCompleted(); + } catch (Exception e) { + log.error("Exception has occurred in the createProject method: ", e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); } } + + @Override + public void archiveProject( + ArchiveProjectRequest request, StreamObserver responseObserver) { + try { + accessManagementService.archiveProject(request.getName()); + responseObserver.onNext(ArchiveProjectResponse.getDefaultInstance()); + responseObserver.onCompleted(); + } catch (Exception e) { + log.error("Exception has occurred in the archiveProject method: ", e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); + } + } + + @Override + public void listProjects( + ListProjectsRequest request, StreamObserver responseObserver) { + try { + List projects = accessManagementService.listProjects(); + 
responseObserver.onNext(ListProjectsResponse.newBuilder() + .addAllProjects(projects.stream().map(Project::getName).collect( + Collectors.toList())).build()); + responseObserver.onCompleted(); + } catch (Exception e) { + log.error("Exception has occurred in the listProjects method: ", e); + responseObserver.onError(Status.INTERNAL + .withDescription(e.getMessage()) + .withCause(e) + .asRuntimeException()); + } + } + } diff --git a/core/src/main/java/feast/core/job/JobUpdateTask.java b/core/src/main/java/feast/core/job/JobUpdateTask.java index 373a4a113d..c42d75b76b 100644 --- a/core/src/main/java/feast/core/job/JobUpdateTask.java +++ b/core/src/main/java/feast/core/job/JobUpdateTask.java @@ -17,7 +17,6 @@ package feast.core.job; import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.SourceProto; import feast.core.StoreProto; import feast.core.log.Action; @@ -53,7 +52,7 @@ @Getter public class JobUpdateTask implements Callable { - private final List featureSetSpecs; + private final List featureSets; private final SourceProto.Source sourceSpec; private final StoreProto.Store store; private final Optional currentJob; @@ -61,14 +60,14 @@ public class JobUpdateTask implements Callable { private long jobUpdateTimeoutSeconds; public JobUpdateTask( - List featureSetSpecs, + List featureSets, SourceProto.Source sourceSpec, StoreProto.Store store, Optional currentJob, JobManager jobManager, long jobUpdateTimeoutSeconds) { - this.featureSetSpecs = featureSetSpecs; + this.featureSets = featureSets; this.sourceSpec = sourceSpec; this.store = store; this.currentJob = currentJob; @@ -87,8 +86,8 @@ public Job call() { .map(FeatureSet::getId) .collect(Collectors.toSet()); Set newFeatureSetsPopulatedByJob = - featureSetSpecs.stream() - .map(fs -> fs.getName() + ":" + fs.getVersion()) + featureSets.stream() + .map(fs -> FeatureSet.fromProto(fs).getId()) .collect(Collectors.toSet()); if (existingFeatureSetsPopulatedByJob.size() == 
newFeatureSetsPopulatedByJob.size() && existingFeatureSetsPopulatedByJob.containsAll(newFeatureSetsPopulatedByJob)) { @@ -107,12 +106,12 @@ public Job call() { return job; } else { submittedJob = - executorService.submit(() -> updateJob(currentJob.get(), featureSetSpecs, store)); + executorService.submit(() -> updateJob(currentJob.get(), featureSets, store)); } } else { String jobId = createJobId(source.getId(), store.getName()); submittedJob = - executorService.submit(() -> startJob(jobId, featureSetSpecs, sourceSpec, store)); + executorService.submit(() -> startJob(jobId, featureSets, sourceSpec, store)); } Job job = null; @@ -128,16 +127,16 @@ public Job call() { /** Start or update the job to ingest data to the sink. */ private Job startJob( String jobId, - List featureSetSpecs, + List featureSetProtos, SourceProto.Source source, StoreProto.Store sinkSpec) { List featureSets = - featureSetSpecs.stream() + featureSetProtos.stream() .map( - spec -> + fsp -> FeatureSet.fromProto( - FeatureSetProto.FeatureSet.newBuilder().setSpec(spec).build())) + FeatureSetProto.FeatureSet.newBuilder().setSpec(fsp.getSpec()).setMeta(fsp.getMeta()).build())) .collect(Collectors.toList()); Job job = new Job( @@ -185,13 +184,13 @@ private Job startJob( } /** Update the given job */ - private Job updateJob(Job job, List featureSetSpecs, StoreProto.Store store) { + private Job updateJob(Job job, List featureSets, StoreProto.Store store) { job.setFeatureSets( - featureSetSpecs.stream() + featureSets.stream() .map( - spec -> + fs -> FeatureSet.fromProto( - FeatureSetProto.FeatureSet.newBuilder().setSpec(spec).build())) + FeatureSetProto.FeatureSet.newBuilder().setSpec(fs.getSpec()).setMeta(fs.getMeta()).build())) .collect(Collectors.toList())); job.setStore(feast.core.model.Store.fromProto(store)); AuditLogger.log( diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java index 
92763e7971..703fecece8 100644 --- a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java +++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java @@ -23,7 +23,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.Printer; -import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto; import feast.core.SourceProto; import feast.core.StoreProto; import feast.core.config.FeastProperties.MetricsProperties; @@ -33,6 +33,7 @@ import feast.core.model.FeatureSet; import feast.core.model.Job; import feast.core.model.JobStatus; +import feast.core.model.Project; import feast.core.model.Source; import feast.core.model.Store; import feast.core.util.TypeConversion; @@ -77,13 +78,13 @@ public Runner getRunnerType() { @Override public Job startJob(Job job) { - List featureSetSpecs = + List featureSetProtos = job.getFeatureSets().stream() - .map(fs -> fs.toProto().getSpec()) + .map(FeatureSet::toProto) .collect(Collectors.toList()); try { return submitDataflowJob( - job.getId(), featureSetSpecs, job.getSource().toProto(), job.getStore().toProto(), false); + job.getId(), featureSetProtos, job.getSource().toProto(), job.getStore().toProto(), false); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(String.format("Unable to start job %s", job.getId()), e); } @@ -98,13 +99,13 @@ public Job startJob(Job job) { @Override public Job updateJob(Job job) { try { - List featureSetSpecs = + List featureSetProtos = job.getFeatureSets().stream() - .map(fs -> fs.toProto().getSpec()) + .map(FeatureSet::toProto) .collect(Collectors.toList()); return submitDataflowJob( - job.getId(), featureSetSpecs, job.getSource().toProto(), job.getStore().toProto(), true); + job.getId(), featureSetProtos, job.getSource().toProto(), job.getStore().toProto(), true); } catch (InvalidProtocolBufferException e) { throw new 
RuntimeException(String.format("Unable to update job %s", job.getId()), e); @@ -173,19 +174,21 @@ public JobStatus getJobStatus(Job job) { private Job submitDataflowJob( String jobName, - List featureSetSpecs, + List featureSetProtos, SourceProto.Source source, StoreProto.Store sink, boolean update) { try { - ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSetSpecs, sink, update); + ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSetProtos, sink, update); DataflowPipelineJob pipelineResult = runPipeline(pipelineOptions); List featureSets = - featureSetSpecs.stream() + featureSetProtos.stream() .map( - spec -> { + fsp -> { FeatureSet featureSet = new FeatureSet(); - featureSet.setId(spec.getName() + ":" + spec.getVersion()); + featureSet.setName(fsp.getSpec().getName()); + featureSet.setVersion(fsp.getSpec().getVersion()); + featureSet.setProject(new Project(fsp.getSpec().getProject())); return featureSet; }) .collect(Collectors.toList()); @@ -205,16 +208,16 @@ private Job submitDataflowJob( } private ImportOptions getPipelineOptions( - String jobName, List featureSets, StoreProto.Store sink, boolean update) + String jobName, List featureSets, StoreProto.Store sink, boolean update) throws IOException { String[] args = TypeConversion.convertMapToArgs(defaultOptions); ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class); Printer printer = JsonFormat.printer(); List featureSetsJson = new ArrayList<>(); - for (FeatureSetSpec featureSet : featureSets) { - featureSetsJson.add(printer.print(featureSet)); + for (FeatureSetProto.FeatureSet featureSet : featureSets) { + featureSetsJson.add(printer.print(featureSet.getSpec())); } - pipelineOptions.setFeatureSetSpecJson(featureSetsJson); + pipelineOptions.setFeatureSetJson(featureSetsJson); pipelineOptions.setStoreJson(Collections.singletonList(printer.print(sink))); pipelineOptions.setProject(projectId); pipelineOptions.setUpdate(update); diff --git 
a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java index 89c6dc3848..ff13e22455 100644 --- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java +++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java @@ -20,6 +20,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.Printer; +import feast.core.FeatureSetProto; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.StoreProto; import feast.core.config.FeastProperties.MetricsProperties; @@ -74,11 +75,11 @@ public Runner getRunnerType() { @Override public Job startJob(Job job) { try { - List featureSetSpecs = + List featureSetProtos = job.getFeatureSets().stream() - .map(fs -> fs.toProto().getSpec()) + .map(FeatureSet::toProto) .collect(Collectors.toList()); - ImportOptions pipelineOptions = getPipelineOptions(featureSetSpecs, job.getStore().toProto()); + ImportOptions pipelineOptions = getPipelineOptions(featureSetProtos, job.getStore().toProto()); PipelineResult pipelineResult = runPipeline(pipelineOptions); DirectJob directJob = new DirectJob(job.getId(), pipelineResult); jobs.add(directJob); @@ -92,16 +93,16 @@ public Job startJob(Job job) { } private ImportOptions getPipelineOptions( - List featureSetSpecs, StoreProto.Store sink) + List featureSets, StoreProto.Store sink) throws InvalidProtocolBufferException { String[] args = TypeConversion.convertMapToArgs(defaultOptions); ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class); Printer printer = JsonFormat.printer(); List featureSetsJson = new ArrayList<>(); - for (FeatureSetSpec featureSetSpec : featureSetSpecs) { - featureSetsJson.add(printer.print(featureSetSpec)); + for (FeatureSetProto.FeatureSet featureSet : featureSets) { + featureSetsJson.add(printer.print(featureSet.getSpec())); } 
- pipelineOptions.setFeatureSetSpecJson(featureSetsJson); + pipelineOptions.setFeatureSetJson(featureSetsJson); pipelineOptions.setStoreJson(Collections.singletonList(printer.print(sink))); pipelineOptions.setRunner(DirectRunner.class); pipelineOptions.setProject(""); // set to default value to satisfy validation diff --git a/core/src/main/java/feast/core/model/FeatureSet.java b/core/src/main/java/feast/core/model/FeatureSet.java index 388b27cb04..e42ccf2ade 100644 --- a/core/src/main/java/feast/core/model/FeatureSet.java +++ b/core/src/main/java/feast/core/model/FeatureSet.java @@ -27,19 +27,24 @@ import feast.types.ValueProto.ValueType; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import javax.persistence.CascadeType; +import javax.persistence.CollectionTable; import javax.persistence.Column; +import javax.persistence.ElementCollection; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; import javax.persistence.Table; +import javax.persistence.UniqueConstraint; import lombok.Getter; import lombok.Setter; +import org.apache.commons.lang3.builder.HashCodeBuilder; import org.hibernate.annotations.Fetch; import org.hibernate.annotations.FetchMode; @@ -49,7 +54,7 @@ @Table(name = "feature_sets") public class FeatureSet extends AbstractTimestampEntity implements Comparable { - // Id of the featureSet, defined as name:version + // Id of the featureSet, defined as project/feature_set_name:feature_set_version @Id @Column(name = "id", nullable = false, unique = true) private String id; @@ -62,20 +67,35 @@ public class FeatureSet extends AbstractTimestampEntity implements Comparable entities; - // Features inside this featureSet - @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER) - @Fetch(value = 
FetchMode.SUBSELECT) - @JoinColumn(name = "features") - private List features; + // Entity fields inside this feature set + @ElementCollection(fetch = FetchType.EAGER) + @CollectionTable( + name = "entities", + joinColumns = @JoinColumn(name = "feature_set_id") + ) + @Fetch(FetchMode.SUBSELECT) + private Set entities; + + // Feature fields inside this feature set + @ElementCollection(fetch = FetchType.EAGER) + @CollectionTable( + name = "features", + joinColumns = @JoinColumn(name = "feature_set_id"), + uniqueConstraints = + @UniqueConstraint(columnNames = {"name", "project", "version"}) + ) + @Fetch(FetchMode.SUBSELECT) + private Set features; // Source on which feature rows can be found @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER) @@ -92,38 +112,71 @@ public FeatureSet() { public FeatureSet( String name, + String project, int version, long maxAgeSeconds, List entities, List features, Source source, FeatureSetStatus status) { - this.id = String.format("%s:%s", name, version); - this.name = name; - this.version = version; this.maxAgeSeconds = maxAgeSeconds; - this.entities = entities; - this.features = features; this.source = source; this.status = status.toString(); + this.entities = new HashSet<>(); + this.features = new HashSet<>(); + this.name = name; + this.project = new Project(project); + this.version = version; + this.setId(project, name, version); + addEntities(entities); + addFeatures(features); + } + + private void setId(String project, String name, int version) { + this.id = project + "/" + name + ":" + version; + } + + public void setVersion(int version) { + this.version = version; + this.setId(getProjectName(), getName(), version); + } + + public void setName(String name) { + this.name = name; + this.setId(getProjectName(), name, getVersion()); + } + + private String getProjectName() { + if (getProject() != null) { + return getProject().getName(); + } else { + return ""; + } + } + + public void setProject(Project project) { + 
this.project = project; + this.setId(project.getName(), getName(), getVersion()); } public static FeatureSet fromProto(FeatureSetProto.FeatureSet featureSetProto) { FeatureSetSpec featureSetSpec = featureSetProto.getSpec(); Source source = Source.fromProto(featureSetSpec.getSource()); - String id = String.format("%s:%d", featureSetSpec.getName(), featureSetSpec.getVersion()); + List features = new ArrayList<>(); for (FeatureSpec feature : featureSetSpec.getFeaturesList()) { - features.add(new Field(id, feature.getName(), feature.getValueType())); + features.add(new Field(feature.getName(), feature.getValueType())); } + List entities = new ArrayList<>(); for (EntitySpec entity : featureSetSpec.getEntitiesList()) { - entities.add(new Field(id, entity.getName(), entity.getValueType())); + entities.add(new Field(entity.getName(), entity.getValueType())); } return new FeatureSet( - featureSetSpec.getName(), - featureSetSpec.getVersion(), + featureSetProto.getSpec().getName(), + featureSetProto.getSpec().getProject(), + featureSetProto.getSpec().getVersion(), featureSetSpec.getMaxAge().getSeconds(), entities, features, @@ -131,6 +184,30 @@ public static FeatureSet fromProto(FeatureSetProto.FeatureSet featureSetProto) { featureSetProto.getMeta().getStatus()); } + public void addEntities(List fields) { + for (Field field : fields) { + addEntity(field); + } + } + + public void addEntity(Field field) { + field.setProject(this.project.getName()); + field.setVersion(this.getVersion()); + entities.add(field); + } + + public void addFeatures(List fields) { + for (Field field : fields) { + addFeature(field); + } + } + + public void addFeature(Field field) { + field.setProject(this.project.getName()); + field.setVersion(this.getVersion()); + features.add(field); + } + public FeatureSetProto.FeatureSet toProto() { List entitySpecs = new ArrayList<>(); for (Field entity : entities) { @@ -157,8 +234,9 @@ public FeatureSetProto.FeatureSet toProto() { FeatureSetSpec.Builder spec = 
FeatureSetSpec.newBuilder() - .setName(name) - .setVersion(version) + .setName(getName()) + .setVersion(getVersion()) + .setProject(project.getName()) .setMaxAge(Duration.newBuilder().setSeconds(maxAgeSeconds)) .addAllEntities(entitySpecs) .addAllFeatures(featureSpecs) @@ -174,7 +252,11 @@ public FeatureSetProto.FeatureSet toProto() { * @return boolean denoting if the source or schema have changed. */ public boolean equalTo(FeatureSet other) { - if (!name.equals(other.getName())) { + if (!getName().equals(other.getName())) { + return false; + } + + if (!project.getName().equals(other.project.getName())) { return false; } @@ -198,12 +280,12 @@ public boolean equalTo(FeatureSet other) { } // Ensure map size is consistent with existing fields - if (fields.size() != other.features.size() + other.entities.size()) { + if (fields.size() != other.getFeatures().size() + other.getEntities().size()) { return false; } - // Ensure the other entities and fields exist in the field map - for (Field e : other.entities) { + // Ensure the other entities and features exist in the field map + for (Field e : other.getEntities()) { if (!fields.containsKey(e.getName())) { return false; } @@ -212,7 +294,7 @@ public boolean equalTo(FeatureSet other) { } } - for (Field f : features) { + for (Field f : other.getFeatures()) { if (!fields.containsKey(f.getName())) { return false; } @@ -224,8 +306,28 @@ public boolean equalTo(FeatureSet other) { return true; } + @Override + public int hashCode() { + HashCodeBuilder hcb = new HashCodeBuilder(); + hcb.append(project.getName()); + hcb.append(getName()); + hcb.append(getVersion()); + return hcb.toHashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof FeatureSet)) { + return false; + } + return this.equalTo(((FeatureSet) obj)); + } + @Override public int compareTo(FeatureSet o) { - return Integer.compare(version, o.version); + return Integer.compare(getVersion(), 
o.getVersion()); } } diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java index 3eaeb93e27..9307e1273e 100644 --- a/core/src/main/java/feast/core/model/Field.java +++ b/core/src/main/java/feast/core/model/Field.java @@ -19,31 +19,15 @@ import feast.types.ValueProto.ValueType; import java.util.Objects; import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; +import javax.persistence.Embeddable; import lombok.Getter; import lombok.Setter; @Getter @Setter -@Entity -@Table(name = "fields") +@Embeddable public class Field { - // Id of the field, defined as featureSetId.name - @Id - @Column(name = "id", nullable = false, unique = true) - private String id; - - // FeatureSet this feature belongs to - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "feature_set_id") - private FeatureSet featureSet; - // Name of the feature @Column(name = "name", nullable = false) private String name; @@ -52,16 +36,18 @@ public class Field { @Column(name = "type", nullable = false) private String type; + // Version of the field + @Column(name = "version") + private int version; + + // Project that this field belongs to + @Column(name = "project") + private String project; + public Field() { - super(); } - public Field(String featureSetId, String name, ValueType.Enum type) { - // TODO: Remove all mention of feature sets inside of this class! 
- FeatureSet featureSet = new FeatureSet(); - featureSet.setId(featureSetId); - this.featureSet = featureSet; - this.id = String.format("%s:%s", featureSetId, name); + public Field(String name, ValueType.Enum type) { this.name = name; this.type = type.toString(); } @@ -80,6 +66,6 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), id, featureSet, name, type); + return Objects.hash(super.hashCode(), name, type); } } diff --git a/core/src/main/java/feast/core/model/Job.java b/core/src/main/java/feast/core/model/Job.java index 851e68367b..ab0db6f3f9 100644 --- a/core/src/main/java/feast/core/model/Job.java +++ b/core/src/main/java/feast/core/model/Job.java @@ -24,7 +24,6 @@ import javax.persistence.Enumerated; import javax.persistence.Id; import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; @@ -65,9 +64,6 @@ public class Job extends AbstractTimestampEntity { // FeatureSets populated by the job @ManyToMany - @JoinTable( - joinColumns = {@JoinColumn(name = "job_id")}, - inverseJoinColumns = {@JoinColumn(name = "feature_set_id")}) private List featureSets; // Job Metrics diff --git a/core/src/main/java/feast/core/model/Project.java b/core/src/main/java/feast/core/model/Project.java new file mode 100644 index 0000000000..a4a1997cf6 --- /dev/null +++ b/core/src/main/java/feast/core/model/Project.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.model; + +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@Entity +@Table(name = "projects") +public class Project { + + // Name of the project + @Id + @Column(name = "name", nullable = false, unique = true) + private String name; + + // Flag to set whether the project has been archived + @Column(name = "archived", nullable = false) + private boolean archived; + + @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.EAGER, orphanRemoval = true, mappedBy = "project") + private Set featureSets; + + public Project() { + super(); + } + + public Project(String name) { + this.name = name; + this.featureSets = new HashSet<>(); + } + + public void addFeatureSet(FeatureSet featureSet) { + featureSet.setProject(this); + featureSets.add(featureSet); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Project field = (Project) o; + return name.equals(field.getName()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), name); + } +} diff --git a/core/src/main/java/feast/core/model/Source.java b/core/src/main/java/feast/core/model/Source.java index 
7aa6378dee..28db1e9a5b 100644 --- a/core/src/main/java/feast/core/model/Source.java +++ b/core/src/main/java/feast/core/model/Source.java @@ -23,6 +23,7 @@ import feast.core.SourceProto.Source.Builder; import feast.core.SourceProto.SourceType; import io.grpc.Status; +import java.util.Objects; import java.util.Set; import javax.persistence.Column; import javax.persistence.Entity; @@ -199,4 +200,21 @@ private String generateId() { return ""; } } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Source source = (Source) o; + return id.equals(source.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } } diff --git a/core/src/main/java/feast/core/model/Store.java b/core/src/main/java/feast/core/model/Store.java index 9bfc27db1f..4fa70b655a 100644 --- a/core/src/main/java/feast/core/model/Store.java +++ b/core/src/main/java/feast/core/model/Store.java @@ -118,14 +118,17 @@ public List getSubscriptions() { } private static String convertSubscriptionToString(Subscription sub) { - return String.format("%s:%s", sub.getName(), sub.getVersion()); + if(sub.getVersion().isEmpty() || sub.getName().isEmpty() || sub.getProject().isEmpty()){ + throw new IllegalArgumentException(String.format("Missing arguments in subscription string: %s", sub.toString())); + } + return String.format("%s:%s:%s", sub.getProject(), sub.getName(), sub.getVersion()); } private Subscription convertStringToSubscription(String sub) { if (sub.equals("")) { return Subscription.newBuilder().build(); } - String[] split = sub.split(":"); - return Subscription.newBuilder().setName(split[0]).setVersion(split[1]).build(); + String[] split = sub.split(":", 3); + return Subscription.newBuilder().setProject(split[0]).setName(split[1]).setVersion(split[2]).build(); } -} +} \ No newline at end of file diff --git a/core/src/main/java/feast/core/service/AccessManagementService.java 
b/core/src/main/java/feast/core/service/AccessManagementService.java new file mode 100644 index 0000000000..e17ba156f5 --- /dev/null +++ b/core/src/main/java/feast/core/service/AccessManagementService.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.service; + +import feast.core.dao.ProjectRepository; +import feast.core.model.Project; +import java.util.List; +import java.util.Optional; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + + +@Slf4j +@Service +public class AccessManagementService { + + private ProjectRepository projectRepository; + + @Autowired + public AccessManagementService( + ProjectRepository projectRepository) { + this.projectRepository = projectRepository; + } + + /** + * Creates a project + * + * @param name Name of project to be created + */ + @Transactional + public void createProject(String name) { + if(projectRepository.existsById(name)){ + throw new IllegalArgumentException(String.format("Project already exists: %s", name)); + } + Project project = new Project(name); + projectRepository.saveAndFlush(project); + } + + /** + * Archives a project + * + * @param name Name of the project to be archived + */ + @Transactional + public void 
archiveProject(String name) { + Optional project = projectRepository.findById(name); + if (!project.isPresent()) { + throw new IllegalArgumentException(String.format("Could not find project: \"%s\"", name)); + } + Project p = project.get(); + p.setArchived(true); + projectRepository.saveAndFlush(p); + } + + /** + * List all active projects + * + * @return List of active projects + */ + @Transactional + public List listProjects() { + return projectRepository.findAllByArchivedIsFalse(); + } +} diff --git a/core/src/main/java/feast/core/service/JobCoordinatorService.java b/core/src/main/java/feast/core/service/JobCoordinatorService.java index 76a1cc27dd..b1212ae6e6 100644 --- a/core/src/main/java/feast/core/service/JobCoordinatorService.java +++ b/core/src/main/java/feast/core/service/JobCoordinatorService.java @@ -16,12 +16,10 @@ */ package feast.core.service; -import com.google.protobuf.InvalidProtocolBufferException; import feast.core.CoreServiceProto.ListFeatureSetsRequest; import feast.core.CoreServiceProto.ListStoresRequest.Filter; import feast.core.CoreServiceProto.ListStoresResponse; import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.FeatureSetProto.FeatureSetStatus; import feast.core.StoreProto; import feast.core.StoreProto.Store.Subscription; @@ -94,41 +92,36 @@ public void Poll() { List jobUpdateTasks = new ArrayList<>(); ListStoresResponse listStoresResponse = specService.listStores(Filter.newBuilder().build()); for (StoreProto.Store store : listStoresResponse.getStoreList()) { - Set featureSetSpecs = new HashSet<>(); - try { - for (Subscription subscription : store.getSubscriptionsList()) { - featureSetSpecs.addAll( - specService - .listFeatureSets( - ListFeatureSetsRequest.Filter.newBuilder() - .setFeatureSetName(subscription.getName()) - .setFeatureSetVersion(subscription.getVersion()) - .build()) - .getFeatureSetsList().stream() - .map(FeatureSetProto.FeatureSet::getSpec) - 
.collect(Collectors.toList())); - } - if (!featureSetSpecs.isEmpty()) { - featureSetSpecs.stream() - .collect(Collectors.groupingBy(FeatureSetSpec::getSource)) - .entrySet() - .stream() - .forEach( - kv -> { - Optional originalJob = - getJob(Source.fromProto(kv.getKey()), Store.fromProto(store)); - jobUpdateTasks.add( - new JobUpdateTask( - kv.getValue(), - kv.getKey(), - store, - originalJob, - jobManager, - jobUpdatesProperties.getTimeoutSeconds())); - }); - } - } catch (InvalidProtocolBufferException e) { - log.warn("Unable to retrieve feature sets for store {}: {}", store, e.getMessage()); + Set featureSets = new HashSet<>(); + for (Subscription subscription : store.getSubscriptionsList()) { + featureSets.addAll( + new ArrayList<>(specService + .listFeatureSets( + ListFeatureSetsRequest.Filter.newBuilder() + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion()) + .setProject(subscription.getProject()) + .build()) + .getFeatureSetsList())); + } + if (!featureSets.isEmpty()) { + featureSets.stream() + .collect(Collectors.groupingBy(fs -> fs.getSpec().getSource())) + .entrySet() + .stream() + .forEach( + kv -> { + Optional originalJob = + getJob(Source.fromProto(kv.getKey()), Store.fromProto(store)); + jobUpdateTasks.add( + new JobUpdateTask( + kv.getValue(), + kv.getKey(), + store, + originalJob, + jobManager, + jobUpdatesProperties.getTimeoutSeconds())); + }); } } if (jobUpdateTasks.size() == 0) { diff --git a/core/src/main/java/feast/core/service/SpecService.java b/core/src/main/java/feast/core/service/SpecService.java index 937fc29717..17fcd10918 100644 --- a/core/src/main/java/feast/core/service/SpecService.java +++ b/core/src/main/java/feast/core/service/SpecService.java @@ -17,7 +17,7 @@ package feast.core.service; import static feast.core.validators.Matchers.checkValidCharacters; -import static feast.core.validators.Matchers.checkValidFeatureSetFilterName; +import static 
feast.core.validators.Matchers.checkValidCharactersAllowAsterisk; import com.google.common.collect.Ordering; import com.google.protobuf.InvalidProtocolBufferException; @@ -33,22 +33,22 @@ import feast.core.CoreServiceProto.UpdateStoreRequest; import feast.core.CoreServiceProto.UpdateStoreResponse; import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.SourceProto; import feast.core.StoreProto; +import feast.core.StoreProto.Store.Subscription; import feast.core.dao.FeatureSetRepository; +import feast.core.dao.ProjectRepository; import feast.core.dao.StoreRepository; import feast.core.exception.RetrievalException; import feast.core.model.FeatureSet; +import feast.core.model.Project; import feast.core.model.Source; import feast.core.model.Store; import feast.core.validators.FeatureSetValidator; +import java.util.ArrayList; import java.util.List; -import java.util.function.Predicate; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -62,44 +62,43 @@ public class SpecService { private final FeatureSetRepository featureSetRepository; + private final ProjectRepository projectRepository; private final StoreRepository storeRepository; private final Source defaultSource; - private final Pattern versionPattern = - Pattern.compile("^(?[\\>\\<\\=]{0,2})(?\\d*)$"); - @Autowired public SpecService( FeatureSetRepository featureSetRepository, StoreRepository storeRepository, + ProjectRepository projectRepository, Source defaultSource) { this.featureSetRepository = featureSetRepository; this.storeRepository = storeRepository; + this.projectRepository = projectRepository; this.defaultSource = defaultSource; } /** - * Get a feature 
set matching the feature name and version provided in the filter. The name is - * required. If the version is provided then it will be used for the lookup. If the version is - * omitted then the latest version will be returned. + * Get a feature set matching the feature name and version and project. The feature set name and + * project are required, but version can be omitted by providing 0 for its value. If the version + * is omitted, the latest feature set will be provided. * - * @param GetFeatureSetRequest containing the name and version of the feature set - * @return GetFeatureSetResponse containing a single feature set + * @param request: GetFeatureSetRequest Request containing filter parameters. + * @return Returns a GetFeatureSetResponse containing a feature set.. */ - public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) - throws InvalidProtocolBufferException { + public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) { // Validate input arguments checkValidCharacters(request.getName(), "featureSetName"); + if (request.getName().isEmpty()) { - throw io.grpc.Status.INVALID_ARGUMENT - .withDescription("No feature set name provided") - .asRuntimeException(); + throw new IllegalArgumentException("No feature set name provided"); + } + if (request.getProject().isEmpty()) { + throw new IllegalArgumentException("No project provided"); } if (request.getVersion() < 0) { - throw io.grpc.Status.INVALID_ARGUMENT - .withDescription("Version number cannot be less than 0") - .asRuntimeException(); + throw new IllegalArgumentException("Version number cannot be less than 0"); } FeatureSet featureSet; @@ -107,27 +106,23 @@ public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) // Filter the list based on version if (request.getVersion() == 0) { featureSet = - featureSetRepository.findFirstFeatureSetByNameOrderByVersionDesc(request.getName()); + featureSetRepository + 
.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(request.getName(), + request.getProject()); if (featureSet == null) { - throw io.grpc.Status.NOT_FOUND - .withDescription( - String.format( - "Feature set with name \"%s\" could not be found.", request.getName())) - .asRuntimeException(); + throw new RetrievalException(String.format( + "Feature set with name \"%s\" could not be found.", request.getName())); } } else { featureSet = - featureSetRepository.findFeatureSetByNameAndVersion( - request.getName(), request.getVersion()); + featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion( + request.getName(), request.getProject(), request.getVersion()); if (featureSet == null) { - throw io.grpc.Status.NOT_FOUND - .withDescription( - String.format( - "Feature set with name \"%s\" and version \"%s\" could " + "not be found.", - request.getName(), request.getVersion())) - .asRuntimeException(); + throw new RetrievalException(String.format( + "Feature set with name \"%s\" and version \"%s\" could " + "not be found.", + request.getName(), request.getVersion())); } } @@ -135,40 +130,119 @@ public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) return GetFeatureSetResponse.newBuilder().setFeatureSet(featureSet.toProto()).build(); } + /** - * Get featureSets matching the feature name and version provided in the filter. If the feature - * name is not provided, the method will return all featureSets currently registered to Feast. + * Return a list of feature sets matching the feature set name, version, and project provided in + * the filter. All fields are required. Use '*' for all three arguments in order to return all + * feature sets and versions in all projects. + * + *

Project name can be explicitly provided, or an asterisk can be provided to match all + * projects. It is not possible to provide a combination of asterisks/wildcards and text. * - *

The feature set name in the filter accepts any valid regex string. All matching featureSets - * will be returned. + *

The feature set name in the filter accepts an asterisk as a wildcard. All matching + * feature sets will be returned. Regex is not supported. Explicitly defining a feature set name + * is not possible if a project name is not set explicitly. * - *

The version filter is optional; If not provided, this method will return all featureSet - * versions of the featureSet name provided. Valid version filters should optionally contain a - * comparator (<, <=, >, etc) and a version number, e.g. 10, <10, >=1 + *

The version field can be one of + * - '*' - This will match all versions + * - 'latest' - This will match the latest feature set version + * - '' - This will match a specific feature set version. This property can only be set + * if both the feature set name and project name are explicitly set. * * @param filter filter containing the desired featureSet name and version filter * @return ListFeatureSetsResponse with list of featureSets found matching the filter */ - public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest.Filter filter) - throws InvalidProtocolBufferException { + public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest.Filter filter) { String name = filter.getFeatureSetName(); - checkValidFeatureSetFilterName(name, "featureSetName"); - List featureSets; - if (name.equals("")) { - featureSets = featureSetRepository.findAllByOrderByNameAscVersionAsc(); + String project = filter.getProject(); + String version = filter.getFeatureSetVersion(); + + if (project.isEmpty() || name.isEmpty() || version.isEmpty()) { + throw new IllegalArgumentException( + String + .format( + "Invalid listFeatureSetRequest, missing arguments. Must provide project, feature set name, and version.", + filter.toString())); + } + + checkValidCharactersAllowAsterisk(name, "featureSetName"); + checkValidCharactersAllowAsterisk(project, "projectName"); + + List featureSets = new ArrayList() { + }; + + if (project.equals("*")) { + // Matching all projects + + if (name.equals("*") && version.equals("*")) { + featureSets = featureSetRepository + .findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc( + name.replace('*', '%'), + project.replace('*', '%')); + } else { + throw new IllegalArgumentException( + String + .format( + "Invalid listFeatureSetRequest. 
Version and feature set name must be set to " + + "\"*\" if the project name and feature set name aren't set explicitly: \n%s", + filter.toString())); + } + } else if (!project.contains("*")) { + // Matching a specific project + + if (name.contains("*") && version.equals("*")) { + // Find all feature sets matching a pattern and versions in a specific project + featureSets = featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( + name.replace('*', '%'), + project); + + } else if (!name.contains("*") && version.equals("*")) { + // Find all versions of a specific feature set in a specific project + featureSets = featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( + name, + project); + + } else if (version.equals("latest")) { + // Find the latest version of a feature set matching a specific pattern in a specific project + FeatureSet latestFeatureSet = featureSetRepository + .findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc( + name.replace('*', '%'), + project); + featureSets.add(latestFeatureSet); + + } else if (!name.contains("*") && StringUtils.isNumeric(version)) { + // Find a specific version of a feature set matching a specific name in a specific project + FeatureSet specificFeatureSet = featureSetRepository + .findFeatureSetByNameAndProject_NameAndVersion(name, project, + Integer.parseInt(version)); + featureSets.add(specificFeatureSet); + + } else { + throw new IllegalArgumentException( + String + .format( + "Invalid listFeatureSetRequest. 
Version must be set to \"*\" if the project " + "name and feature set name aren't set explicitly: \n%s", filter.toString())); + } } else { - featureSets = - featureSetRepository.findByNameWithWildcardOrderByNameAscVersionAsc( - name.replace('*', '%')); - featureSets = - featureSets.stream() - .filter(getVersionFilter(filter.getFeatureSetVersion())) - .collect(Collectors.toList()); + throw new IllegalArgumentException( + String + .format( + "Invalid listFeatureSetRequest. Project name cannot be a pattern. It may only be " + "a specific project name or an asterisk: \n%s", + filter.toString())); } + ListFeatureSetsResponse.Builder response = ListFeatureSetsResponse.newBuilder(); - for (FeatureSet featureSet : featureSets) { - response.addFeatureSets(featureSet.toProto()); + if (featureSets.size() > 0) { + for (FeatureSet featureSet : featureSets) { + response.addFeatureSets(featureSet.toProto()); + } } + return response.build(); } @@ -207,26 +281,46 @@ public ListStoresResponse listStores(ListStoresRequest.Filter filter) { } /** - * Adds the featureSet to the repository, and prepares the sink for the feature creator to write - * to. If there is a change in the featureSet's schema or source, the featureSet version will be - * incremented. + * Creates or updates a feature set in the repository. If there is a change in the feature set + * schema, then the feature set version will be incremented. * - *

This function is idempotent. If no changes are detected in the incoming featureSet's schema, - * this method will update the incoming featureSet spec with the latest version stored in the - * repository, and return that. + *

This function is idempotent. If no changes are detected in the incoming featureSet's + * schema, this method will update the incoming featureSet spec with the latest version stored in + * the repository, and return that. * - * @param newFeatureSet featureSet to add. + * @param newFeatureSet Feature set that will be created or updated. */ - public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFeatureSet) - throws InvalidProtocolBufferException { - FeatureSetSpec newFeatureSetSpec = newFeatureSet.getSpec(); - FeatureSetValidator.validateSpec(newFeatureSetSpec); + public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFeatureSet) { + + // Validate incoming feature set + FeatureSetValidator.validateSpec(newFeatureSet); + + // Ensure that the project already exists + String project_name = newFeatureSet.getSpec().getProject(); + Project project = projectRepository.findById(newFeatureSet.getSpec().getProject()) + .orElseThrow(() -> new IllegalArgumentException(String + .format("Project name does not exist. 
Please create a project first: %s", project_name + ))); + + // Ensure that the project is not archived + if (project.isArchived()) { + throw new IllegalArgumentException(String + .format("Project is archived: %s", project_name + )); + } + + // Retrieve all existing FeatureSet objects List existingFeatureSets = - featureSetRepository.findByName(newFeatureSetSpec.getName()); + featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( + newFeatureSet.getSpec().getName(), project_name); if (existingFeatureSets.size() == 0) { - newFeatureSetSpec = newFeatureSetSpec.toBuilder().setVersion(1).build(); + // Create new feature set since it doesn't exist + newFeatureSet = newFeatureSet.toBuilder() + .setSpec(newFeatureSet.getSpec().toBuilder().setVersion(1)).build(); } else { + // Retrieve the latest feature set if the name does exist existingFeatureSets = Ordering.natural().reverse().sortedCopy(existingFeatureSets); FeatureSet latest = existingFeatureSets.get(0); FeatureSet featureSet = FeatureSet.fromProto(newFeatureSet); @@ -238,15 +332,23 @@ public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFea .setStatus(Status.NO_CHANGE) .build(); } - newFeatureSetSpec = newFeatureSetSpec.toBuilder().setVersion(latest.getVersion() + 1).build(); + // TODO: There is a race condition here with incrementing the version + newFeatureSet = newFeatureSet.toBuilder() + .setSpec(newFeatureSet.getSpec().toBuilder().setVersion(latest.getVersion() + 1)) + .build(); } - newFeatureSet = newFeatureSet.toBuilder().setSpec(newFeatureSetSpec).build(); + + // Build a new FeatureSet object which includes the new properties FeatureSet featureSet = FeatureSet.fromProto(newFeatureSet); - if (newFeatureSetSpec.getSource() == SourceProto.Source.getDefaultInstance()) { + if (newFeatureSet.getSpec().getSource() == SourceProto.Source.getDefaultInstance()) { featureSet.setSource(defaultSource); } - featureSetRepository.saveAndFlush(featureSet); + // Persist the 
FeatureSet object + project.addFeatureSet(featureSet); + projectRepository.saveAndFlush(project); + + // Build ApplyFeatureSetResponse return ApplyFeatureSetResponse.newBuilder() .setFeatureSet(featureSet.toProto()) .setStatus(Status.CREATED) @@ -258,12 +360,21 @@ public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFea * * @param updateStoreRequest containing the new store definition * @return UpdateStoreResponse containing the new store definition - * @throws InvalidProtocolBufferException */ @Transactional public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest) throws InvalidProtocolBufferException { StoreProto.Store newStoreProto = updateStoreRequest.getStore(); + + List subs = newStoreProto.getSubscriptionsList(); + for (Subscription sub : subs) { + // Ensure that all fields in a subscription contain values + if ((sub.getVersion().isEmpty() || sub.getName().isEmpty()) || sub.getProject().isEmpty()) { + throw new IllegalArgumentException( + String + .format("Missing parameter in subscription: %s", sub)); + } + } Store existingStore = storeRepository.findById(newStoreProto.getName()).orElse(null); // Do nothing if no change @@ -282,44 +393,4 @@ public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest) .build(); } - private Predicate getVersionFilter(String versionFilter) { - if (versionFilter.equals("")) { - return v -> true; - } - Matcher match = versionPattern.matcher(versionFilter); - match.find(); - - if (!match.matches()) { - throw io.grpc.Status.INVALID_ARGUMENT - .withDescription( - String.format( - "Invalid version string '%s' provided. Version string may either " - + "be a fixed version, e.g. 10, or contain a comparator, e.g. 
>10.", - versionFilter)) - .asRuntimeException(); - } - - int versionNumber = Integer.valueOf(match.group("version")); - String comparator = match.group("comparator"); - switch (comparator) { - case "<": - return v -> v.getVersion() < versionNumber; - case ">": - return v -> v.getVersion() > versionNumber; - case "<=": - return v -> v.getVersion() <= versionNumber; - case ">=": - return v -> v.getVersion() >= versionNumber; - case "": - return v -> v.getVersion() == versionNumber; - default: - throw io.grpc.Status.INVALID_ARGUMENT - .withDescription( - String.format( - "Invalid comparator '%s' provided. Version string may either " - + "be a fixed version, e.g. 10, or contain a comparator, e.g. >10.", - comparator)) - .asRuntimeException(); - } - } } diff --git a/core/src/main/java/feast/core/validators/FeatureSetValidator.java b/core/src/main/java/feast/core/validators/FeatureSetValidator.java index e14fde72cb..7fc7a476bd 100644 --- a/core/src/main/java/feast/core/validators/FeatureSetValidator.java +++ b/core/src/main/java/feast/core/validators/FeatureSetValidator.java @@ -20,20 +20,28 @@ import com.google.common.collect.Sets; import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.FeatureSetProto.FeatureSpec; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; public class FeatureSetValidator { - public static void validateSpec(FeatureSetSpec featureSetSpec) { - checkValidCharacters(featureSetSpec.getName(), "name"); - checkUniqueColumns(featureSetSpec.getEntitiesList(), featureSetSpec.getFeaturesList()); - for (EntitySpec entitySpec : featureSetSpec.getEntitiesList()) { + public static void validateSpec(FeatureSet featureSet) { + if(featureSet.getSpec().getProject().isEmpty()){ + throw new IllegalArgumentException("Project name must be provided"); + } + if(featureSet.getSpec().getName().isEmpty()){ + throw new 
IllegalArgumentException("Feature set name must be provided"); + } + + checkValidCharacters(featureSet.getSpec().getProject(), "project"); + checkValidCharacters(featureSet.getSpec().getName(), "name"); + checkUniqueColumns(featureSet.getSpec().getEntitiesList(), featureSet.getSpec().getFeaturesList()); + for (EntitySpec entitySpec : featureSet.getSpec().getEntitiesList()) { checkValidCharacters(entitySpec.getName(), "entities::name"); } - for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { + for (FeatureSpec featureSpec : featureSet.getSpec().getFeaturesList()) { checkValidCharacters(featureSpec.getName(), "features::name"); } } diff --git a/core/src/main/java/feast/core/validators/Matchers.java b/core/src/main/java/feast/core/validators/Matchers.java index 03bafafdbf..87e2b126f0 100644 --- a/core/src/main/java/feast/core/validators/Matchers.java +++ b/core/src/main/java/feast/core/validators/Matchers.java @@ -23,7 +23,7 @@ public class Matchers { private static Pattern UPPER_SNAKE_CASE_REGEX = Pattern.compile("^[A-Z0-9]+(_[A-Z0-9]+)*$"); private static Pattern LOWER_SNAKE_CASE_REGEX = Pattern.compile("^[a-z0-9]+(_[a-z0-9]+)*$"); private static Pattern VALID_CHARACTERS_REGEX = Pattern.compile("^[a-zA-Z0-9\\-_]*$"); - private static Pattern VALID_CHARACTERS_FSET_FILTER_REGEX = + private static Pattern VALID_CHARACTERS_REGEX_WITH_ASTERISK_WILDCARD = Pattern.compile("^[a-zA-Z0-9\\-_*]*$"); private static String ERROR_MESSAGE_TEMPLATE = "invalid value for field %s: %s"; @@ -61,9 +61,9 @@ public static void checkValidCharacters(String input, String fieldName) } } - public static void checkValidFeatureSetFilterName(String input, String fieldName) + public static void checkValidCharactersAllowAsterisk(String input, String fieldName) throws IllegalArgumentException { - if (!VALID_CHARACTERS_FSET_FILTER_REGEX.matcher(input).matches()) { + if (!VALID_CHARACTERS_REGEX_WITH_ASTERISK_WILDCARD.matcher(input).matches()) { throw new IllegalArgumentException( 
String.format( ERROR_MESSAGE_TEMPLATE, diff --git a/core/src/main/resources/application.yml b/core/src/main/resources/application.yml index 7c0d90e5f4..dc78719f22 100644 --- a/core/src/main/resources/application.yml +++ b/core/src/main/resources/application.yml @@ -55,7 +55,11 @@ feast: spring: jpa: - properties.hibernate.format_sql: true + properties.hibernate: + format_sql: true + event: + merge: + entity_copy_observer: allow hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl hibernate.ddl-auto: update datasource: diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java index 59c4dfdaaa..6c4daafcf6 100644 --- a/core/src/test/java/feast/core/CoreApplicationTest.java +++ b/core/src/test/java/feast/core/CoreApplicationTest.java @@ -14,134 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package feast.core; -// -// import static feast.core.config.StorageConfig.DEFAULT_SERVING_ID; -// import static feast.core.config.StorageConfig.DEFAULT_WAREHOUSE_ID; -// import static org.junit.Assert.assertEquals; -// import static org.mockito.ArgumentMatchers.any; -// import static org.mockito.Mockito.when; -// -// import feast.core.config.ImportJobDefaults; -// import feast.core.job.JobManager; -// import feast.core.model.StorageInfo; -// import feast.core.service.SpecService; -// import feast.core.stream.FeatureStream; -// import feast.specs.EntitySpecProto.EntitySpec; -// import feast.specs.FeatureSpecProto.FeatureSpec; -// import feast.specs.StorageSpecProto.StorageSpec; -// import feast.types.ValueProto.ValueType; -// import io.grpc.ManagedChannel; -// import io.grpc.ManagedChannelBuilder; -// import java.io.IOException; -// import java.nio.file.Files; -// import java.nio.file.Paths; -// import java.util.Collections; -// import java.util.HashMap; -// import java.util.List; -// import 
java.util.Map; -// import org.junit.Test; -// import org.junit.runner.RunWith; -// import org.mockito.ArgumentMatchers; -// import org.mockito.Mockito; -// import org.mockito.stubbing.Answer; -// import org.springframework.beans.factory.annotation.Autowired; -// import org.springframework.boot.test.context.SpringBootTest; -// import org.springframework.boot.test.context.TestConfiguration; -// import org.springframework.context.annotation.Bean; -// import org.springframework.test.annotation.DirtiesContext; -// import org.springframework.test.context.junit4.SpringRunner; -// -/// ** -// * Starts the application context with some properties -// */ -// @RunWith(SpringRunner.class) -// @SpringBootTest(properties = { -// "feast.jobs.workspace=${java.io.tmpdir}/${random.uuid}", -// "spring.datasource.url=jdbc:h2:mem:testdb", -// "feast.store.warehouse.type=FILE.JSON", -// "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", -// "feast.store.serving.type=REDIS", -// "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", -// "feast.store.errors.type=STDERR", -// "feast.stream.type=kafka", -// "feast.stream.options={\"bootstrapServers\":\"localhost:8081\"}" -// }) -// @DirtiesContext + public class CoreApplicationTest { - // - // @Autowired - // SpecService specService; - // @Autowired - // ImportJobDefaults jobDefaults; - // @Autowired - // JobManager jobManager; - // @Autowired - // FeatureStream featureStream; - // - // @Test - // public void test_withProperties_systemServingAndWarehouseStoresRegistered() throws IOException - // { - // Files.createDirectory(Paths.get(jobDefaults.getWorkspace())); - // - // List warehouseStorageInfo = specService - // .getStorage(Collections.singletonList(DEFAULT_WAREHOUSE_ID)); - // assertEquals(warehouseStorageInfo.size(), 1); - // assertEquals(warehouseStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() - // .setId(DEFAULT_WAREHOUSE_ID).setType("FILE.JSON").putOptions("path", "/tmp/foobar") - // 
.build()); - // - // List servingStorageInfo = specService - // .getStorage(Collections.singletonList(DEFAULT_SERVING_ID)); - // assertEquals(servingStorageInfo.size(), 1); - // assertEquals(servingStorageInfo.get(0).getStorageSpec(), StorageSpec.newBuilder() - // .setId(DEFAULT_SERVING_ID).setType("REDIS") - // .putOptions("host", "localhost") - // .putOptions("port", "1234") - // .build()); - // - // ManagedChannelBuilder channelBuilder = ManagedChannelBuilder.forAddress("localhost", - // 6565); - // ManagedChannel channel = channelBuilder.usePlaintext(true).build(); - // CoreServiceGrpc.CoreServiceBlockingStub coreService = - // CoreServiceGrpc.newBlockingStub(channel); - // - // EntitySpec entitySpec = EntitySpec.newBuilder().setName("test").build(); - // FeatureSpec featureSpec = FeatureSpec.newBuilder() - // .setId("test.int64") - // .setName("int64") - // .setEntity("test") - // .setValueType(ValueType.Enum.INT64) - // .setOwner("hermione@example.com") - // .setDescription("Test is a test") - // .setUri("http://example.com/test.int64").build(); - // - // when(featureStream.generateTopicName(ArgumentMatchers.anyString())).thenReturn("my-topic"); - // when(featureStream.getType()).thenReturn("kafka"); - // - // coreService.applyEntity(entitySpec); - // - // Map args = new HashMap<>(); - // when(jobManager.startJob(any(), any())).thenAnswer((Answer) invocation -> { - // args.put(0, invocation.getArgument(0)); - // args.put(1, invocation.getArgument(1)); - // return "externalJobId1234"; - // }); - // - // coreService.applyFeature(featureSpec); - // } - // - // @TestConfiguration - // public static class MockProvider { - // - // @Bean - // public JobManager jobManager() { - // return Mockito.mock(JobManager.class); - // } - // - // @Bean - // public FeatureStream featureStream() { - // return Mockito.mock(FeatureStream.class); - // } - // } } diff --git a/core/src/test/java/feast/core/job/JobUpdateTaskTest.java 
b/core/src/test/java/feast/core/job/JobUpdateTaskTest.java index a1b4cdbab2..ad6881eca7 100644 --- a/core/src/test/java/feast/core/job/JobUpdateTaskTest.java +++ b/core/src/test/java/feast/core/job/JobUpdateTaskTest.java @@ -25,6 +25,7 @@ import static org.mockito.MockitoAnnotations.initMocks; import feast.core.FeatureSetProto; +import feast.core.FeatureSetProto.FeatureSetMeta; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.SourceProto; import feast.core.SourceProto.KafkaSourceConfig; @@ -60,7 +61,7 @@ public void setUp() { .setName("test") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) - .addSubscriptions(Subscription.newBuilder().setName("*").setVersion(">0").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) .build(); source = @@ -79,12 +80,14 @@ public void shouldUpdateJobIfPresent() { FeatureSetProto.FeatureSet featureSet1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("featureSet1").setVersion(1).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("featureSet1").setVersion(1)) + .setMeta(FeatureSetMeta.newBuilder()) .build(); FeatureSetProto.FeatureSet featureSet2 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("featureSet2").setVersion(1).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("featureSet2").setVersion(1)) + .setMeta(FeatureSetMeta.newBuilder()) .build(); Job originalJob = new Job( @@ -97,7 +100,7 @@ public void shouldUpdateJobIfPresent() { JobStatus.RUNNING); JobUpdateTask jobUpdateTask = new JobUpdateTask( - Arrays.asList(featureSet1.getSpec(), featureSet2.getSpec()), + Arrays.asList(featureSet1, featureSet2), source, store, Optional.of(originalJob), @@ -134,12 +137,13 @@ public void shouldCreateJobIfNotPresent() { FeatureSetProto.FeatureSet featureSet1 = 
FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("featureSet1").setVersion(1).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("featureSet1").setVersion(1)) + .setMeta(FeatureSetMeta.newBuilder()) .build(); JobUpdateTask jobUpdateTask = spy( new JobUpdateTask( - Arrays.asList(featureSet1.getSpec()), + Arrays.asList(featureSet1), source, store, Optional.empty(), @@ -179,7 +183,8 @@ public void shouldUpdateJobStatusIfNotCreateOrUpdate() { FeatureSetProto.FeatureSet featureSet1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("featureSet1").setVersion(1).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("featureSet1").setVersion(1)) + .setMeta(FeatureSetMeta.newBuilder()) .build(); Job originalJob = new Job( @@ -192,7 +197,7 @@ public void shouldUpdateJobStatusIfNotCreateOrUpdate() { JobStatus.RUNNING); JobUpdateTask jobUpdateTask = new JobUpdateTask( - Arrays.asList(featureSet1.getSpec()), + Arrays.asList(featureSet1), source, store, Optional.of(originalJob), @@ -219,12 +224,13 @@ public void shouldReturnJobWithErrorStatusIfFailedToSubmit() { FeatureSetProto.FeatureSet featureSet1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("featureSet1").setVersion(1).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("featureSet1").setVersion(1)) + .setMeta(FeatureSetMeta.newBuilder()) .build(); JobUpdateTask jobUpdateTask = spy( new JobUpdateTask( - Arrays.asList(featureSet1.getSpec()), + Arrays.asList(featureSet1), source, store, Optional.empty(), @@ -262,8 +268,12 @@ public void shouldReturnJobWithErrorStatusIfFailedToSubmit() { @Test public void shouldTimeout() { - FeatureSetSpec featureSet1 = - FeatureSetSpec.newBuilder().setName("featureSet1").setVersion(1).setSource(source).build(); + 
FeatureSetProto.FeatureSet featureSet1 = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("featureSet1").setVersion(1)) + .setMeta(FeatureSetMeta.newBuilder()).build(); + JobUpdateTask jobUpdateTask = spy( new JobUpdateTask( diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java index 5f72f0dd7a..76ba2040c9 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java @@ -32,6 +32,7 @@ import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.Printer; import feast.core.FeatureSetProto; +import feast.core.FeatureSetProto.FeatureSetMeta; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.SourceProto; import feast.core.SourceProto.KafkaSourceConfig; @@ -66,9 +67,11 @@ public class DataflowJobManagerTest { - @Rule public final ExpectedException expectedException = ExpectedException.none(); + @Rule + public final ExpectedException expectedException = ExpectedException.none(); - @Mock private Dataflow dataflow; + @Mock + private Dataflow dataflow; private Map defaults; private DataflowJobManager dfJobManager; @@ -92,7 +95,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { .setName("SERVING") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .addSubscriptions(Subscription.newBuilder().setName("*").setVersion(">0").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) .build(); SourceProto.Source source = @@ -105,12 +108,12 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { .build()) .build(); - FeatureSetSpec featureSetSpec = - FeatureSetSpec.newBuilder() - 
.setName("featureSet") - .setVersion(1) - .setSource(source) - .setMaxAge(Duration.newBuilder().build()) + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder() + .setMeta(FeatureSetMeta.newBuilder()) + .setSpec(FeatureSetSpec.newBuilder().setSource(source).setName("featureSet") + .setVersion(1) + .setMaxAge(Duration.newBuilder().build())) .build(); Printer printer = JsonFormat.printer(); @@ -126,8 +129,8 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { expectedPipelineOptions.setAppName("DataflowJobManager"); expectedPipelineOptions.setJobName(jobName); expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); - expectedPipelineOptions.setFeatureSetSpecJson( - Lists.newArrayList(printer.print(featureSetSpec))); + expectedPipelineOptions.setFeatureSetJson( + Lists.newArrayList(printer.print(featureSet.getSpec()))); ArgumentCaptor captor = ArgumentCaptor.forClass(ImportOptions.class); @@ -145,7 +148,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { Store.fromProto(store), Lists.newArrayList( FeatureSet.fromProto( - FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build())), + featureSet)), JobStatus.PENDING); Job actual = dfJobManager.startJob(job); @@ -190,8 +193,11 @@ public void shouldThrowExceptionWhenJobStateTerminal() throws IOException { .build()) .build(); - FeatureSetSpec featureSetSpec = - FeatureSetSpec.newBuilder().setName("featureSet").setVersion(1).setSource(source).build(); + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder().setName("featureSet").setVersion(1).setSource(source) + .build()).build(); dfJobManager = Mockito.spy(dfJobManager); @@ -208,8 +214,7 @@ public void shouldThrowExceptionWhenJobStateTerminal() throws IOException { Source.fromProto(source), Store.fromProto(store), Lists.newArrayList( - FeatureSet.fromProto( - 
FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build())), + FeatureSet.fromProto(featureSet)), JobStatus.PENDING); expectedException.expect(JobExecutionException.class); diff --git a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java index 73cbd9030f..53983a775a 100644 --- a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java +++ b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java @@ -86,7 +86,7 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { .setName("SERVING") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .addSubscriptions(Subscription.newBuilder().setName("*").setVersion(">0").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) .build(); SourceProto.Source source = @@ -99,12 +99,9 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { .build()) .build(); - FeatureSetSpec featureSetSpec = - FeatureSetSpec.newBuilder() - .setName("featureSet") - .setVersion(1) - .setMaxAge(Duration.newBuilder()) - .setSource(source) + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec(FeatureSetSpec.newBuilder().setName("featureSet").setVersion(1).setMaxAge(Duration.newBuilder()).setSource(source).build()) .build(); Printer printer = JsonFormat.printer(); @@ -117,8 +114,8 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { expectedPipelineOptions.setProject(""); expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); expectedPipelineOptions.setProject(""); - expectedPipelineOptions.setFeatureSetSpecJson( - Lists.newArrayList(printer.print(featureSetSpec))); + expectedPipelineOptions.setFeatureSetJson( + 
Lists.newArrayList(printer.print(featureSet.getSpec()))); String expectedJobId = "feast-job-0"; ArgumentCaptor pipelineOptionsCaptor = @@ -136,8 +133,7 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { Source.fromProto(source), Store.fromProto(store), Lists.newArrayList( - FeatureSet.fromProto( - FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build())), + FeatureSet.fromProto(featureSet)), JobStatus.PENDING); Job actual = drJobManager.startJob(job); verify(drJobManager, times(1)).runPipeline(pipelineOptionsCaptor.capture()); diff --git a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java index 5b892d30aa..937c0f5eba 100644 --- a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java +++ b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java @@ -30,6 +30,7 @@ import feast.core.CoreServiceProto.ListFeatureSetsResponse; import feast.core.CoreServiceProto.ListStoresResponse; import feast.core.FeatureSetProto; +import feast.core.FeatureSetProto.FeatureSetMeta; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.SourceProto.KafkaSourceConfig; import feast.core.SourceProto.Source; @@ -91,12 +92,12 @@ public void shouldDoNothingIfNoMatchingFeatureSetsFound() throws InvalidProtocol .setName("test") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) - .addSubscriptions(Subscription.newBuilder().setName("*").setVersion(">0").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) .build(); when(specService.listStores(any())) .thenReturn(ListStoresResponse.newBuilder().addStore(store).build()); when(specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("*").setFeatureSetVersion(">0").build())) + Filter.newBuilder().setProject("*").setFeatureSetName("*").setFeatureSetVersion("*").build())) 
.thenReturn(ListFeatureSetsResponse.newBuilder().build()); JobCoordinatorService jcs = new JobCoordinatorService( @@ -113,7 +114,7 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) .addSubscriptions( - Subscription.newBuilder().setName("features").setVersion(">0").build()) + Subscription.newBuilder().setProject("project1").setName("features").setVersion("*").build()) .build(); Source source = Source.newBuilder() @@ -128,12 +129,17 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep FeatureSetProto.FeatureSet featureSet1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("features").setVersion(1).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("features").setVersion(1)) + .setMeta( + FeatureSetMeta.newBuilder() + ) .build(); FeatureSetProto.FeatureSet featureSet2 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("features").setVersion(2).setSource(source)) + FeatureSetSpec.newBuilder().setSource(source).setProject("project1").setName("features").setVersion(2)) + .setMeta( + FeatureSetMeta.newBuilder()) .build(); String extId = "ext"; ArgumentCaptor jobArgCaptor = ArgumentCaptor.forClass(Job.class); @@ -159,7 +165,7 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep JobStatus.RUNNING); when(specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("features").setFeatureSetVersion(">0").build())) + Filter.newBuilder().setProject("project1").setFeatureSetName("features").setFeatureSetVersion("*").build())) .thenReturn( ListFeatureSetsResponse.newBuilder() .addFeatureSets(featureSet1) @@ -188,7 +194,7 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException { .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) 
.addSubscriptions( - Subscription.newBuilder().setName("features").setVersion(">0").build()) + Subscription.newBuilder().setProject("project1").setName("features").setVersion("*").build()) .build(); Source source1 = Source.newBuilder() @@ -212,12 +218,16 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException { FeatureSetProto.FeatureSet featureSet1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("features").setVersion(1).setSource(source1)) + FeatureSetSpec.newBuilder().setSource(source1).setProject("project1").setName("features").setVersion(1)) + .setMeta( + FeatureSetMeta.newBuilder()) .build(); FeatureSetProto.FeatureSet featureSet2 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( - FeatureSetSpec.newBuilder().setName("features").setVersion(2).setSource(source2)) + FeatureSetSpec.newBuilder().setSource(source2).setProject("project1").setName("features").setVersion(2)) + .setMeta( + FeatureSetMeta.newBuilder()) .build(); Job expectedInput1 = @@ -262,7 +272,7 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException { ArgumentCaptor jobArgCaptor = ArgumentCaptor.forClass(Job.class); when(specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("features").setFeatureSetVersion(">0").build())) + Filter.newBuilder().setProject("project1").setFeatureSetName("features").setFeatureSetVersion("*").build())) .thenReturn( ListFeatureSetsResponse.newBuilder() .addFeatureSets(featureSet1) diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java index dbf1290fb6..6a51521271 100644 --- a/core/src/test/java/feast/core/service/SpecServiceTest.java +++ b/core/src/test/java/feast/core/service/SpecServiceTest.java @@ -18,6 +18,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsEqual.equalTo; +import static org.junit.Assert.assertEquals; import static 
org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -46,18 +47,20 @@ import feast.core.StoreProto.Store.StoreType; import feast.core.StoreProto.Store.Subscription; import feast.core.dao.FeatureSetRepository; +import feast.core.dao.ProjectRepository; import feast.core.dao.StoreRepository; import feast.core.exception.RetrievalException; import feast.core.model.FeatureSet; import feast.core.model.Field; +import feast.core.model.Project; import feast.core.model.Source; import feast.core.model.Store; import feast.types.ValueProto.ValueType.Enum; -import io.grpc.StatusRuntimeException; import java.sql.Date; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; @@ -71,11 +74,17 @@ public class SpecServiceTest { - @Mock private FeatureSetRepository featureSetRepository; + @Mock + private FeatureSetRepository featureSetRepository; - @Mock private StoreRepository storeRepository; + @Mock + private StoreRepository storeRepository; - @Rule public final ExpectedException expectedException = ExpectedException.none(); + @Mock + private ProjectRepository projectRepository; + + @Rule + public final ExpectedException expectedException = ExpectedException.none(); private SpecService specService; private List featureSets; @@ -94,17 +103,18 @@ public void setUp() { .build(), true); - FeatureSet featureSet1v1 = newDummyFeatureSet("f1", 1); - FeatureSet featureSet1v2 = newDummyFeatureSet("f1", 2); - FeatureSet featureSet1v3 = newDummyFeatureSet("f1", 3); - FeatureSet featureSet2v1 = newDummyFeatureSet("f2", 1); + FeatureSet featureSet1v1 = newDummyFeatureSet("f1", 1, "project1"); + FeatureSet featureSet1v2 = newDummyFeatureSet("f1", 2, "project1"); + FeatureSet featureSet1v3 = newDummyFeatureSet("f1", 3, "project1"); + FeatureSet featureSet2v1 = newDummyFeatureSet("f2", 1, "project1"); - 
Field f3f1 = new Field("f3", "f3f1", Enum.INT64); - Field f3f2 = new Field("f3", "f3f2", Enum.INT64); - Field f3e1 = new Field("f3", "f3e1", Enum.STRING); + Field f3f1 = new Field("f3f1", Enum.INT64); + Field f3f2 = new Field("f3f2", Enum.INT64); + Field f3e1 = new Field("f3e1", Enum.STRING); FeatureSet featureSet3v1 = new FeatureSet( "f3", + "project1", 1, 100L, Arrays.asList(f3e1), @@ -115,17 +125,41 @@ public void setUp() { featureSets = Arrays.asList(featureSet1v1, featureSet1v2, featureSet1v3, featureSet2v1, featureSet3v1); when(featureSetRepository.findAll()).thenReturn(featureSets); - when(featureSetRepository.findAllByOrderByNameAscVersionAsc()).thenReturn(featureSets); - when(featureSetRepository.findByName("f1")).thenReturn(featureSets.subList(0, 3)); - when(featureSetRepository.findByName("f3")).thenReturn(featureSets.subList(4, 5)); - when(featureSetRepository.findFirstFeatureSetByNameOrderByVersionDesc("f1")) + when(featureSetRepository.findAllByOrderByNameAscVersionAsc()) + .thenReturn(featureSets); + + when(featureSetRepository + .findFeatureSetByNameAndProject_NameAndVersion("f1", + "project1", 1)).thenReturn(featureSets.get(0)); + when(featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc("f1", + "project1")).thenReturn(featureSets.subList(0, 3)); + when(featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc("f3", + "project1")).thenReturn(featureSets.subList(4, 5)); + when(featureSetRepository + .findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc("f1", "project1")) .thenReturn(featureSet1v3); - when(featureSetRepository.findByNameWithWildcardOrderByNameAscVersionAsc("f1")) + when(featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc("f1", "project1")) .thenReturn(featureSets.subList(0, 3)); - when(featureSetRepository.findByName("asd")).thenReturn(Lists.newArrayList()); - when(featureSetRepository.findByNameWithWildcardOrderByNameAscVersionAsc("f%")) + 
when(featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc("asd", + "project1")).thenReturn(Lists.newArrayList()); + when(featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc("f%", + "project1")) + .thenReturn(featureSets); + when(featureSetRepository + .findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc("%", + "%")) .thenReturn(featureSets); + + when(projectRepository.findAllByArchivedIsFalse()) + .thenReturn(Collections.singletonList(new Project("project1"))); + when(projectRepository.findById("project1")).thenReturn(Optional.of(new Project("project1"))); + Store store1 = newDummyStore("SERVING"); Store store2 = newDummyStore("WAREHOUSE"); stores = Arrays.asList(store1, store2); @@ -133,13 +167,16 @@ public void setUp() { when(storeRepository.findById("SERVING")).thenReturn(Optional.of(store1)); when(storeRepository.findById("NOTFOUND")).thenReturn(Optional.empty()); - specService = new SpecService(featureSetRepository, storeRepository, defaultSource); + specService = new SpecService(featureSetRepository, storeRepository, projectRepository, + defaultSource); } @Test - public void shouldGetAllFeatureSetsIfNoFilterProvided() throws InvalidProtocolBufferException { + public void shouldGetAllFeatureSetsIfOnlyWildcardsProvided() { ListFeatureSetsResponse actual = - specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("").build()); + specService.listFeatureSets( + Filter.newBuilder().setFeatureSetName("*").setProject("*").setFeatureSetVersion("*") + .build()); List list = new ArrayList<>(); for (FeatureSet featureSet : featureSets) { FeatureSetProto.FeatureSet toProto = featureSet.toProto(); @@ -151,10 +188,19 @@ public void shouldGetAllFeatureSetsIfNoFilterProvided() throws InvalidProtocolBu } @Test - public void shouldGetAllFeatureSetsMatchingNameIfNoVersionProvided() - throws InvalidProtocolBufferException { + public void listFeatureSetShouldFailIfFeatureSetProvidedWithoutProject() { + 
expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage( + "Invalid listFeatureSetRequest, missing arguments. Must provide project, feature set name, and version."); + specService.listFeatureSets( + Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("1").build()); + } + + @Test + public void shouldGetAllFeatureSetsMatchingNameIfWildcardVersionProvided() { ListFeatureSetsResponse actual = - specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("f1").build()); + specService.listFeatureSets( + Filter.newBuilder().setProject("project1").setFeatureSetName("f1").setFeatureSetVersion("*").build()); List expectedFeatureSets = featureSets.stream().filter(fs -> fs.getName().equals("f1")).collect(Collectors.toList()); List list = new ArrayList<>(); @@ -168,10 +214,11 @@ public void shouldGetAllFeatureSetsMatchingNameIfNoVersionProvided() } @Test - public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() - throws InvalidProtocolBufferException { + public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() { ListFeatureSetsResponse actual = - specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("f*").build()); + specService.listFeatureSets( + Filter.newBuilder().setProject("project1").setFeatureSetName("f*") + .setFeatureSetVersion("*").build()); List expectedFeatureSets = featureSets.stream() .filter(fs -> fs.getName().startsWith("f")) @@ -187,11 +234,11 @@ public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() } @Test - public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() - throws InvalidProtocolBufferException { + public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() { ListFeatureSetsResponse actual = specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("1").build()); + Filter.newBuilder().setProject("project1").setFeatureSetName("f1") + .setFeatureSetVersion("1").build()); List expectedFeatureSets = 
featureSets.stream() .filter(fs -> fs.getName().equals("f1")) @@ -208,71 +255,62 @@ public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() } @Test - public void shouldGetAllFeatureSetsGivenVersionWithComparator() - throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = - specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion(">1").build()); - List expectedFeatureSets = - featureSets.stream() - .filter(fs -> fs.getName().equals("f1")) - .filter(fs -> fs.getVersion() > 1) - .collect(Collectors.toList()); - List list = new ArrayList<>(); - for (FeatureSet expectedFeatureSet : expectedFeatureSets) { - FeatureSetProto.FeatureSet toProto = expectedFeatureSet.toProto(); - list.add(toProto); - } - ListFeatureSetsResponse expected = - ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); - assertThat(actual, equalTo(expected)); + public void shouldThrowExceptionIfGetAllFeatureSetsGivenVersionWithComparator() { + expectedException.expect(IllegalArgumentException.class); + specService.listFeatureSets( + Filter.newBuilder().setProject("project1").setFeatureSetName("f1") + .setFeatureSetVersion(">1").build()); } @Test - public void shouldGetLatestFeatureSetGivenMissingVersionFilter() - throws InvalidProtocolBufferException { + public void shouldGetLatestFeatureSetGivenMissingVersionFilter() { GetFeatureSetResponse actual = - specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").build()); + specService.getFeatureSet( + GetFeatureSetRequest.newBuilder().setName("f1").setProject("project1").build()); FeatureSet expected = featureSets.get(2); assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); } @Test - public void shouldGetSpecificFeatureSetGivenSpecificVersionFilter() - throws InvalidProtocolBufferException { - when(featureSetRepository.findFeatureSetByNameAndVersion("f1", 2)) + public void shouldGetSpecificFeatureSetGivenSpecificVersionFilter() { + 
when(featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion("f1", "project1", 2)) .thenReturn(featureSets.get(1)); GetFeatureSetResponse actual = specService.getFeatureSet( - GetFeatureSetRequest.newBuilder().setName("f1").setVersion(2).build()); + GetFeatureSetRequest.newBuilder().setProject("project1").setName("f1").setVersion(2) + .build()); FeatureSet expected = featureSets.get(1); assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); } @Test - public void shouldThrowExceptionGivenMissingFeatureSetName() - throws InvalidProtocolBufferException { - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("INVALID_ARGUMENT: No feature set name provided"); + public void shouldThrowExceptionGivenMissingFeatureSetName() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("No feature set name provided"); specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setVersion(2).build()); } @Test - public void shouldThrowExceptionGivenMissingFeatureSet() throws InvalidProtocolBufferException { - expectedException.expect(StatusRuntimeException.class); + public void shouldThrowExceptionGivenMissingFeatureSet() { + expectedException.expect(RetrievalException.class); expectedException.expectMessage( - "NOT_FOUND: Feature set with name \"f1000\" and version \"2\" could not be found."); + "Feature set with name \"f1000\" and version \"2\" could not be found."); specService.getFeatureSet( - GetFeatureSetRequest.newBuilder().setName("f1000").setVersion(2).build()); + GetFeatureSetRequest.newBuilder().setName("f1000").setProject("project1").setVersion(2) + .build()); } @Test - public void shouldThrowRetrievalExceptionGivenInvalidFeatureSetVersionComparator() - throws InvalidProtocolBufferException { - expectedException.expect(StatusRuntimeException.class); - expectedException.expectMessage("Invalid comparator '=<' provided."); + public void 
shouldThrowRetrievalExceptionGivenInvalidFeatureSetVersionComparator() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage( + "Invalid listFeatureSetRequest. Version must be set to \"*\" if the project name and feature set name aren't set explicitly: \n" + + "feature_set_name: \"f1\"\n" + + "feature_set_version: \">1\"\n" + + "project: \"project1\""); specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("=<1").build()); + Filter.newBuilder().setProject("project1").setFeatureSetName("f1") + .setFeatureSetVersion(">1").build()); } @Test @@ -307,13 +345,14 @@ public void shouldThrowRetrievalExceptionIfNoStoresFoundWithName() { } @Test - public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHasNotChanged() - throws InvalidProtocolBufferException { + public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHasNotChanged() { FeatureSetSpec incomingFeatureSetSpec = featureSets.get(2).toProto().getSpec().toBuilder().clearVersion().build(); + ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet( - FeatureSetProto.FeatureSet.newBuilder().setSpec(incomingFeatureSetSpec).build()); + FeatureSetProto.FeatureSet.newBuilder() + .setSpec(incomingFeatureSetSpec).build()); verify(featureSetRepository, times(0)).save(ArgumentMatchers.any(FeatureSet.class)); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.NO_CHANGE)); @@ -321,75 +360,92 @@ public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHa } @Test - public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() - throws InvalidProtocolBufferException { - when(featureSetRepository.findByName("f2")).thenReturn(Lists.newArrayList()); - FeatureSetSpec incomingFeatureSetSpec = - newDummyFeatureSet("f2", 1).toProto().getSpec().toBuilder().clearVersion().build(); + public void 
applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() { + when(featureSetRepository + .findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( + "f2", "project1")).thenReturn(Lists.newArrayList()); + + FeatureSetProto.FeatureSet incomingFeatureSet = + newDummyFeatureSet("f2", 1, "project1").toProto(); + + FeatureSetProto.FeatureSetSpec incomingFeatureSetSpec = + incomingFeatureSet.getSpec().toBuilder().clearVersion().build(); ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet( - FeatureSetProto.FeatureSet.newBuilder().setSpec(incomingFeatureSetSpec).build()); - verify(featureSetRepository).saveAndFlush(ArgumentMatchers.any(FeatureSet.class)); - FeatureSetSpec expected = - incomingFeatureSetSpec.toBuilder().setVersion(1).setSource(defaultSource.toProto()).build(); + FeatureSetProto.FeatureSet.newBuilder() + .setSpec(incomingFeatureSet.getSpec()).build()); + verify(projectRepository).saveAndFlush(ArgumentMatchers.any(Project.class)); + + FeatureSetProto.FeatureSet expected = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + incomingFeatureSetSpec.toBuilder().setVersion(1).setSource(defaultSource.toProto()) + .build()) + .build(); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); - assertThat(applyFeatureSetResponse.getFeatureSet().getSpec(), equalTo(expected)); + assertThat(applyFeatureSetResponse.getFeatureSet().getSpec(), equalTo(expected.getSpec())); + assertThat(applyFeatureSetResponse.getFeatureSet().getSpec().getVersion(), + equalTo(expected.getSpec().getVersion())); } @Test - public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() - throws InvalidProtocolBufferException { - FeatureSetSpec incomingFeatureSet = + public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() { + FeatureSetProto.FeatureSet incomingFeatureSet = featureSets .get(2) - .toProto() - .getSpec() - .toBuilder() - .clearVersion() - 
.addFeatures(FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)) - .build(); - FeatureSetSpec expected = - incomingFeatureSet.toBuilder().setVersion(4).setSource(defaultSource.toProto()).build(); + .toProto(); + incomingFeatureSet = incomingFeatureSet.toBuilder() + .setMeta(incomingFeatureSet.getMeta()).setSpec( + incomingFeatureSet.getSpec().toBuilder().clearVersion().addFeatures( + FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)).build()) + .build(); + + FeatureSetProto.FeatureSet expected = incomingFeatureSet.toBuilder() + .setMeta(incomingFeatureSet.getMeta().toBuilder().build()).setSpec( + incomingFeatureSet.getSpec().toBuilder().setVersion(4) + .setSource(defaultSource.toProto()).build()).build(); + ApplyFeatureSetResponse applyFeatureSetResponse = - specService.applyFeatureSet( - FeatureSetProto.FeatureSet.newBuilder().setSpec(expected).build()); - verify(featureSetRepository).saveAndFlush(ArgumentMatchers.any(FeatureSet.class)); + specService.applyFeatureSet(incomingFeatureSet); + verify(projectRepository).saveAndFlush(ArgumentMatchers.any(Project.class)); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); - assertThat(applyFeatureSetResponse.getFeatureSet().getSpec(), equalTo(expected)); + assertEquals(FeatureSet.fromProto(applyFeatureSetResponse.getFeatureSet()), + FeatureSet.fromProto(expected)); + assertThat(applyFeatureSetResponse.getFeatureSet().getSpec().getVersion(), + equalTo(expected.getSpec().getVersion())); } @Test - public void applyFeatureSetShouldNotCreateFeatureSetIfFieldsUnordered() - throws InvalidProtocolBufferException { + public void applyFeatureSetShouldNotCreateFeatureSetIfFieldsUnordered() { - Field f3f1 = new Field("f3", "f3f1", Enum.INT64); - Field f3f2 = new Field("f3", "f3f2", Enum.INT64); - Field f3e1 = new Field("f3", "f3e1", Enum.STRING); + Field f3f1 = new Field("f3f1", Enum.INT64); + Field f3f2 = new Field("f3f2", Enum.INT64); + Field f3e1 = new Field("f3e1", 
Enum.STRING); FeatureSetProto.FeatureSet incomingFeatureSet = (new FeatureSet( - "f3", - 5, - 100L, - Arrays.asList(f3e1), - Arrays.asList(f3f2, f3f1), - defaultSource, - FeatureSetStatus.STATUS_READY)) + "f3", + "project1", + 5, + 100L, + Arrays.asList(f3e1), + Arrays.asList(f3f2, f3f1), + defaultSource, + FeatureSetStatus.STATUS_READY)) .toProto(); - FeatureSetProto.FeatureSet expected = incomingFeatureSet; ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet(incomingFeatureSet); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.NO_CHANGE)); assertThat( applyFeatureSetResponse.getFeatureSet().getSpec().getMaxAge(), - equalTo(expected.getSpec().getMaxAge())); + equalTo(incomingFeatureSet.getSpec().getMaxAge())); assertThat( applyFeatureSetResponse.getFeatureSet().getSpec().getEntities(0), - equalTo(expected.getSpec().getEntities(0))); + equalTo(incomingFeatureSet.getSpec().getEntities(0))); assertThat( applyFeatureSetResponse.getFeatureSet().getSpec().getName(), - equalTo(expected.getSpec().getName())); + equalTo(incomingFeatureSet.getSpec().getName())); } @Test @@ -400,7 +456,8 @@ public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferExcep .setName("SERVING") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder()) - .addSubscriptions(Subscription.newBuilder().setName("a").setVersion(">1")) + .addSubscriptions( + Subscription.newBuilder().setProject("project1").setName("a").setVersion(">1")) .build(); UpdateStoreResponse actual = specService.updateStore(UpdateStoreRequest.newBuilder().setStore(newStore).build()); @@ -430,12 +487,20 @@ public void shouldDoNothingIfNoChange() throws InvalidProtocolBufferException { assertThat(actual, equalTo(expected)); } - private FeatureSet newDummyFeatureSet(String name, int version) { - Field feature = new Field(name, "feature", Enum.INT64); - Field entity = new Field(name, "entity", Enum.STRING); + @Test + public void 
shouldFailIfGetFeatureSetWithoutProject() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("No project provided"); + specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").build()); + } + + private FeatureSet newDummyFeatureSet(String name, int version, String project) { + Field feature = new Field("feature", Enum.INT64); + Field entity = new Field("entity", Enum.STRING); FeatureSet fs = new FeatureSet( name, + project, version, 100L, Arrays.asList(entity), @@ -451,7 +516,7 @@ private Store newDummyStore(String name) { Store store = new Store(); store.setName(name); store.setType(StoreType.REDIS.toString()); - store.setSubscriptions(""); + store.setSubscriptions("*:*:*"); store.setConfig(RedisConfig.newBuilder().setPort(6379).build().toByteArray()); return store; } diff --git a/infra/charts/feast/charts/feast-serving/values.yaml b/infra/charts/feast/charts/feast-serving/values.yaml index 857c3fcfd5..d489a48748 100644 --- a/infra/charts/feast/charts/feast-serving/values.yaml +++ b/infra/charts/feast/charts/feast-serving/values.yaml @@ -90,8 +90,9 @@ application.yaml: # host: localhost # port: 6379 # subscriptions: -# - name: "*" -# version: ">0" +# - project: "*" +# name: "*" +# version: "*" # # store.yaml: # name: bigquery @@ -100,8 +101,9 @@ application.yaml: # project_id: PROJECT_ID # dataset_id: DATASET_ID # subscriptions: -# - name: "*" -# version: ">0" +# - project: "*" +# name: "*" +# version: "*" # springConfigMountPath is the directory path where application.yaml and # store.yaml will be mounted in the container. 
diff --git a/infra/charts/feast/values-demo.yaml b/infra/charts/feast/values-demo.yaml index 9212070eb5..fad4bc0afb 100644 --- a/infra/charts/feast/values-demo.yaml +++ b/infra/charts/feast/values-demo.yaml @@ -64,7 +64,8 @@ feast-serving-online: type: REDIS subscriptions: - name: "*" - version: ">0" + project: "*" + version: "*" feast-serving-batch: enabled: false diff --git a/infra/charts/feast/values.yaml b/infra/charts/feast/values.yaml index fd75a3fce6..ebc8c802a1 100644 --- a/infra/charts/feast/values.yaml +++ b/infra/charts/feast/values.yaml @@ -124,7 +124,8 @@ feast-serving-online: port: 6379 subscriptions: - name: "*" - version: ">0" + project: "*" + version: "*" # ============================================================ # Feast Serving Batch @@ -200,4 +201,5 @@ feast-serving-batch: dataset_id: DATASET_ID subscriptions: - name: "*" - version: ">0" + project: "*" + version: "*" diff --git a/infra/docker-compose/serving/bq-store.yml b/infra/docker-compose/serving/bq-store.yml index bb91281931..cdebee3497 100644 --- a/infra/docker-compose/serving/bq-store.yml +++ b/infra/docker-compose/serving/bq-store.yml @@ -4,5 +4,6 @@ bigquery_config: project_id: dataset_id: subscriptions: - - name: "*" - version: ">0" \ No newline at end of file +- name: "*" + project: "*" + version: "*" \ No newline at end of file diff --git a/infra/docker-compose/serving/redis-store.yml b/infra/docker-compose/serving/redis-store.yml index 600a2e2e4f..16870be94d 100644 --- a/infra/docker-compose/serving/redis-store.yml +++ b/infra/docker-compose/serving/redis-store.yml @@ -4,5 +4,6 @@ redis_config: host: redis port: 6379 subscriptions: - - name: "*" - version: ">0" +- name: "*" + project: "*" + version: "*" diff --git a/ingestion/example/core_specs/entity/product.json b/ingestion/example/core_specs/entity/product.json deleted file mode 100644 index c8cb1177a5..0000000000 --- a/ingestion/example/core_specs/entity/product.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "product", - 
"description": "This entity capture features for products, keys for this entity are product ids", - "tags": [] -} \ No newline at end of file diff --git a/ingestion/example/core_specs/entity/user.json b/ingestion/example/core_specs/entity/user.json deleted file mode 100644 index b1864fe4af..0000000000 --- a/ingestion/example/core_specs/entity/user.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "user", - "description": "This entity capture features for users, keys for this entity are user ids", - "tags": [] -} \ No newline at end of file diff --git a/ingestion/example/core_specs/feature/product.day.completed_orders.json b/ingestion/example/core_specs/feature/product.day.completed_orders.json deleted file mode 100644 index a4582848e0..0000000000 --- a/ingestion/example/core_specs/feature/product.day.completed_orders.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "id": "product.completed_orders", - "entity": "product", - "name": "completed_orders", - "owner": "feast@example.com", - "description": "This feature represents a product's completed orders per day", - "uri": "https://example.com/", - "valueType": "INT32", - "tags": [], - "options": {}, - "dataStores": { - "serving": { - "id": "example_serving" - }, - "warehouse": { - "id": "example_warehouse" - } - } -} \ No newline at end of file diff --git a/ingestion/example/core_specs/feature/user.none.age.json b/ingestion/example/core_specs/feature/user.none.age.json deleted file mode 100644 index 9b8f8a68cf..0000000000 --- a/ingestion/example/core_specs/feature/user.none.age.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "id": "user.age", - "entity": "user", - "name": "age", - "owner": "feast@example.com", - "description": "This feature represents a user's age", - "uri": "https://example.com/", - "valueType": "INT32", - "tags": [], - "options": {}, - "dataStores": { - "serving": { - "id": "example_serving" - }, - "warehouse": { - "id": "example_warehouse" - } - } -} \ No newline at end of file diff --git 
a/ingestion/example/core_specs/feature/user.none.completed_orders.json b/ingestion/example/core_specs/feature/user.none.completed_orders.json deleted file mode 100644 index 0280cf7483..0000000000 --- a/ingestion/example/core_specs/feature/user.none.completed_orders.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "id": "user.completed_orders", - "entity": "user", - "name": "completed_orders", - "owner": "feast@example.com", - "description": "This feature represents a user's total completed orders", - "uri": "https://example.com/", - "valueType": "INT32", - "tags": [], - "options": {}, - "dataStores": { - "serving": { - "id": "example_serving" - }, - "warehouse": { - "id": "example_warehouse" - } - } -} diff --git a/ingestion/example/core_specs/storage/example_errors.json b/ingestion/example/core_specs/storage/example_errors.json deleted file mode 100644 index f5f0be752d..0000000000 --- a/ingestion/example/core_specs/storage/example_errors.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "example_errors", - "type": "file.json", - "options": { - "path": "output/errors/data" - } -} \ No newline at end of file diff --git a/ingestion/example/core_specs/storage/example_serving.json b/ingestion/example/core_specs/storage/example_serving.json deleted file mode 100644 index 49224898a9..0000000000 --- a/ingestion/example/core_specs/storage/example_serving.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "example_serving", - "type": "file.json", - "options": { - "path": "output/serving/data" - } -} \ No newline at end of file diff --git a/ingestion/example/core_specs/storage/example_warehouse.json b/ingestion/example/core_specs/storage/example_warehouse.json deleted file mode 100644 index baa5b5a055..0000000000 --- a/ingestion/example/core_specs/storage/example_warehouse.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "id": "example_warehouse", - "type": "file.json", - "options": { - "path": "output/warehouse/data" - } -} \ No newline at end of file diff --git 
a/ingestion/example/import_products.yaml b/ingestion/example/import_products.yaml deleted file mode 100644 index cb52580cf0..0000000000 --- a/ingestion/example/import_products.yaml +++ /dev/null @@ -1,15 +0,0 @@ ---- -type: file -options: - format: csv - path: sample_data/daily_products.csv -entities: - - product -schema: - entityIdColumn: id - timestampColumn: timestamp - fields: - - name: id - - name: timestamp - - featureId: product.completed_orders - diff --git a/ingestion/example/import_users.yaml b/ingestion/example/import_users.yaml deleted file mode 100644 index 5e2d6e553a..0000000000 --- a/ingestion/example/import_users.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -type: file -options: - format: csv - path: sample_data/users.csv -entities: - - user -schema: - entityIdColumn: id - timestampValue: 2018-10-25T00:00:00.000Z - fields: - - name: id - - name: name - - featureId: user.age - - featureId: user.completed_orders - diff --git a/ingestion/example/sample_data/daily_products.csv b/ingestion/example/sample_data/daily_products.csv deleted file mode 100644 index 66939b7c19..0000000000 --- a/ingestion/example/sample_data/daily_products.csv +++ /dev/null @@ -1,5 +0,0 @@ -1,2018-10-25,7 -1,2018-10-26,6 -2,2018-10-26,1 -3,2018-10-25,15 -3,2018-10-26,13 diff --git a/ingestion/example/sample_data/users.csv b/ingestion/example/sample_data/users.csv deleted file mode 100644 index f4eda616a2..0000000000 --- a/ingestion/example/sample_data/users.csv +++ /dev/null @@ -1,3 +0,0 @@ -1,Tim,37,13 -2,Aria,28,12 -2,Zhiling,28,14 \ No newline at end of file diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index fb719120d4..41af5f9bb4 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -16,8 +16,10 @@ */ package feast.ingestion; +import static feast.ingestion.utils.SpecUtil.getFeatureSetReference; + import 
com.google.protobuf.InvalidProtocolBufferException; -import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.SourceProto.Source; import feast.core.StoreProto.Store; import feast.ingestion.options.ImportOptions; @@ -31,16 +33,15 @@ import feast.ingestion.utils.StoreUtil; import feast.ingestion.values.FailedElement; import feast.types.FeatureRowProto.FeatureRow; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.options.PipelineOptionsValidator; import org.apache.beam.sdk.values.PCollectionTuple; import org.apache.beam.sdk.values.TupleTag; -import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; public class ImportJob { @@ -79,26 +80,25 @@ public static PipelineResult runPipeline(ImportOptions options) log.info("Starting import job with settings: \n{}", options.toString()); - List featureSetSpecs = - SpecUtil.parseFeatureSetSpecJsonList(options.getFeatureSetSpecJson()); + List featureSets = + SpecUtil.parseFeatureSetSpecJsonList(options.getFeatureSetJson()); List stores = SpecUtil.parseStoreJsonList(options.getStoreJson()); for (Store store : stores) { - List subscribedFeatureSets = - SpecUtil.getSubscribedFeatureSets(store.getSubscriptionsList(), featureSetSpecs); + List subscribedFeatureSets = + SpecUtil.getSubscribedFeatureSets(store.getSubscriptionsList(), featureSets); // Generate tags by key - Map featureSetSpecsByKey = - subscribedFeatureSets.stream() - .map( - fs -> { - String id = String.format("%s:%s", fs.getName(), fs.getVersion()); - return Pair.of(id, fs); - }) - .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); + Map featureSetsByKey = new HashMap<>(); + subscribedFeatureSets.stream() + .forEach( + fs -> { + String ref = getFeatureSetReference(fs); + 
featureSetsByKey.put(ref, fs); + }); // TODO: make the source part of the job initialisation options - Source source = subscribedFeatureSets.get(0).getSource(); + Source source = subscribedFeatureSets.get(0).getSpec().getSource(); // Step 1. Read messages from Feast Source as FeatureRow. PCollectionTuple convertedFeatureRows = @@ -110,7 +110,7 @@ public static PipelineResult runPipeline(ImportOptions options) .setFailureTag(DEADLETTER_OUT) .build()); - for (FeatureSetSpec featureSet : subscribedFeatureSets) { + for (FeatureSet featureSet : subscribedFeatureSets) { // Ensure Store has valid configuration and Feast can access it. StoreUtil.setupStore(store, featureSet); } @@ -121,7 +121,7 @@ public static PipelineResult runPipeline(ImportOptions options) .get(FEATURE_ROW_OUT) .apply( ValidateFeatureRows.newBuilder() - .setFeatureSetSpecs(featureSetSpecsByKey) + .setFeatureSets(featureSetsByKey) .setSuccessTag(FEATURE_ROW_OUT) .setFailureTag(DEADLETTER_OUT) .build()); @@ -131,10 +131,7 @@ public static PipelineResult runPipeline(ImportOptions options) .get(FEATURE_ROW_OUT) .apply( "WriteFeatureRowToStore", - WriteToStore.newBuilder() - .setFeatureSetSpecs(featureSetSpecsByKey) - .setStore(store) - .build()); + WriteToStore.newBuilder().setFeatureSets(featureSetsByKey).setStore(store).build()); // Step 4. Write FailedElements to a dead letter table in BigQuery. if (options.getDeadLetterTableSpec() != null) { diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java index 21df87e4b2..b299bb47e5 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -28,16 +28,16 @@ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, DirectOptions { @Required @Description( - "JSON string representation of the FeatureSetSpec that the import job will process." 
- + "FeatureSetSpec follows the format in feast.core.FeatureSet proto." - + "Mutliple FeatureSetSpec can be passed by specifying '--featureSetSpec={...}' multiple times" + "JSON string representation of the FeatureSet that the import job will process." + + "FeatureSet follows the format in feast.core.FeatureSet proto." + + "Mutliple FeatureSetSpec can be passed by specifying '--featureSet={...}' multiple times" + "The conversion of Proto message to JSON should follow this mapping:" + "https://developers.google.com/protocol-buffers/docs/proto3#json" + "Please minify and remove all insignificant whitespace such as newline in the JSON string" + "to prevent error when parsing the options") - List getFeatureSetSpecJson(); + List getFeatureSetJson(); - void setFeatureSetSpecJson(List featureSetSpecJson); + void setFeatureSetJson(List featureSetJson); @Required @Description( diff --git a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java index 19b1f1f860..5ca6a710f6 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java @@ -20,7 +20,7 @@ import feast.core.FeatureSetProto; import feast.ingestion.transform.fn.ValidateFeatureRowDoFn; import feast.ingestion.values.FailedElement; -import feast.ingestion.values.FeatureSetSpec; +import feast.ingestion.values.FeatureSet; import feast.types.FeatureRowProto.FeatureRow; import java.util.Map; import java.util.stream.Collectors; @@ -36,7 +36,7 @@ public abstract class ValidateFeatureRows extends PTransform, PCollectionTuple> { - public abstract Map getFeatureSetSpecs(); + public abstract Map getFeatureSets(); public abstract TupleTag getSuccessTag(); @@ -49,8 +49,7 @@ public static Builder newBuilder() { @AutoValue.Builder public abstract static class Builder { - public abstract Builder setFeatureSetSpecs( - Map 
featureSetSpec); + public abstract Builder setFeatureSets(Map featureSets); public abstract Builder setSuccessTag(TupleTag successTag); @@ -62,16 +61,16 @@ public abstract Builder setFeatureSetSpecs( @Override public PCollectionTuple expand(PCollection input) { - Map featureSetSpecs = - getFeatureSetSpecs().entrySet().stream() - .map(e -> Pair.of(e.getKey(), new FeatureSetSpec(e.getValue()))) + Map featureSets = + getFeatureSets().entrySet().stream() + .map(e -> Pair.of(e.getKey(), new FeatureSet(e.getValue()))) .collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); return input.apply( "ValidateFeatureRows", ParDo.of( ValidateFeatureRowDoFn.newBuilder() - .setFeatureSetSpecs(featureSetSpecs) + .setFeatureSets(featureSets) .setSuccessTag(getSuccessTag()) .setFailureTag(getFailureTag()) .build()) diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java index 2e3a0a5dde..6aed943931 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteToStore.java @@ -19,7 +19,7 @@ import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors; import com.google.api.services.bigquery.model.TableRow; import com.google.auto.value.AutoValue; -import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.StoreProto.Store; import feast.core.StoreProto.Store.BigQueryConfig; import feast.core.StoreProto.Store.RedisConfig; @@ -65,7 +65,7 @@ public abstract class WriteToStore extends PTransform, P public abstract Store getStore(); - public abstract Map getFeatureSetSpecs(); + public abstract Map getFeatureSets(); public static Builder newBuilder() { return new AutoValue_WriteToStore.Builder(); @@ -76,7 +76,7 @@ public abstract static class Builder { public abstract Builder setStore(Store store); - public abstract Builder 
setFeatureSetSpecs(Map featureSetSpecs); + public abstract Builder setFeatureSets(Map featureSets); public abstract WriteToStore build(); } @@ -92,7 +92,7 @@ public PDone expand(PCollection input) { input .apply( "FeatureRowToRedisMutation", - ParDo.of(new FeatureRowToRedisMutationDoFn(getFeatureSetSpecs()))) + ParDo.of(new FeatureRowToRedisMutationDoFn(getFeatureSets()))) .apply( "WriteRedisMutationToRedis", RedisCustomIO.write(redisConfig.getHost(), redisConfig.getPort())); diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java index eeb5ce6732..dfbb48fc85 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java @@ -18,8 +18,7 @@ import com.google.auto.value.AutoValue; import feast.ingestion.values.FailedElement; -import feast.ingestion.values.FailedElement.Builder; -import feast.ingestion.values.FeatureSetSpec; +import feast.ingestion.values.FeatureSet; import feast.ingestion.values.Field; import feast.types.FeatureRowProto.FeatureRow; import feast.types.FieldProto; @@ -31,7 +30,7 @@ @AutoValue public abstract class ValidateFeatureRowDoFn extends DoFn { - public abstract Map getFeatureSetSpecs(); + public abstract Map getFeatureSets(); public abstract TupleTag getSuccessTag(); @@ -44,7 +43,7 @@ public static Builder newBuilder() { @AutoValue.Builder public abstract static class Builder { - public abstract Builder setFeatureSetSpecs(Map featureSetSpecs); + public abstract Builder setFeatureSets(Map featureSets); public abstract Builder setSuccessTag(TupleTag successTag); @@ -57,17 +56,16 @@ public abstract static class Builder { public void processElement(ProcessContext context) { String error = null; FeatureRow featureRow = context.element(); - FeatureSetSpec featureSetSpec = - 
getFeatureSetSpecs().getOrDefault(featureRow.getFeatureSet(), null); - if (featureSetSpec != null) { + FeatureSet featureSet = getFeatureSets().getOrDefault(featureRow.getFeatureSet(), null); + if (featureSet != null) { for (FieldProto.Field field : featureRow.getFieldsList()) { - Field fieldSpec = featureSetSpec.getField(field.getName()); + Field fieldSpec = featureSet.getField(field.getName()); if (fieldSpec == null) { error = String.format( "FeatureRow contains field '%s' which do not exists in FeatureSet '%s' version '%d'. Please check the FeatureRow data.", - field.getName(), featureSetSpec.getId()); + field.getName(), featureSet.getReference()); break; } // If value is set in the FeatureRow, make sure the value type matches @@ -98,9 +96,14 @@ public void processElement(ProcessContext context) { .setJobName(context.getPipelineOptions().getJobName()) .setPayload(featureRow.toString()) .setErrorMessage(error); - if (featureSetSpec != null) { - String[] split = featureSetSpec.getId().split(":"); - failedElement = failedElement.setFeatureSetName(split[0]).setFeatureSetVersion(split[1]); + if (featureSet != null) { + String[] split = featureSet.getReference().split(":"); + String[] nameSplit = split[0].split("/"); + failedElement = + failedElement + .setProjectName(nameSplit[0]) + .setFeatureSetName(nameSplit[1]) + .setFeatureSetVersion(split[1]); } context.output(getFailureTag(), failedElement.build()); } else { diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java index 452bfb2377..687670c5cf 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java @@ -33,6 +33,7 @@ public abstract class WriteDeadletterRowMetricsDoFn extends DoFn { private final String METRIC_PREFIX = 
"feast_ingestion"; private final String STORE_TAG_KEY = "feast_store"; + private final String FEATURE_SET_PROJECT_TAG_KEY = "feast_project_name"; private final String FEATURE_SET_NAME_TAG_KEY = "feast_featureSet_name"; private final String FEATURE_SET_VERSION_TAG_KEY = "feast_featureSet_version"; private final String FEATURE_TAG_KEY = "feast_feature_name"; @@ -46,10 +46,7 @@ public abstract class WriteRowMetricsDoFn extends DoFn { public abstract int getStatsdPort(); public static WriteRowMetricsDoFn create( - String newStoreName, - FeatureSetSpec newFeatureSetSpec, - String newStatsdHost, - int newStatsdPort) { + String newStoreName, String newStatsdHost, int newStatsdPort) { return newBuilder() .setStoreName(newStoreName) .setStatsdHost(newStatsdHost) @@ -88,13 +85,15 @@ public void processElement(ProcessContext c) { long eventTimestamp = com.google.protobuf.util.Timestamps.toMillis(row.getEventTimestamp()); String[] split = row.getFeatureSet().split(":"); - String featureSetName = split[0]; + String featureSetProject = split[0].split("/")[0]; + String featureSetName = split[0].split("/")[1]; String featureSetVersion = split[1]; statsd.histogram( "feature_row_lag_ms", System.currentTimeMillis() - eventTimestamp, STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); @@ -103,6 +102,7 @@ public void processElement(ProcessContext c) { "feature_row_event_time_epoch_ms", eventTimestamp, STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); @@ -113,6 +113,7 @@ public void processElement(ProcessContext c) { "feature_value_lag_ms", 
System.currentTimeMillis() - eventTimestamp, STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, FEATURE_TAG_KEY + ":" + field.getName(), @@ -122,6 +123,7 @@ public void processElement(ProcessContext c) { "feature_value_missing_count", 1, STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, FEATURE_TAG_KEY + ":" + field.getName(), @@ -133,6 +135,7 @@ public void processElement(ProcessContext c) { "feature_row_ingested_count", 1, STORE_TAG_KEY + ":" + getStoreName(), + FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); diff --git a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java index 132a2e93bf..5419115e2e 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java @@ -19,6 +19,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.FeatureSetProto.FeatureSpec; import feast.core.StoreProto.Store; @@ -32,12 +33,46 @@ public class SpecUtil { - /** Get only feature set specs that matches the subscription */ - public static List getSubscribedFeatureSets( - List subscriptions, List featureSetSpecs) { - List subscribed = new ArrayList<>(); - for (FeatureSetSpec featureSet : featureSetSpecs) { + public static String getFeatureSetReference(FeatureSet 
featureSet) { + FeatureSetSpec spec = featureSet.getSpec(); + return String.format("%s/%s:%d", spec.getProject(), spec.getName(), spec.getVersion()); + } + + /** + * Get only feature set specs that matches the subscription + */ + public static List getSubscribedFeatureSets( + List subscriptions, List featureSets) { + List subscribed = new ArrayList<>(); + for (FeatureSet featureSet : featureSets) { for (Subscription sub : subscriptions) { + // If configuration missing, fail + if (sub.getProject().isEmpty() || sub.getName().isEmpty() || sub.getVersion().isEmpty()) { + throw new IllegalArgumentException( + String.format("Subscription is missing arguments: %s", sub.toString())); + } + + // If all wildcards, subscribe to everything + if (sub.getProject().equals("*") || sub.getName().equals("*") || sub.getVersion() + .equals("*")) { + subscribed.add(featureSet); + break; + } + + // If all wildcards, subscribe to everything + if (sub.getProject().equals("*") && (!sub.getName().equals("*") || !sub.getVersion() + .equals("*"))) { + throw new IllegalArgumentException( + String.format( + "Subscription cannot have feature set name and/or version set if project is not defined: %s", + sub.toString())); + } + + // Match project name + if (!featureSet.getSpec().getProject().equals(sub.getProject())) { + continue; + } + // Convert wildcard to regex String subName = sub.getName(); if (!sub.getName().contains(".*")) { @@ -46,25 +81,25 @@ public static List getSubscribedFeatureSets( // Match feature set name to pattern Pattern pattern = Pattern.compile(subName); - if (!pattern.matcher(featureSet.getName()).matches()) { + if (!pattern.matcher(featureSet.getSpec().getName()).matches()) { continue; } - // If version is empty, match all - if (sub.getVersion().isEmpty()) { + // If version is '*', match all + if (sub.getVersion().equals("*")) { subscribed.add(featureSet); break; - } else if (sub.getVersion().startsWith(">") && sub.getVersion().length() > 1) { - // if version starts with >, 
match only those greater than the version number - int lowerBoundIncl = Integer.parseInt(sub.getVersion().substring(1)); - if (featureSet.getVersion() >= lowerBoundIncl) { - subscribed.add(featureSet); - break; - } + } else if (sub.getVersion().equals("latest")) { + // if version is "latest" + throw new RuntimeException( + String.format( + "Support for latest feature set subscription has not been implemented yet: %s", + sub.toString())); + } else { // If a specific version, match that version alone int version = Integer.parseInt(sub.getVersion()); - if (featureSet.getVersion() == version) { + if (featureSet.getSpec().getVersion() == version) { subscribed.add(featureSet); break; } @@ -74,15 +109,15 @@ public static List getSubscribedFeatureSets( return subscribed; } - public static List parseFeatureSetSpecJsonList(List jsonList) + public static List parseFeatureSetSpecJsonList(List jsonList) throws InvalidProtocolBufferException { - List featureSetSpecs = new ArrayList<>(); + List featureSets = new ArrayList<>(); for (String json : jsonList) { FeatureSetSpec.Builder builder = FeatureSetSpec.newBuilder(); JsonFormat.parser().merge(json, builder); - featureSetSpecs.add(builder.build()); + featureSets.add(FeatureSet.newBuilder().setSpec(builder.build()).build()); } - return featureSetSpecs; + return featureSets; } public static List parseStoreJsonList(List jsonList) diff --git a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java index 5ceb8bd2f9..dc67bd86b4 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java @@ -36,6 +36,7 @@ import com.google.cloud.bigquery.TimePartitioning.Type; import com.google.common.collect.ImmutableMap; import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.FeatureSetProto.FeatureSetSpec; import 
feast.core.FeatureSetProto.FeatureSpec; import feast.core.StoreProto.Store; @@ -99,7 +100,7 @@ public class StoreUtil { VALUE_TYPE_TO_STANDARD_SQL_TYPE.put(Enum.BOOL_LIST, StandardSQLTypeName.BOOL); } - public static void setupStore(Store store, FeatureSetSpec featureSetSpec) { + public static void setupStore(Store store, FeatureSet featureSet) { StoreType storeType = store.getType(); switch (storeType) { case REDIS: @@ -107,7 +108,7 @@ public static void setupStore(Store store, FeatureSetSpec featureSetSpec) { break; case BIGQUERY: StoreUtil.setupBigQuery( - featureSetSpec, + featureSet, store.getBigqueryConfig().getProjectId(), store.getBigqueryConfig().getDatasetId(), BigQueryOptions.getDefaultInstance().getService()); @@ -185,17 +186,18 @@ public static TableDefinition createBigQueryTableDefinition(FeatureSetSpec featu *

Refer to protos/feast/core/Store.proto for the derivation of the table name and schema from * a FeatureSetSpec object. * - * @param featureSetSpec FeatureSetSpec object + * @param featureSet FeatureSet object * @param bigqueryProjectId BigQuery project id * @param bigqueryDatasetId BigQuery dataset id * @param bigquery BigQuery service object */ public static void setupBigQuery( - FeatureSetSpec featureSetSpec, + FeatureSet featureSet, String bigqueryProjectId, String bigqueryDatasetId, BigQuery bigquery) { + FeatureSetSpec featureSetSpec = featureSet.getSpec(); // Ensure BigQuery dataset exists. DatasetId datasetId = DatasetId.of(bigqueryProjectId, bigqueryDatasetId); if (bigquery.getDataset(datasetId) == null) { @@ -204,7 +206,9 @@ public static void setupBigQuery( } String tableName = - String.format("%s_v%d", featureSetSpec.getName(), featureSetSpec.getVersion()) + String.format( + "%s_%s_v%d", + featureSetSpec.getProject(), featureSetSpec.getName(), featureSetSpec.getVersion()) .replaceAll("-", "_"); TableId tableId = TableId.of(bigqueryProjectId, datasetId.getDataset(), tableName); @@ -224,7 +228,7 @@ public static void setupBigQuery( tableId.getTable(), datasetId.getDataset(), bigqueryProjectId); - TableDefinition tableDefinition = createBigQueryTableDefinition(featureSetSpec); + TableDefinition tableDefinition = createBigQueryTableDefinition(featureSet.getSpec()); TableInfo tableInfo = TableInfo.of(tableId, tableDefinition); bigquery.create(tableInfo); } diff --git a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java index a7fd162f35..9606c27d19 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java +++ b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java @@ -33,6 +33,9 @@ public abstract class FailedElement { @Nullable public abstract String getJobName(); + @Nullable + public abstract String getProjectName(); + @Nullable public abstract 
String getFeatureSetName(); @@ -59,6 +62,8 @@ public static Builder newBuilder() { public abstract static class Builder { public abstract Builder setTimestamp(Instant timestamp); + public abstract Builder setProjectName(String projectName); + public abstract Builder setFeatureSetName(String featureSetName); public abstract Builder setFeatureSetVersion(String featureSetVersion); diff --git a/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java b/ingestion/src/main/java/feast/ingestion/values/FeatureSet.java similarity index 76% rename from ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java rename to ingestion/src/main/java/feast/ingestion/values/FeatureSet.java index 8c6e804a06..bf07bcec96 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FeatureSetSpec.java +++ b/ingestion/src/main/java/feast/ingestion/values/FeatureSet.java @@ -16,6 +16,7 @@ */ package feast.ingestion.values; +import static feast.ingestion.utils.SpecUtil.getFeatureSetReference; import static feast.ingestion.utils.SpecUtil.getFieldsByName; import feast.core.FeatureSetProto; @@ -28,18 +29,18 @@ * *

The use for this class is mainly for validating the Fields in FeatureRow. */ -public class FeatureSetSpec implements Serializable { - private final String id; +public class FeatureSet implements Serializable { + private final String reference; private final Map fields; - public FeatureSetSpec(FeatureSetProto.FeatureSetSpec featureSetSpec) { - this.id = String.format("%s:%d", featureSetSpec.getName(), featureSetSpec.getVersion()); - this.fields = getFieldsByName(featureSetSpec); + public FeatureSet(FeatureSetProto.FeatureSet featureSet) { + this.reference = getFeatureSetReference(featureSet); + this.fields = getFieldsByName(featureSet.getSpec()); } - public String getId() { - return id; + public String getReference() { + return reference; } public Field getField(String fieldName) { diff --git a/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java b/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java index cf02713ca9..eb37db9449 100644 --- a/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java +++ b/ingestion/src/main/java/feast/store/serving/bigquery/GetTableDestination.java @@ -36,6 +36,7 @@ public GetTableDestination(String projectId, String datasetId) { @Override public TableDestination apply(ValueInSingleWindow input) { String[] split = input.getValue().getFeatureSet().split(":"); + String[] splitName = split[0].split("/"); TimePartitioning timePartitioning = new TimePartitioning() @@ -43,7 +44,8 @@ public TableDestination apply(ValueInSingleWindow input) { .setField(FeatureRowToTableRow.getEventTimestampColumn()); return new TableDestination( - String.format("%s:%s.%s_v%s", projectId, datasetId, split[0], split[1]), + String.format( + "%s:%s.%s_%s_v%s", projectId, datasetId, splitName[0], splitName[1], split[1]), String.format("Feast table for %s", input.getValue().getFeatureSet()), timePartitioning); } diff --git 
a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java index 9bc503f987..27cca2ffb2 100644 --- a/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java +++ b/ingestion/src/main/java/feast/store/serving/redis/FeatureRowToRedisMutationDoFn.java @@ -17,7 +17,7 @@ package feast.store.serving.redis; import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.storage.RedisProto.RedisKey; import feast.storage.RedisProto.RedisKey.Builder; import feast.store.serving.redis.RedisCustomIO.Method; @@ -34,16 +34,16 @@ public class FeatureRowToRedisMutationDoFn extends DoFn featureSetSpecs; + private Map featureSets; - public FeatureRowToRedisMutationDoFn(Map featureSetSpecs) { - this.featureSetSpecs = featureSetSpecs; + public FeatureRowToRedisMutationDoFn(Map featureSets) { + this.featureSets = featureSets; } private RedisKey getKey(FeatureRow featureRow) { - FeatureSetSpec featureSetSpec = featureSetSpecs.get(featureRow.getFeatureSet()); + FeatureSet featureSet = featureSets.get(featureRow.getFeatureSet()); Set entityNames = - featureSetSpec.getEntitiesList().stream() + featureSet.getSpec().getEntitiesList().stream() .map(EntitySpec::getName) .collect(Collectors.toSet()); diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java index 4a09bee82f..290b38dabe 100644 --- a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java +++ b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java @@ -20,6 +20,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.FeatureSetProto.FeatureSetSpec; import 
feast.core.FeatureSetProto.FeatureSpec; import feast.core.SourceProto.KafkaSourceConfig; @@ -113,6 +114,7 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() FeatureSetSpec.newBuilder() .setName("feature_set") .setVersion(3) + .setProject("myproject") .addEntities( EntitySpec.newBuilder() .setName("entity_id_primary") @@ -143,6 +145,8 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() .build()) .build(); + FeatureSet featureSet = FeatureSet.newBuilder().setSpec(spec).build(); + Store redis = Store.newBuilder() .setName(StoreType.REDIS.toString()) @@ -151,15 +155,16 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() RedisConfig.newBuilder().setHost(REDIS_HOST).setPort(REDIS_PORT).build()) .addSubscriptions( Subscription.newBuilder() + .setProject(spec.getProject()) .setName(spec.getName()) .setVersion(String.valueOf(spec.getVersion())) .build()) .build(); ImportOptions options = PipelineOptionsFactory.create().as(ImportOptions.class); - options.setFeatureSetSpecJson( + options.setFeatureSetJson( Collections.singletonList( - JsonFormat.printer().omittingInsignificantWhitespace().print(spec))); + JsonFormat.printer().omittingInsignificantWhitespace().print(featureSet.getSpec()))); options.setStoreJson( Collections.singletonList( JsonFormat.printer().omittingInsignificantWhitespace().print(redis))); @@ -173,8 +178,8 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() IntStream.range(0, IMPORT_JOB_SAMPLE_FEATURE_ROW_SIZE) .forEach( i -> { - FeatureRow randomRow = TestUtil.createRandomFeatureRow(spec); - RedisKey redisKey = TestUtil.createRedisKey(spec, randomRow); + FeatureRow randomRow = TestUtil.createRandomFeatureRow(featureSet); + RedisKey redisKey = TestUtil.createRedisKey(featureSet, randomRow); input.add(randomRow); expected.put(redisKey, randomRow); }); diff --git 
a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java b/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java index 7f2d717688..87af809120 100644 --- a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java @@ -19,6 +19,8 @@ import static org.junit.Assert.*; import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSet; +import feast.core.FeatureSetProto.FeatureSetMeta; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.FeatureSetProto.FeatureSpec; import feast.ingestion.values.FailedElement; @@ -40,6 +42,7 @@ import org.junit.Test; public class ValidateFeatureRowsTest { + @Rule public transient TestPipeline p = TestPipeline.create(); private static final TupleTag SUCCESS_TAG = new TupleTag() {}; @@ -48,55 +51,73 @@ public class ValidateFeatureRowsTest { @Test public void shouldWriteSuccessAndFailureTagsCorrectly() { - FeatureSetSpec fs1 = - FeatureSetSpec.newBuilder() - .setName("feature_set") - .setVersion(1) - .addEntities( - EntitySpec.newBuilder() - .setName("entity_id_primary") - .setValueType(Enum.INT32) - .build()) - .addEntities( - EntitySpec.newBuilder() - .setName("entity_id_secondary") - .setValueType(Enum.STRING) - .build()) - .addFeatures( - FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) - .addFeatures( - FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + FeatureSet fs1 = + FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setVersion(1) + .setProject("myproject") + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder() + 
.setName("feature_1") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature_2") + .setValueType(Enum.INT64) + .build())) .build(); - FeatureSetSpec fs2 = - FeatureSetSpec.newBuilder() - .setName("feature_set") - .setVersion(2) - .addEntities( - EntitySpec.newBuilder() - .setName("entity_id_primary") - .setValueType(Enum.INT32) - .build()) - .addEntities( - EntitySpec.newBuilder() - .setName("entity_id_secondary") - .setValueType(Enum.STRING) - .build()) - .addFeatures( - FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) - .addFeatures( - FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + FeatureSet fs2 = + FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setVersion(2) + .setProject("myproject") + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature_1") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature_2") + .setValueType(Enum.INT64) + .build())) .build(); - Map featureSetSpecs = new HashMap<>(); - featureSetSpecs.put("feature_set:1", fs1); - featureSetSpecs.put("feature_set:2", fs2); + Map featureSets = new HashMap<>(); + featureSets.put("myproject/feature_set:1", fs1); + featureSets.put("myproject/feature_set:2", fs2); List input = new ArrayList<>(); List expected = new ArrayList<>(); - for (FeatureSetSpec featureSetSpec : featureSetSpecs.values()) { - FeatureRow randomRow = TestUtil.createRandomFeatureRow(featureSetSpec); + for (FeatureSet featureSet : featureSets.values()) { + FeatureRow randomRow = TestUtil.createRandomFeatureRow(featureSet); input.add(randomRow); expected.add(randomRow); } @@ -110,7 +131,7 @@ public 
void shouldWriteSuccessAndFailureTagsCorrectly() { ValidateFeatureRows.newBuilder() .setFailureTag(FAILURE_TAG) .setSuccessTag(SUCCESS_TAG) - .setFeatureSetSpecs(featureSetSpecs) + .setFeatureSets(featureSets) .build()); PAssert.that(output.get(SUCCESS_TAG)).containsInAnyOrder(expected); diff --git a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java index 1579cc7a6b..4e2297e405 100644 --- a/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java +++ b/ingestion/src/test/java/feast/ingestion/util/StoreUtilTest.java @@ -37,6 +37,7 @@ import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.StandardSQLTypeName; import feast.core.FeatureSetProto.EntitySpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.core.FeatureSetProto.FeatureSpec; import feast.ingestion.utils.StoreUtil; @@ -49,16 +50,20 @@ public class StoreUtilTest { @Test public void setupBigQuery_shouldCreateTable_givenValidFeatureSetSpec() { - FeatureSetSpec featureSetSpec = - FeatureSetSpec.newBuilder() - .setName("feature_set_1") - .setVersion(1) - .addEntities(EntitySpec.newBuilder().setName("entity_1").setValueType(INT32)) - .addFeatures(FeatureSpec.newBuilder().setName("feature_1").setValueType(INT32)) - .addFeatures(FeatureSpec.newBuilder().setName("feature_2").setValueType(STRING_LIST)) + FeatureSet featureSet = + FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("feature_set_1") + .setVersion(1) + .setProject("feast-project") + .addEntities(EntitySpec.newBuilder().setName("entity_1").setValueType(INT32)) + .addFeatures(FeatureSpec.newBuilder().setName("feature_1").setValueType(INT32)) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(STRING_LIST))) .build(); BigQuery mockedBigquery = Mockito.mock(BigQuery.class); - StoreUtil.setupBigQuery(featureSetSpec, "project-1", "dataset_1", 
mockedBigquery); + StoreUtil.setupBigQuery(featureSet, "project-1", "dataset_1", mockedBigquery); } @Test diff --git a/ingestion/src/test/java/feast/test/TestUtil.java b/ingestion/src/test/java/feast/test/TestUtil.java index d66ef4a97d..5c16d7e9e3 100644 --- a/ingestion/src/test/java/feast/test/TestUtil.java +++ b/ingestion/src/test/java/feast/test/TestUtil.java @@ -16,9 +16,11 @@ */ package feast.test; +import static feast.ingestion.utils.SpecUtil.getFeatureSetReference; + import com.google.protobuf.ByteString; import com.google.protobuf.util.Timestamps; -import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSet; import feast.ingestion.transform.WriteToStore; import feast.storage.RedisProto.RedisKey; import feast.types.FeatureRowProto.FeatureRow; @@ -166,31 +168,33 @@ public static void publishFeatureRowsToKafka( /** * Create a Feature Row with random value according to the FeatureSetSpec * - *

See {@link #createRandomFeatureRow(FeatureSetSpec, int)} + *

See {@link #createRandomFeatureRow(FeatureSet, int)} */ - public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec) { + public static FeatureRow createRandomFeatureRow(FeatureSet featureSet) { ThreadLocalRandom random = ThreadLocalRandom.current(); int randomStringSizeMaxSize = 12; - return createRandomFeatureRow(spec, random.nextInt(0, randomStringSizeMaxSize) + 4); + return createRandomFeatureRow(featureSet, random.nextInt(0, randomStringSizeMaxSize) + 4); } /** - * Create a Feature Row with random value according to the FeatureSetSpec. + * Create a Feature Row with random value according to the FeatureSet. * *

The Feature Row created contains fields according to the entities and features defined in - * FeatureSetSpec, matching the value type of the field, with randomized value for testing. + * FeatureSet, matching the value type of the field, with randomized value for testing. * - * @param spec {@link FeatureSetSpec} + * @param featureSet {@link FeatureSet} * @param randomStringSize number of characters for the generated random string * @return {@link FeatureRow} */ - public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec, int randomStringSize) { + public static FeatureRow createRandomFeatureRow(FeatureSet featureSet, int randomStringSize) { Builder builder = FeatureRow.newBuilder() - .setFeatureSet(spec.getName() + ":" + spec.getVersion()) + .setFeatureSet(getFeatureSetReference(featureSet)) .setEventTimestamp(Timestamps.fromMillis(System.currentTimeMillis())); - spec.getEntitiesList() + featureSet + .getSpec() + .getEntitiesList() .forEach( field -> { builder.addFields( @@ -200,7 +204,9 @@ public static FeatureRow createRandomFeatureRow(FeatureSetSpec spec, int randomS .build()); }); - spec.getFeaturesList() + featureSet + .getSpec() + .getFeaturesList() .forEach( field -> { builder.addFields( @@ -284,19 +290,21 @@ public static Value createRandomValue(ValueType.Enum type, int randomStringSize) } /** - * Create {@link RedisKey} from {@link FeatureSetSpec} and {@link FeatureRow}. + * Create {@link RedisKey} from {@link FeatureSet} and {@link FeatureRow}. * *

The entities in the created {@link RedisKey} will contain the value with matching field name * in the {@link FeatureRow} * - * @param spec {@link FeatureSetSpec} - * @param row {@link FeatureSetSpec} + * @param featureSet {@link FeatureSet} + * @param row {@link FeatureSet} * @return {@link RedisKey} */ - public static RedisKey createRedisKey(FeatureSetSpec spec, FeatureRow row) { + public static RedisKey createRedisKey(FeatureSet featureSet, FeatureRow row) { RedisKey.Builder builder = - RedisKey.newBuilder().setFeatureSet(spec.getName() + ":" + spec.getVersion()); - spec.getEntitiesList() + RedisKey.newBuilder().setFeatureSet(getFeatureSetReference(featureSet)); + featureSet + .getSpec() + .getEntitiesList() .forEach( entityField -> row.getFieldsList().stream() diff --git a/ingestion/src/test/resources/import-job-specs/invalid-empty.yaml b/ingestion/src/test/resources/import-job-specs/invalid-empty.yaml deleted file mode 100644 index 7a1bbc9607..0000000000 --- a/ingestion/src/test/resources/import-job-specs/invalid-empty.yaml +++ /dev/null @@ -1,2 +0,0 @@ - - \ No newline at end of file diff --git a/ingestion/src/test/resources/import-job-specs/invalid-source-spec-1.yaml b/ingestion/src/test/resources/import-job-specs/invalid-source-spec-1.yaml deleted file mode 100644 index 26c1ff28e5..0000000000 --- a/ingestion/src/test/resources/import-job-specs/invalid-source-spec-1.yaml +++ /dev/null @@ -1,34 +0,0 @@ -sourceSpec: - type: NON_EXISTENT_TYPE - options: - bootstrapServers: localhost:9092 - topics: topic1 - -entitySpec: - name: entity1 - description: description for entity1 - -featureSpecs: -- id: entity1.feature1 - name: feature1 - valueType: INT64 - entity: entity1 -- id: entity1.feature2 - name: feature2 - valueType: DOUBLE - entity: entity1 -- id: entity1.feature3 - name: feature3 - valueType: TIMESTAMP - entity: entity1 -- id: entity1.feature4 - name: feature4 - valueType: DOUBLE - entity: entity1 - -sinkStorageSpec: - id: storage1 - type: BIGQUERY - 
options: - datasetId: dataset1 - projectId: project1 diff --git a/ingestion/src/test/resources/import-job-specs/valid-1.yaml b/ingestion/src/test/resources/import-job-specs/valid-1.yaml deleted file mode 100644 index 80eaa0c5e8..0000000000 --- a/ingestion/src/test/resources/import-job-specs/valid-1.yaml +++ /dev/null @@ -1,34 +0,0 @@ -sourceSpec: - type: KAFKA - options: - bootstrapServers: localhost:9092 - topics: topic1 - -entitySpec: - name: entity1 - description: description for entity1 - -featureSpecs: -- id: entity1.feature1 - name: feature1 - valueType: INT64 - entity: entity1 -- id: entity1.feature2 - name: feature2 - valueType: DOUBLE - entity: entity1 -- id: entity1.feature3 - name: feature3 - valueType: TIMESTAMP - entity: entity1 -- id: entity1.feature4 - name: feature4 - valueType: DOUBLE - entity: entity1 - -sinkStorageSpec: - id: storage1 - type: BIGQUERY - options: - datasetId: dataset1 - projectId: project1 diff --git a/ingestion/src/test/resources/import-job-specs/valid-2.yaml b/ingestion/src/test/resources/import-job-specs/valid-2.yaml deleted file mode 100644 index cc8762d424..0000000000 --- a/ingestion/src/test/resources/import-job-specs/valid-2.yaml +++ /dev/null @@ -1,33 +0,0 @@ -sourceSpec: - type: KAFKA - options: - bootstrapServers: localhost:9092 - topics: topic1 - -entitySpec: - name: entity1 - description: description for entity1 - -featureSpecs: -- id: entity1.feature1 - name: feature1 - valueType: INT64 - entity: entity1 -- id: entity1.feature2 - name: feature2 - valueType: DOUBLE - entity: entity1 -- id: entity1.feature3 - name: feature3 - valueType: TIMESTAMP - entity: entity1 -- id: entity1.feature4 - name: feature4 - valueType: DOUBLE - entity: entity1 - -sinkStorageSpec: - id: storage1 - type: REDIS - options: - host: localhost diff --git a/ingestion/src/test/resources/import-specs/csv_to_store1.yaml b/ingestion/src/test/resources/import-specs/csv_to_store1.yaml deleted file mode 100644 index 57c8d4be5f..0000000000 --- 
a/ingestion/src/test/resources/import-specs/csv_to_store1.yaml +++ /dev/null @@ -1,16 +0,0 @@ ---- -type: file -options: - format: csv - path: # to be overwritten in tests -entities: - - testEntity -schema: - entityIdColumn: id - timestampValue: 2018-09-25T00:00:00.000Z - fields: - - name: timestamp - - name: id - - featureId: testEntity.testInt32 - - featureId: testEntity.testString - diff --git a/ingestion/src/test/resources/specs/importJobSpecs.yaml b/ingestion/src/test/resources/specs/importJobSpecs.yaml deleted file mode 100644 index 6f52cb30aa..0000000000 --- a/ingestion/src/test/resources/specs/importJobSpecs.yaml +++ /dev/null @@ -1,44 +0,0 @@ -sourceSpec: {} -sinkStorageSpec: - id: TEST_SERVING - type: serving.mock - options: {} -errorsStorageSpec: - id: errors - type: errors.mock - options: {} -entitySpec: - name: testEntity - description: This is a test entity - tags: [] -featureSpecs: - - id: testEntity.testInt64 - entity: testEntity - name: testInt64 - owner: feast@example.com - description: This is test feature of long - uri: https://example.com/ - valueType: INT64 - tags: [] - options: - nonsense: "I should be safely ignored" - - id: testEntity.testInt32 - entity: testEntity - name: testInt32 - owner: feast@example.com - description: This is test feature of type integer - uri: https://example.com/ - valueType: INT32 - tags: [] - options: - nonsense: "I should be safely ignored" - - id: testEntity.testString - entity: testEntity - name: testString - owner: feast@example.com - description: This is test feature of type string - uri: https://example.com/ - valueType: STRING - tags: [] - options: - nonsense: "I should be safely ignored" diff --git a/protos/feast/core/CoreService.proto b/protos/feast/core/CoreService.proto index 9a9eaa64fd..35b96e1789 100644 --- a/protos/feast/core/CoreService.proto +++ b/protos/feast/core/CoreService.proto @@ -58,11 +58,28 @@ service CoreService { // // If the changes are valid, core will return the given store 
configuration in response, and // start or update the necessary feature population jobs for the updated store. - rpc UpdateStore(UpdateStoreRequest) returns (UpdateStoreResponse); + rpc UpdateStore (UpdateStoreRequest) returns (UpdateStoreResponse); + + // Creates a project. Projects serve as namespaces within which resources like features will be + // created. Both feature set names as well as field names must be unique within a project. Project + // names themselves must be globally unique. + rpc CreateProject (CreateProjectRequest) returns (CreateProjectResponse); + + // Archives a project. Archived projects will continue to exist and function, but won't be visible + // through the Core API. Any existing ingestion or serving requests will continue to function, + // but will result in warning messages being logged. It is not possible to unarchive a project + // through the Core API + rpc ArchiveProject (ArchiveProjectRequest) returns (ArchiveProjectResponse); + + // Lists all projects active projects. + rpc ListProjects (ListProjectsRequest) returns (ListProjectsResponse); } // Request for a single feature set message GetFeatureSetRequest { + // Name of project the feature set belongs to (required) + string project = 3; + // Name of feature set (required). string name = 1; @@ -77,21 +94,35 @@ message GetFeatureSetResponse { // Retrieves details for all versions of a specific feature set message ListFeatureSetsRequest { + Filter filter = 1; + message Filter { - // Name of the desired feature set. Valid regex strings are allowed. + // Name of project that the feature sets belongs to. This can be one of + // - [project_name] + // - * + // If an asterisk is provided, filtering on projects will be disabled. All projects will + // be matched. It is NOT possible to provide an asterisk with a string in order to do + // pattern matching. + string project = 3; + + // Name of the desired feature set. Asterisks can be used as wildcards in the name. 
+ // Matching on names is only permitted if a specific project is defined. It is disallowed + // If the project name is set to "*" // e.g. - // - .* can be used to match all feature sets - // - my-project-.* can be used to match all features prefixed by "my-project" + // - * can be used to match all feature sets + // - my-feature-set* can be used to match all features prefixed by "my-feature-set" + // - my-feature-set-6 can be used to select a single feature set string feature_set_name = 1; - // Version of the desired feature set. Either a number or valid expression can be provided. - // e.g. - // - 1 will match version 1 exactly - // - >=1 will match all versions greater or equal to 1 - // - <10 will match all versions less than 10 + + + // Versions of the given feature sets that will be returned. + // Valid options for version: + // "latest": only the latest version is returned. + // "*": Subscribe to all versions + // [version number]: pin to a specific version. Project and feature set name must be + // explicitly defined if a specific version is pinned. 
string feature_set_version = 2; } - - Filter filter = 1; } message ListFeatureSetsResponse { @@ -133,7 +164,8 @@ message ApplyFeatureSetResponse { Status status = 2; } -message GetFeastCoreVersionRequest {} +message GetFeastCoreVersionRequest { +} message GetFeastCoreVersionResponse { string version = 1; @@ -153,4 +185,34 @@ message UpdateStoreResponse { } feast.core.Store store = 1; Status status = 2; +} + +// Request to create a project +message CreateProjectRequest { + // Name of project (required) + string name = 1; +} + +// Response for creation of a project +message CreateProjectResponse { +} + +// Request for the archival of a project +message ArchiveProjectRequest { + // Name of project to be archived + string name = 1; +} + +// Response for archival of a project +message ArchiveProjectResponse { +} + +// Request for listing of projects +message ListProjectsRequest { +} + +// Response for listing of projects +message ListProjectsResponse { + // List of project names (archived projects are filtered out) + repeated string projects = 1; } \ No newline at end of file diff --git a/protos/feast/core/FeatureSet.proto b/protos/feast/core/FeatureSet.proto index a5adf139bf..910cc375f7 100644 --- a/protos/feast/core/FeatureSet.proto +++ b/protos/feast/core/FeatureSet.proto @@ -15,9 +15,7 @@ // syntax = "proto3"; - package feast.core; - option java_package = "feast.core"; option java_outer_classname = "FeatureSetProto"; option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; @@ -30,16 +28,18 @@ import "google/protobuf/timestamp.proto"; message FeatureSet { // User-specified specifications of this feature set. FeatureSetSpec spec = 1; - // System-populated metadata for this feature set. FeatureSetMeta meta = 2; } message FeatureSetSpec { - // Name of the featureSet. Must be unique. + // Name of project that this feature set belongs to. + string project = 7; + + // Name of the feature set. Must be unique. string name = 1; - // FeatureSet version. 
+ // Feature set version. int32 version = 2; // List of entities contained within this featureSet. @@ -51,8 +51,8 @@ message FeatureSetSpec { // List of features contained within this featureSet. repeated FeatureSpec features = 4; - // Features in this feature set will only be retrieved if they are found - // after [time - max_age]. Missing or older feature values will be returned + // Features in this feature set will only be retrieved if they are found + // after [time - max_age]. Missing or older feature values will be returned // as nulls and indicated to end user google.protobuf.Duration max_age = 5; @@ -77,7 +77,6 @@ message FeatureSpec { feast.types.ValueType.Enum value_type = 2; } - message FeatureSetMeta { // Created timestamp of this specific feature set. google.protobuf.Timestamp created_timestamp = 1; @@ -95,4 +94,4 @@ enum FeatureSetStatus { STATUS_INVALID = 0; STATUS_PENDING = 1; STATUS_READY = 2; -} \ No newline at end of file +} diff --git a/protos/feast/core/Store.proto b/protos/feast/core/Store.proto index e1b8c581a3..bbb4ed8000 100644 --- a/protos/feast/core/Store.proto +++ b/protos/feast/core/Store.proto @@ -123,16 +123,30 @@ message Store { } message Subscription { - // Name of featureSet to subscribe to. This field supports any valid basic POSIX regex, - // e.g. customer_.* or .* - // https://www.regular-expressions.info/posix.html + // Name of project that the feature sets belongs to. This can be one of + // - [project_name] + // - * + // If an asterisk is provided, filtering on projects will be disabled. All projects will + // be matched. It is NOT possible to provide an asterisk with a string in order to do + // pattern matching. + string project = 3; + + + // Name of the desired feature set. Asterisks can be used as wildcards in the name. + // Matching on names is only permitted if a specific project is defined. It is disallowed + // If the project name is set to "*" + // e.g. 
+ // - * can be used to match all feature sets + // - my-feature-set* can be used to match all features prefixed by "my-feature-set" + // - my-feature-set-6 can be used to select a single feature set string name = 1; - // Versions of the given featureSet that will be ingested into this store. + // Versions of the given feature sets that will be returned. // Valid options for version: - // latest: only subscribe to latest version of feature set - // [version number]: pin to a specific version - // >[version number]: subscribe to all versions larger than or equal to [version number] + // "latest": only the latest version is returned. + // "*": Subscribe to all versions + // [version number]: pin to a specific version. Project and feature set name must be + // explicitly defined if a specific version is pinned. string version = 2; } diff --git a/protos/feast/serving/ServingService.proto b/protos/feast/serving/ServingService.proto index fe896253a9..5145670ec9 100644 --- a/protos/feast/serving/ServingService.proto +++ b/protos/feast/serving/ServingService.proto @@ -62,15 +62,15 @@ message GetFeastServingInfoResponse { string job_staging_location = 10; } -message FeatureSetRequest { - // Feature set name - string name = 1; +message FeatureReference { + // Project name + string project = 1; - // Feature set version - int32 version = 2; + // Feature name + string name = 2; - // Features that should be retrieved from this feature set - repeated string feature_names = 3; + // Feature version + int32 version = 3; // The features will be retrieved if: // entity_timestamp - max_age <= event_timestamp <= entity_timestamp @@ -81,8 +81,8 @@ message FeatureSetRequest { } message GetOnlineFeaturesRequest { - // List of feature sets and their features that are being retrieved - repeated FeatureSetRequest feature_sets = 1; + // List of features that are being retrieved + repeated FeatureReference features = 4; // List of entity rows, containing entity id and timestamp data. 
// Used during retrieval of feature rows and for joining feature @@ -104,8 +104,8 @@ message GetOnlineFeaturesRequest { } message GetBatchFeaturesRequest { - // List of feature sets and their features that are being retrieved. - repeated FeatureSetRequest feature_sets = 1; + // List of features that are being retrieved + repeated FeatureReference features = 3; // Source of the entity dataset containing the timestamps and entity keys to retrieve // features for. diff --git a/sdk/go/README.md b/sdk/go/README.md new file mode 100644 index 0000000000..6084f90993 --- /dev/null +++ b/sdk/go/README.md @@ -0,0 +1,49 @@ +# Feast Golang SDK + +The Feast golang SDK currently only supports retrieval from online stores. + +## Quickstart +```{go} +import ( + "context" + feast "github.com/gojek/feast/sdk/go" +) + +func main() { + cli, err := feast.NewGrpcClient("localhost", 6565) + if err != nil { + panic(err) + } + + ctx := context.Background() + req := feast.OnlineFeaturesRequest{ + Features: []string{"my_project_1/feature1:1", "my_project_2/feature1:1", "my_project_4/feature3", "feature2:2", "feature2"}, + Entities: []feast.Row{ + {"entity1": feast.Int64Val(1), "entity2": feast.StrVal("bob")}, + {"entity1": feast.Int64Val(1), "entity2": feast.StrVal("annie")}, + {"entity1": feast.Int64Val(1), "entity2": feast.StrVal("jane")}, + }, + Project: "my_project_3", + } + + resp, err := cli.GetOnlineFeatures(ctx, &req) + if err != nil { + panic(err) + } + + // returns a list of rows (map[string]featureValue) + out := resp.Rows() +} + +``` + +If all features retrieved are of a single type, Feast provides convenience functions to retrieve your features as a vector of feature values: +```{go} +arr, err := resp.Int64Arrays( + []string{"my_project_1/feature1:1", + "my_project_2/feature1:1", + "my_project_4/feature3", + "feature2:2", + "feature2"}, // order of features + []int64{1,2,3,4,5}) // fillNa values +``` diff --git a/sdk/go/go.mod b/sdk/go/go.mod index 7c029da109..0def759a4f 100644 
--- a/sdk/go/go.mod +++ b/sdk/go/go.mod @@ -4,7 +4,7 @@ go 1.13 require ( github.com/golang/protobuf v1.3.2 - github.com/google/go-cmp v0.3.0 + github.com/google/go-cmp v0.3.1 github.com/opentracing/opentracing-go v1.1.0 github.com/stretchr/testify v1.4.0 // indirect go.opencensus.io v0.22.1 diff --git a/sdk/go/go.sum b/sdk/go/go.sum index 56df48673e..04cf3d8d7f 100644 --- a/sdk/go/go.sum +++ b/sdk/go/go.sum @@ -16,6 +16,8 @@ github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= diff --git a/sdk/go/protos/feast/core/CoreService.pb.go b/sdk/go/protos/feast/core/CoreService.pb.go index e0af92433b..45ad9ed79a 100644 --- a/sdk/go/protos/feast/core/CoreService.pb.go +++ b/sdk/go/protos/feast/core/CoreService.pb.go @@ -84,6 +84,8 @@ func (UpdateStoreResponse_Status) EnumDescriptor() ([]byte, []int) { // Request for a single feature set type GetFeatureSetRequest struct { + // Name of project the feature set belongs to (required) + Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` // Name of feature set (required). Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Version of feature set (optional). If omitted then latest feature set will be returned. 
@@ -118,6 +120,13 @@ func (m *GetFeatureSetRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetFeatureSetRequest proto.InternalMessageInfo +func (m *GetFeatureSetRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + func (m *GetFeatureSetRequest) GetName() string { if m != nil { return m.Name @@ -134,10 +143,10 @@ func (m *GetFeatureSetRequest) GetVersion() int32 { // Response containing a single feature set type GetFeatureSetResponse struct { - FeatureSet *FeatureSetSpec `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + FeatureSet *FeatureSet `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *GetFeatureSetResponse) Reset() { *m = GetFeatureSetResponse{} } @@ -165,7 +174,7 @@ func (m *GetFeatureSetResponse) XXX_DiscardUnknown() { var xxx_messageInfo_GetFeatureSetResponse proto.InternalMessageInfo -func (m *GetFeatureSetResponse) GetFeatureSet() *FeatureSetSpec { +func (m *GetFeatureSetResponse) GetFeatureSet() *FeatureSet { if m != nil { return m.FeatureSet } @@ -213,16 +222,27 @@ func (m *ListFeatureSetsRequest) GetFilter() *ListFeatureSetsRequest_Filter { } type ListFeatureSetsRequest_Filter struct { - // Name of the desired feature set. Valid regex strings are allowed. + // Name of project that the feature sets belongs to. This can be one of + // - [project_name] + // - * + // If an asterisk is provided, filtering on projects will be disabled. All projects will + // be matched. It is NOT possible to provide an asterisk with a string in order to do + // pattern matching. + Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` + // Name of the desired feature set. 
Asterisks can be used as wildcards in the name. + // Matching on names is only permitted if a specific project is defined. It is disallowed + // If the project name is set to "*" // e.g. - // - .* can be used to match all feature sets - // - my-project-.* can be used to match all features prefixed by "my-project" + // - * can be used to match all feature sets + // - my-feature-set* can be used to match all features prefixed by "my-feature-set" + // - my-feature-set-6 can be used to select a single feature set FeatureSetName string `protobuf:"bytes,1,opt,name=feature_set_name,json=featureSetName,proto3" json:"feature_set_name,omitempty"` - // Version of the desired feature set. Either a number or valid expression can be provided. - // e.g. - // - 1 will match version 1 exactly - // - >=1 will match all versions greater or equal to 1 - // - <10 will match all versions less than 10 + // Versions of the given feature sets that will be returned. + // Valid options for version: + // "latest": only the latest version is returned. + // "*": Subscribe to all versions + // [version number]: pin to a specific version. Project and feature set name must be + // explicitly defined if a specific version is pinned. 
FeatureSetVersion string `protobuf:"bytes,2,opt,name=feature_set_version,json=featureSetVersion,proto3" json:"feature_set_version,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -254,6 +274,13 @@ func (m *ListFeatureSetsRequest_Filter) XXX_DiscardUnknown() { var xxx_messageInfo_ListFeatureSetsRequest_Filter proto.InternalMessageInfo +func (m *ListFeatureSetsRequest_Filter) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + func (m *ListFeatureSetsRequest_Filter) GetFeatureSetName() string { if m != nil { return m.FeatureSetName @@ -269,10 +296,10 @@ func (m *ListFeatureSetsRequest_Filter) GetFeatureSetVersion() string { } type ListFeatureSetsResponse struct { - FeatureSets []*FeatureSetSpec `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + FeatureSets []*FeatureSet `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ListFeatureSetsResponse) Reset() { *m = ListFeatureSetsResponse{} } @@ -300,7 +327,7 @@ func (m *ListFeatureSetsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ListFeatureSetsResponse proto.InternalMessageInfo -func (m *ListFeatureSetsResponse) GetFeatureSets() []*FeatureSetSpec { +func (m *ListFeatureSetsResponse) GetFeatureSets() []*FeatureSet { if m != nil { return m.FeatureSets } @@ -427,10 +454,10 @@ func (m *ListStoresResponse) GetStore() []*Store { type ApplyFeatureSetRequest struct { // Feature set version and source will be ignored - FeatureSet *FeatureSetSpec `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache 
int32 `json:"-"` + FeatureSet *FeatureSet `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ApplyFeatureSetRequest) Reset() { *m = ApplyFeatureSetRequest{} } @@ -458,7 +485,7 @@ func (m *ApplyFeatureSetRequest) XXX_DiscardUnknown() { var xxx_messageInfo_ApplyFeatureSetRequest proto.InternalMessageInfo -func (m *ApplyFeatureSetRequest) GetFeatureSet() *FeatureSetSpec { +func (m *ApplyFeatureSetRequest) GetFeatureSet() *FeatureSet { if m != nil { return m.FeatureSet } @@ -467,7 +494,7 @@ func (m *ApplyFeatureSetRequest) GetFeatureSet() *FeatureSetSpec { type ApplyFeatureSetResponse struct { // Feature set response has been enriched with version and source information - FeatureSet *FeatureSetSpec `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + FeatureSet *FeatureSet `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` Status ApplyFeatureSetResponse_Status `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.ApplyFeatureSetResponse_Status" json:"status,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -499,7 +526,7 @@ func (m *ApplyFeatureSetResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ApplyFeatureSetResponse proto.InternalMessageInfo -func (m *ApplyFeatureSetResponse) GetFeatureSet() *FeatureSetSpec { +func (m *ApplyFeatureSetResponse) GetFeatureSet() *FeatureSet { if m != nil { return m.FeatureSet } @@ -669,6 +696,225 @@ func (m *UpdateStoreResponse) GetStatus() UpdateStoreResponse_Status { return UpdateStoreResponse_NO_CHANGE } +// Request to create a project +type CreateProjectRequest struct { + // Name of project (required) + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + 
XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProjectRequest) Reset() { *m = CreateProjectRequest{} } +func (m *CreateProjectRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProjectRequest) ProtoMessage() {} +func (*CreateProjectRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{12} +} + +func (m *CreateProjectRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProjectRequest.Unmarshal(m, b) +} +func (m *CreateProjectRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProjectRequest.Marshal(b, m, deterministic) +} +func (m *CreateProjectRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProjectRequest.Merge(m, src) +} +func (m *CreateProjectRequest) XXX_Size() int { + return xxx_messageInfo_CreateProjectRequest.Size(m) +} +func (m *CreateProjectRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProjectRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProjectRequest proto.InternalMessageInfo + +func (m *CreateProjectRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Response for creation of a project +type CreateProjectResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProjectResponse) Reset() { *m = CreateProjectResponse{} } +func (m *CreateProjectResponse) String() string { return proto.CompactTextString(m) } +func (*CreateProjectResponse) ProtoMessage() {} +func (*CreateProjectResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{13} +} + +func (m *CreateProjectResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProjectResponse.Unmarshal(m, b) +} +func (m *CreateProjectResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_CreateProjectResponse.Marshal(b, m, deterministic) +} +func (m *CreateProjectResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProjectResponse.Merge(m, src) +} +func (m *CreateProjectResponse) XXX_Size() int { + return xxx_messageInfo_CreateProjectResponse.Size(m) +} +func (m *CreateProjectResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProjectResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProjectResponse proto.InternalMessageInfo + +// Request for the archival of a project +type ArchiveProjectRequest struct { + // Name of project to be archived + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArchiveProjectRequest) Reset() { *m = ArchiveProjectRequest{} } +func (m *ArchiveProjectRequest) String() string { return proto.CompactTextString(m) } +func (*ArchiveProjectRequest) ProtoMessage() {} +func (*ArchiveProjectRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{14} +} + +func (m *ArchiveProjectRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArchiveProjectRequest.Unmarshal(m, b) +} +func (m *ArchiveProjectRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArchiveProjectRequest.Marshal(b, m, deterministic) +} +func (m *ArchiveProjectRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArchiveProjectRequest.Merge(m, src) +} +func (m *ArchiveProjectRequest) XXX_Size() int { + return xxx_messageInfo_ArchiveProjectRequest.Size(m) +} +func (m *ArchiveProjectRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ArchiveProjectRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ArchiveProjectRequest proto.InternalMessageInfo + +func (m *ArchiveProjectRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Response for archival of a project 
+type ArchiveProjectResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArchiveProjectResponse) Reset() { *m = ArchiveProjectResponse{} } +func (m *ArchiveProjectResponse) String() string { return proto.CompactTextString(m) } +func (*ArchiveProjectResponse) ProtoMessage() {} +func (*ArchiveProjectResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{15} +} + +func (m *ArchiveProjectResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArchiveProjectResponse.Unmarshal(m, b) +} +func (m *ArchiveProjectResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArchiveProjectResponse.Marshal(b, m, deterministic) +} +func (m *ArchiveProjectResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArchiveProjectResponse.Merge(m, src) +} +func (m *ArchiveProjectResponse) XXX_Size() int { + return xxx_messageInfo_ArchiveProjectResponse.Size(m) +} +func (m *ArchiveProjectResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ArchiveProjectResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ArchiveProjectResponse proto.InternalMessageInfo + +// Request for listing of projects +type ListProjectsRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProjectsRequest) Reset() { *m = ListProjectsRequest{} } +func (m *ListProjectsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProjectsRequest) ProtoMessage() {} +func (*ListProjectsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{16} +} + +func (m *ListProjectsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProjectsRequest.Unmarshal(m, b) +} +func (m *ListProjectsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_ListProjectsRequest.Marshal(b, m, deterministic) +} +func (m *ListProjectsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProjectsRequest.Merge(m, src) +} +func (m *ListProjectsRequest) XXX_Size() int { + return xxx_messageInfo_ListProjectsRequest.Size(m) +} +func (m *ListProjectsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProjectsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProjectsRequest proto.InternalMessageInfo + +// Response for listing of projects +type ListProjectsResponse struct { + // List of project names (archived projects are filtered out) + Projects []string `protobuf:"bytes,1,rep,name=projects,proto3" json:"projects,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProjectsResponse) Reset() { *m = ListProjectsResponse{} } +func (m *ListProjectsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProjectsResponse) ProtoMessage() {} +func (*ListProjectsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_d9be266444105411, []int{17} +} + +func (m *ListProjectsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProjectsResponse.Unmarshal(m, b) +} +func (m *ListProjectsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProjectsResponse.Marshal(b, m, deterministic) +} +func (m *ListProjectsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProjectsResponse.Merge(m, src) +} +func (m *ListProjectsResponse) XXX_Size() int { + return xxx_messageInfo_ListProjectsResponse.Size(m) +} +func (m *ListProjectsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProjectsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProjectsResponse proto.InternalMessageInfo + +func (m *ListProjectsResponse) GetProjects() []string { + if m != nil { + return m.Projects + } + return nil +} + func init() { 
proto.RegisterEnum("feast.core.ApplyFeatureSetResponse_Status", ApplyFeatureSetResponse_Status_name, ApplyFeatureSetResponse_Status_value) proto.RegisterEnum("feast.core.UpdateStoreResponse_Status", UpdateStoreResponse_Status_name, UpdateStoreResponse_Status_value) @@ -686,52 +932,66 @@ func init() { proto.RegisterType((*GetFeastCoreVersionResponse)(nil), "feast.core.GetFeastCoreVersionResponse") proto.RegisterType((*UpdateStoreRequest)(nil), "feast.core.UpdateStoreRequest") proto.RegisterType((*UpdateStoreResponse)(nil), "feast.core.UpdateStoreResponse") + proto.RegisterType((*CreateProjectRequest)(nil), "feast.core.CreateProjectRequest") + proto.RegisterType((*CreateProjectResponse)(nil), "feast.core.CreateProjectResponse") + proto.RegisterType((*ArchiveProjectRequest)(nil), "feast.core.ArchiveProjectRequest") + proto.RegisterType((*ArchiveProjectResponse)(nil), "feast.core.ArchiveProjectResponse") + proto.RegisterType((*ListProjectsRequest)(nil), "feast.core.ListProjectsRequest") + proto.RegisterType((*ListProjectsResponse)(nil), "feast.core.ListProjectsResponse") } func init() { proto.RegisterFile("feast/core/CoreService.proto", fileDescriptor_d9be266444105411) } var fileDescriptor_d9be266444105411 = []byte{ - // 636 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdd, 0x72, 0xd2, 0x40, - 0x14, 0x36, 0xb5, 0xa5, 0xc3, 0x89, 0xad, 0xb0, 0x28, 0x65, 0x52, 0xac, 0x18, 0x3b, 0x16, 0xbd, - 0x48, 0x66, 0xf0, 0xc2, 0x0b, 0xc5, 0x19, 0xfe, 0x5a, 0x67, 0x74, 0xa0, 0xb3, 0x80, 0xe3, 0xf4, - 0x86, 0x01, 0xba, 0x20, 0xb6, 0x65, 0x63, 0x76, 0xe9, 0x8c, 0x6f, 0xe3, 0x85, 0xef, 0xe1, 0x03, - 0xf8, 0x52, 0x4e, 0xb2, 0x5b, 0xb2, 0x09, 0x21, 0x5c, 0xe8, 0x5d, 0xb2, 0xe7, 0x3b, 0xdf, 0x9e, - 0xfd, 0xce, 0x1f, 0x14, 0x27, 0x64, 0xc8, 0xb8, 0x3d, 0xa6, 0x2e, 0xb1, 0x1b, 0xd4, 0x25, 0x5d, - 0xe2, 0xde, 0xce, 0xc6, 0xc4, 0x72, 0x5c, 0xca, 0x29, 0x02, 0xdf, 0x6a, 0x79, 0x56, 0xe3, 0x50, - 0x41, 0x9e, 0x92, 0x21, 0x5f, 
0x78, 0x60, 0x2e, 0x80, 0x46, 0x5e, 0x31, 0x76, 0x39, 0x75, 0x25, - 0x81, 0xd9, 0x84, 0x47, 0x67, 0x84, 0x07, 0x70, 0x4c, 0xbe, 0x2f, 0x08, 0xe3, 0x08, 0xc1, 0xf6, - 0x7c, 0x78, 0x43, 0x0a, 0x5a, 0x49, 0x2b, 0xa7, 0xb1, 0xff, 0x8d, 0x0a, 0xb0, 0x7b, 0x4b, 0x5c, - 0x36, 0xa3, 0xf3, 0xc2, 0x56, 0x49, 0x2b, 0xef, 0xe0, 0xbb, 0x5f, 0xb3, 0x07, 0x8f, 0x23, 0x2c, - 0xcc, 0xa1, 0x73, 0x46, 0xd0, 0x5b, 0xd0, 0x27, 0xe2, 0x74, 0xc0, 0x08, 0xf7, 0xd9, 0xf4, 0x8a, - 0x61, 0x05, 0x51, 0x5b, 0x81, 0x53, 0xd7, 0x21, 0x63, 0x0c, 0x93, 0xe5, 0xbf, 0xf9, 0x5b, 0x83, - 0xfc, 0xa7, 0x19, 0x53, 0x78, 0xd9, 0x5d, 0x78, 0x35, 0x48, 0x4d, 0x66, 0xd7, 0x9c, 0xb8, 0x92, - 0xf2, 0xa5, 0x4a, 0x19, 0xef, 0x63, 0x9d, 0xfa, 0x0e, 0x58, 0x3a, 0x1a, 0x23, 0x48, 0x89, 0x13, - 0x54, 0x86, 0x8c, 0x12, 0xe4, 0x40, 0x79, 0xf7, 0x7e, 0x10, 0x4d, 0xdb, 0x53, 0xc0, 0x82, 0x9c, - 0x8a, 0x54, 0xd5, 0x48, 0xe3, 0x6c, 0x00, 0xfe, 0x2c, 0x75, 0xf9, 0x02, 0x07, 0x2b, 0xc1, 0x48, - 0x65, 0xaa, 0xf0, 0x40, 0xa1, 0x62, 0x05, 0xad, 0x74, 0x7f, 0x83, 0x34, 0x7a, 0xc0, 0xcf, 0x4c, - 0x0a, 0x59, 0x8f, 0xd9, 0x4f, 0xe5, 0x52, 0x95, 0x77, 0x11, 0x55, 0x8e, 0xa3, 0xaa, 0x84, 0xe0, - 0x51, 0x41, 0x8a, 0x4b, 0x41, 0x62, 0x92, 0x6f, 0x56, 0x01, 0xa9, 0x0c, 0xf2, 0x15, 0x27, 0xb0, - 0xc3, 0xbc, 0x13, 0x19, 0x7e, 0x56, 0xbd, 0xd0, 0x87, 0x62, 0x61, 0x37, 0xfb, 0x90, 0xaf, 0x39, - 0xce, 0xf5, 0x8f, 0xd5, 0x4a, 0xfb, 0xa7, 0x12, 0xf9, 0xa3, 0xc1, 0xc1, 0x0a, 0xef, 0x7f, 0xa8, - 0x3d, 0x54, 0x87, 0x14, 0xe3, 0x43, 0xbe, 0x60, 0x7e, 0x72, 0xf7, 0x2b, 0xaf, 0x54, 0xbf, 0x35, - 0x37, 0x5a, 0x5d, 0xdf, 0x03, 0x4b, 0x4f, 0xd3, 0x86, 0x94, 0x38, 0x41, 0x7b, 0x90, 0x6e, 0x77, - 0x06, 0x8d, 0x0f, 0xb5, 0xf6, 0x59, 0x2b, 0x73, 0x0f, 0xe9, 0xb0, 0xdb, 0xc0, 0xad, 0x5a, 0xaf, - 0xd5, 0xcc, 0x68, 0x28, 0x0d, 0x3b, 0x2d, 0x8c, 0x3b, 0x38, 0xb3, 0x65, 0x16, 0xc1, 0x10, 0x6d, - 0xc4, 0xb8, 0xd7, 0xea, 0xb2, 0x8a, 0xa4, 0x50, 0xe6, 0x1b, 0x38, 0x8c, 0xb5, 0xca, 0xe7, 0x2a, - 0xdd, 0x29, 0xf2, 0xb6, 0xec, 0xce, 0x2a, 0xa0, 0xbe, 0x73, 0x39, 
0xe4, 0x44, 0x64, 0x44, 0xea, - 0xae, 0xa4, 0x4e, 0x4b, 0x4c, 0xdd, 0x2f, 0x0d, 0x72, 0x21, 0xff, 0xd5, 0xdc, 0x27, 0x12, 0xa0, - 0xf7, 0x11, 0x2d, 0x5f, 0xa8, 0xc8, 0x18, 0xe6, 0xa8, 0x8e, 0xc7, 0x09, 0x3a, 0xf6, 0xcf, 0x9b, - 0x42, 0xc7, 0xca, 0xcf, 0x6d, 0xd0, 0x95, 0x01, 0x89, 0x26, 0x90, 0x8b, 0x91, 0x0b, 0x85, 0x2e, - 0x5f, 0xaf, 0xb6, 0x71, 0xb2, 0x11, 0x27, 0x65, 0xe8, 0xc1, 0x5e, 0x68, 0xf6, 0xa1, 0xd2, 0xaa, - 0x67, 0xb8, 0xe4, 0x8d, 0x67, 0x09, 0x08, 0xc9, 0x7a, 0x01, 0x0f, 0x23, 0x93, 0x03, 0x99, 0x9b, - 0x67, 0x9c, 0xf1, 0x3c, 0x11, 0x23, 0xb9, 0x3f, 0x02, 0x04, 0xad, 0x8c, 0x9e, 0x24, 0x0e, 0x09, - 0xe3, 0x68, 0x9d, 0x39, 0x08, 0x34, 0xd2, 0x0e, 0xe1, 0x40, 0xe3, 0xbb, 0x3e, 0x1c, 0xe8, 0xba, - 0x0e, 0x6e, 0x83, 0xae, 0x94, 0x07, 0x3a, 0x5a, 0x5b, 0x37, 0x82, 0xf3, 0xe9, 0x86, 0xba, 0xaa, - 0x77, 0x40, 0xd9, 0x97, 0xf5, 0x8c, 0x52, 0x2d, 0xe7, 0xde, 0x32, 0xbc, 0xb0, 0xa7, 0x33, 0xfe, - 0x75, 0x31, 0xb2, 0xc6, 0xf4, 0xc6, 0x9e, 0xd2, 0x6f, 0xe4, 0xca, 0x16, 0x5b, 0x93, 0x5d, 0x5e, - 0xd9, 0x53, 0x6a, 0xfb, 0x1b, 0x93, 0xd9, 0xc1, 0x26, 0x1d, 0xa5, 0xfc, 0xa3, 0xd7, 0x7f, 0x03, - 0x00, 0x00, 0xff, 0xff, 0xc7, 0xc1, 0x10, 0xf1, 0xa5, 0x07, 0x00, 0x00, + // 762 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xef, 0x4e, 0x13, 0x4f, + 0x14, 0xfd, 0x2d, 0xfc, 0x28, 0xf6, 0x16, 0xb0, 0x4c, 0x69, 0x69, 0x16, 0x84, 0x3a, 0x12, 0x41, + 0x4c, 0x76, 0x93, 0xfa, 0x81, 0x98, 0x88, 0x49, 0x29, 0x05, 0x13, 0x4d, 0x29, 0x0b, 0x68, 0xc2, + 0x07, 0x49, 0x29, 0x53, 0x28, 0xff, 0xa6, 0xee, 0x4c, 0x49, 0x4c, 0x7c, 0x1a, 0xe3, 0xbb, 0xf8, + 0x0c, 0xbe, 0x8d, 0xd9, 0x9d, 0x69, 0x77, 0x66, 0xba, 0xdd, 0x1a, 0xfd, 0xd6, 0xbd, 0x73, 0xee, + 0xd9, 0xbb, 0xe7, 0xde, 0x73, 0x3b, 0xb0, 0xdc, 0x26, 0x4d, 0xc6, 0xdd, 0x16, 0xf5, 0x89, 0x5b, + 0xa5, 0x3e, 0x39, 0x22, 0xfe, 0x43, 0xa7, 0x45, 0x9c, 0xae, 0x4f, 0x39, 0x45, 0x10, 0x9e, 0x3a, + 0xc1, 0xa9, 0xbd, 0xa4, 0x20, 0xf7, 0x48, 0x93, 0xf7, 0x02, 0x30, 0x17, 0x40, 
0xbb, 0xa0, 0x1c, + 0x1e, 0x71, 0xea, 0x4b, 0x02, 0xfc, 0x19, 0x16, 0xf6, 0x09, 0x8f, 0xe0, 0x1e, 0xf9, 0xd2, 0x23, + 0x8c, 0xa3, 0x22, 0x4c, 0x77, 0x7d, 0x7a, 0x4d, 0x5a, 0xbc, 0x38, 0x59, 0xb2, 0x36, 0xd2, 0x5e, + 0xff, 0x11, 0x21, 0xf8, 0xff, 0xbe, 0x79, 0x47, 0x8a, 0x56, 0x18, 0x0e, 0x7f, 0x07, 0xe8, 0x07, + 0xe2, 0xb3, 0x0e, 0xbd, 0x2f, 0x4e, 0x94, 0xac, 0x8d, 0x29, 0xaf, 0xff, 0x88, 0x1b, 0x90, 0x37, + 0xf8, 0x59, 0x97, 0xde, 0x33, 0x82, 0xb6, 0x20, 0xd3, 0x16, 0xd1, 0x33, 0x46, 0x78, 0xc8, 0x96, + 0x29, 0x17, 0x9c, 0xe8, 0x7b, 0x1c, 0x25, 0x09, 0xda, 0x83, 0xdf, 0xf8, 0x97, 0x05, 0x85, 0x0f, + 0x1d, 0xa6, 0x70, 0xb2, 0x7e, 0xd1, 0x15, 0x48, 0xb5, 0x3b, 0xb7, 0x9c, 0xf8, 0x92, 0xee, 0x85, + 0x4a, 0x17, 0x9f, 0xe3, 0xec, 0x85, 0x09, 0x9e, 0x4c, 0xb4, 0xbf, 0x41, 0x4a, 0x44, 0x12, 0x14, + 0xd8, 0x80, 0xac, 0x52, 0xfa, 0x99, 0xa2, 0xc6, 0x5c, 0x54, 0x67, 0x3d, 0xd0, 0xc5, 0x81, 0x9c, + 0x8a, 0x54, 0x35, 0x4a, 0x7b, 0xf3, 0x11, 0xf8, 0xa3, 0x54, 0xeb, 0x18, 0x16, 0x87, 0xca, 0x94, + 0x7a, 0xbd, 0x86, 0x19, 0x85, 0x8a, 0x15, 0xad, 0xd2, 0x64, 0x82, 0x60, 0x99, 0x88, 0x9b, 0x61, + 0x0a, 0xf3, 0x01, 0x6b, 0xd8, 0xf6, 0x81, 0x56, 0x6f, 0x0c, 0xad, 0xd6, 0x4c, 0xad, 0x34, 0xb8, + 0x29, 0xd3, 0xf2, 0x40, 0xa6, 0x98, 0x71, 0xc0, 0xdb, 0x80, 0x54, 0x06, 0xf9, 0x05, 0xeb, 0x30, + 0xc5, 0x82, 0x88, 0x2c, 0x7d, 0x5e, 0x7d, 0x61, 0x08, 0xf5, 0xc4, 0x39, 0x3e, 0x84, 0x42, 0xa5, + 0xdb, 0xbd, 0xfd, 0x3a, 0x3c, 0x95, 0x7f, 0x3d, 0x34, 0x3f, 0x2d, 0x58, 0x1c, 0xe2, 0xfc, 0xc7, + 0x49, 0x44, 0x3b, 0x90, 0x62, 0xbc, 0xc9, 0x7b, 0x2c, 0x6c, 0xe8, 0x5c, 0x79, 0x53, 0xcd, 0x19, + 0xf1, 0x36, 0xe7, 0x28, 0xcc, 0xf0, 0x64, 0x26, 0x76, 0x21, 0x25, 0x22, 0x68, 0x16, 0xd2, 0xf5, + 0x83, 0xb3, 0xea, 0xbb, 0x4a, 0x7d, 0xbf, 0x96, 0xfd, 0x0f, 0x65, 0x60, 0xba, 0xea, 0xd5, 0x2a, + 0xc7, 0xb5, 0xdd, 0xac, 0x85, 0xd2, 0x30, 0x55, 0xf3, 0xbc, 0x03, 0x2f, 0x3b, 0x81, 0x97, 0xc1, + 0x16, 0x86, 0x62, 0x3c, 0x58, 0x07, 0x72, 0x72, 0xa4, 0x40, 0x78, 0x0b, 0x96, 0x62, 0x4f, 0xe5, + 0xa7, 0x2a, 0x3e, 
0x15, 0xfd, 0x1a, 0xf8, 0x74, 0x1b, 0xd0, 0x49, 0xf7, 0xa2, 0xc9, 0x89, 0xe8, + 0x84, 0xd4, 0x5b, 0x69, 0x99, 0x95, 0xd8, 0xb2, 0x1f, 0x16, 0xe4, 0xb4, 0xfc, 0xe1, 0x9e, 0x27, + 0x12, 0xa0, 0xb7, 0x86, 0x96, 0xcf, 0x55, 0x64, 0x0c, 0xb3, 0xa9, 0xe3, 0x5a, 0x82, 0x8e, 0x27, + 0x8d, 0x5d, 0xa1, 0x23, 0xde, 0x84, 0x85, 0xaa, 0x4f, 0x9a, 0x9c, 0x34, 0x84, 0x95, 0xfb, 0xdf, + 0x19, 0x37, 0xc4, 0x8b, 0x90, 0x37, 0xb0, 0xe2, 0xcd, 0xf8, 0x25, 0xe4, 0x2b, 0x7e, 0xeb, 0xaa, + 0xf3, 0xf0, 0x27, 0x2c, 0x45, 0x28, 0x98, 0x60, 0x49, 0x93, 0x87, 0x5c, 0x60, 0x12, 0x19, 0xee, + 0x1b, 0x0d, 0x97, 0x61, 0x41, 0x0f, 0x4b, 0x25, 0x6d, 0x78, 0x24, 0xf7, 0x8f, 0xf0, 0x7e, 0xda, + 0x1b, 0x3c, 0x97, 0xbf, 0xa7, 0x20, 0xa3, 0xfc, 0x37, 0xa0, 0x36, 0xe4, 0x62, 0xa6, 0x00, 0x69, + 0x9a, 0x8e, 0x1e, 0x22, 0x7b, 0x7d, 0x2c, 0x4e, 0xd6, 0x74, 0x0c, 0xb3, 0xda, 0x72, 0x47, 0xa5, + 0xe1, 0x4c, 0xdd, 0xc1, 0xf6, 0xd3, 0x04, 0x84, 0x64, 0x3d, 0x85, 0xc7, 0xc6, 0x12, 0x44, 0x78, + 0xfc, 0x22, 0xb7, 0x9f, 0x25, 0x62, 0x24, 0xf7, 0x7b, 0x80, 0x68, 0x33, 0xa1, 0x27, 0x89, 0x3b, + 0xcf, 0x5e, 0x19, 0x75, 0x1c, 0x15, 0x6a, 0xb8, 0x5c, 0x2f, 0x34, 0x7e, 0x89, 0xe9, 0x85, 0x8e, + 0x5a, 0x4a, 0x75, 0xc8, 0x28, 0x53, 0x8f, 0x56, 0x46, 0xda, 0x41, 0x70, 0xae, 0x8e, 0xb1, 0x4b, + 0xd0, 0x2a, 0x6d, 0x9a, 0xf5, 0x56, 0xc5, 0x99, 0x42, 0x6f, 0x55, 0xac, 0x15, 0xd0, 0x27, 0x98, + 0xd3, 0xa7, 0x1b, 0x69, 0x49, 0xb1, 0x36, 0xb1, 0x71, 0x12, 0x44, 0x12, 0x1f, 0xc2, 0x8c, 0xea, + 0x02, 0xb4, 0x6a, 0xb6, 0xc2, 0xb0, 0x8d, 0x5d, 0x1a, 0x0d, 0x10, 0x94, 0x3b, 0x07, 0xa0, 0x5c, + 0x96, 0x76, 0xb2, 0x8a, 0x5f, 0x1a, 0xc1, 0x4d, 0xe8, 0xd4, 0xbd, 0xec, 0xf0, 0xab, 0xde, 0xb9, + 0xd3, 0xa2, 0x77, 0xee, 0x25, 0xbd, 0x26, 0x37, 0xae, 0xb8, 0x32, 0xb1, 0x8b, 0x1b, 0xf7, 0x92, + 0xba, 0xe1, 0x75, 0x89, 0xb9, 0xd1, 0x35, 0xea, 0x3c, 0x15, 0x86, 0x5e, 0xfd, 0x0e, 0x00, 0x00, + 0xff, 0xff, 0xe2, 0x7d, 0x9e, 0xca, 0xa2, 0x09, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -774,6 +1034,17 @@ type CoreServiceClient interface { // If the changes are valid, core will return the given store configuration in response, and // start or update the necessary feature population jobs for the updated store. UpdateStore(ctx context.Context, in *UpdateStoreRequest, opts ...grpc.CallOption) (*UpdateStoreResponse, error) + // Creates a project. Projects serve as namespaces within which resources like features will be + // created. Both feature set names as well as field names must be unique within a project. Project + // names themselves must be globally unique. + CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*CreateProjectResponse, error) + // Archives a project. Archived projects will continue to exist and function, but won't be visible + // through the Core API. Any existing ingestion or serving requests will continue to function, + // but will result in warning messages being logged. It is not possible to unarchive a project + // through the Core API + ArchiveProject(ctx context.Context, in *ArchiveProjectRequest, opts ...grpc.CallOption) (*ArchiveProjectResponse, error) + // Lists all projects active projects. + ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error) } type coreServiceClient struct { @@ -838,6 +1109,33 @@ func (c *coreServiceClient) UpdateStore(ctx context.Context, in *UpdateStoreRequ return out, nil } +func (c *coreServiceClient) CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*CreateProjectResponse, error) { + out := new(CreateProjectResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/CreateProject", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) ArchiveProject(ctx context.Context, in *ArchiveProjectRequest, opts ...grpc.CallOption) (*ArchiveProjectResponse, error) { + out := new(ArchiveProjectResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/ArchiveProject", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *coreServiceClient) ListProjects(ctx context.Context, in *ListProjectsRequest, opts ...grpc.CallOption) (*ListProjectsResponse, error) { + out := new(ListProjectsResponse) + err := c.cc.Invoke(ctx, "/feast.core.CoreService/ListProjects", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // CoreServiceServer is the server API for CoreService service. type CoreServiceServer interface { // Retrieve version information about this Feast deployment @@ -868,6 +1166,17 @@ type CoreServiceServer interface { // If the changes are valid, core will return the given store configuration in response, and // start or update the necessary feature population jobs for the updated store. UpdateStore(context.Context, *UpdateStoreRequest) (*UpdateStoreResponse, error) + // Creates a project. Projects serve as namespaces within which resources like features will be + // created. Both feature set names as well as field names must be unique within a project. Project + // names themselves must be globally unique. + CreateProject(context.Context, *CreateProjectRequest) (*CreateProjectResponse, error) + // Archives a project. Archived projects will continue to exist and function, but won't be visible + // through the Core API. Any existing ingestion or serving requests will continue to function, + // but will result in warning messages being logged. It is not possible to unarchive a project + // through the Core API + ArchiveProject(context.Context, *ArchiveProjectRequest) (*ArchiveProjectResponse, error) + // Lists all projects active projects. 
+ ListProjects(context.Context, *ListProjectsRequest) (*ListProjectsResponse, error) } // UnimplementedCoreServiceServer can be embedded to have forward compatible implementations. @@ -892,6 +1201,15 @@ func (*UnimplementedCoreServiceServer) ApplyFeatureSet(ctx context.Context, req func (*UnimplementedCoreServiceServer) UpdateStore(ctx context.Context, req *UpdateStoreRequest) (*UpdateStoreResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method UpdateStore not implemented") } +func (*UnimplementedCoreServiceServer) CreateProject(ctx context.Context, req *CreateProjectRequest) (*CreateProjectResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateProject not implemented") +} +func (*UnimplementedCoreServiceServer) ArchiveProject(ctx context.Context, req *ArchiveProjectRequest) (*ArchiveProjectResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ArchiveProject not implemented") +} +func (*UnimplementedCoreServiceServer) ListProjects(ctx context.Context, req *ListProjectsRequest) (*ListProjectsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListProjects not implemented") +} func RegisterCoreServiceServer(s *grpc.Server, srv CoreServiceServer) { s.RegisterService(&_CoreService_serviceDesc, srv) @@ -1005,6 +1323,60 @@ func _CoreService_UpdateStore_Handler(srv interface{}, ctx context.Context, dec return interceptor(ctx, in, info, handler) } +func _CoreService_CreateProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).CreateProject(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/CreateProject", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(CoreServiceServer).CreateProject(ctx, req.(*CreateProjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_ArchiveProject_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ArchiveProjectRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).ArchiveProject(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/ArchiveProject", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).ArchiveProject(ctx, req.(*ArchiveProjectRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CoreService_ListProjects_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProjectsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CoreServiceServer).ListProjects(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/feast.core.CoreService/ListProjects", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CoreServiceServer).ListProjects(ctx, req.(*ListProjectsRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _CoreService_serviceDesc = grpc.ServiceDesc{ ServiceName: "feast.core.CoreService", HandlerType: (*CoreServiceServer)(nil), @@ -1033,6 +1405,18 @@ var _CoreService_serviceDesc = grpc.ServiceDesc{ MethodName: "UpdateStore", Handler: _CoreService_UpdateStore_Handler, }, + { + MethodName: "CreateProject", + Handler: _CoreService_CreateProject_Handler, + }, + { + MethodName: "ArchiveProject", + Handler: _CoreService_ArchiveProject_Handler, + }, + { + MethodName: "ListProjects", + Handler: _CoreService_ListProjects_Handler, + }, 
}, Streams: []grpc.StreamDesc{}, Metadata: "feast/core/CoreService.proto", diff --git a/sdk/go/protos/feast/core/FeatureSet.pb.go b/sdk/go/protos/feast/core/FeatureSet.pb.go index 79cbcaa94b..26d9d9c4f7 100644 --- a/sdk/go/protos/feast/core/FeatureSet.pb.go +++ b/sdk/go/protos/feast/core/FeatureSet.pb.go @@ -8,6 +8,7 @@ import ( types "github.com/gojek/feast/sdk/go/protos/feast/types" proto "github.com/golang/protobuf/proto" duration "github.com/golang/protobuf/ptypes/duration" + timestamp "github.com/golang/protobuf/ptypes/timestamp" math "math" ) @@ -22,10 +23,89 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package +type FeatureSetStatus int32 + +const ( + FeatureSetStatus_STATUS_INVALID FeatureSetStatus = 0 + FeatureSetStatus_STATUS_PENDING FeatureSetStatus = 1 + FeatureSetStatus_STATUS_READY FeatureSetStatus = 2 +) + +var FeatureSetStatus_name = map[int32]string{ + 0: "STATUS_INVALID", + 1: "STATUS_PENDING", + 2: "STATUS_READY", +} + +var FeatureSetStatus_value = map[string]int32{ + "STATUS_INVALID": 0, + "STATUS_PENDING": 1, + "STATUS_READY": 2, +} + +func (x FeatureSetStatus) String() string { + return proto.EnumName(FeatureSetStatus_name, int32(x)) +} + +func (FeatureSetStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_972fbd278ac19c0c, []int{0} +} + +type FeatureSet struct { + // User-specified specifications of this feature set. + Spec *FeatureSetSpec `protobuf:"bytes,1,opt,name=spec,proto3" json:"spec,omitempty"` + // System-populated metadata for this feature set. 
+ Meta *FeatureSetMeta `protobuf:"bytes,2,opt,name=meta,proto3" json:"meta,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeatureSet) Reset() { *m = FeatureSet{} } +func (m *FeatureSet) String() string { return proto.CompactTextString(m) } +func (*FeatureSet) ProtoMessage() {} +func (*FeatureSet) Descriptor() ([]byte, []int) { + return fileDescriptor_972fbd278ac19c0c, []int{0} +} + +func (m *FeatureSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureSet.Unmarshal(m, b) +} +func (m *FeatureSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureSet.Marshal(b, m, deterministic) +} +func (m *FeatureSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSet.Merge(m, src) +} +func (m *FeatureSet) XXX_Size() int { + return xxx_messageInfo_FeatureSet.Size(m) +} +func (m *FeatureSet) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureSet.DiscardUnknown(m) +} + +var xxx_messageInfo_FeatureSet proto.InternalMessageInfo + +func (m *FeatureSet) GetSpec() *FeatureSetSpec { + if m != nil { + return m.Spec + } + return nil +} + +func (m *FeatureSet) GetMeta() *FeatureSetMeta { + if m != nil { + return m.Meta + } + return nil +} + type FeatureSetSpec struct { - // Name of the featureSet. Must be unique. + // Name of project that this feature set belongs to. + Project string `protobuf:"bytes,7,opt,name=project,proto3" json:"project,omitempty"` + // Name of the feature set. Must be unique. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // FeatureSet version. + // Feature set version. Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` // List of entities contained within this featureSet. // This allows the feature to be used during joins between feature sets. 
@@ -50,7 +130,7 @@ func (m *FeatureSetSpec) Reset() { *m = FeatureSetSpec{} } func (m *FeatureSetSpec) String() string { return proto.CompactTextString(m) } func (*FeatureSetSpec) ProtoMessage() {} func (*FeatureSetSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_972fbd278ac19c0c, []int{0} + return fileDescriptor_972fbd278ac19c0c, []int{1} } func (m *FeatureSetSpec) XXX_Unmarshal(b []byte) error { @@ -71,6 +151,13 @@ func (m *FeatureSetSpec) XXX_DiscardUnknown() { var xxx_messageInfo_FeatureSetSpec proto.InternalMessageInfo +func (m *FeatureSetSpec) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + func (m *FeatureSetSpec) GetName() string { if m != nil { return m.Name @@ -127,7 +214,7 @@ func (m *EntitySpec) Reset() { *m = EntitySpec{} } func (m *EntitySpec) String() string { return proto.CompactTextString(m) } func (*EntitySpec) ProtoMessage() {} func (*EntitySpec) Descriptor() ([]byte, []int) { - return fileDescriptor_972fbd278ac19c0c, []int{1} + return fileDescriptor_972fbd278ac19c0c, []int{2} } func (m *EntitySpec) XXX_Unmarshal(b []byte) error { @@ -176,7 +263,7 @@ func (m *FeatureSpec) Reset() { *m = FeatureSpec{} } func (m *FeatureSpec) String() string { return proto.CompactTextString(m) } func (*FeatureSpec) ProtoMessage() {} func (*FeatureSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_972fbd278ac19c0c, []int{2} + return fileDescriptor_972fbd278ac19c0c, []int{3} } func (m *FeatureSpec) XXX_Unmarshal(b []byte) error { @@ -211,37 +298,103 @@ func (m *FeatureSpec) GetValueType() types.ValueType_Enum { return types.ValueType_INVALID } +type FeatureSetMeta struct { + // Created timestamp of this specific feature set. + CreatedTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=created_timestamp,json=createdTimestamp,proto3" json:"created_timestamp,omitempty"` + // Status of the feature set. + // Used to indicate whether the feature set is ready for consumption or ingestion. 
+ // Currently supports 2 states: + // 1) STATUS_PENDING - A feature set is in pending state if Feast has not spun up the jobs + // necessary to push rows for this feature set to stores subscribing to this feature set. + // 2) STATUS_READY - Feature set is ready for consumption or ingestion + Status FeatureSetStatus `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.FeatureSetStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeatureSetMeta) Reset() { *m = FeatureSetMeta{} } +func (m *FeatureSetMeta) String() string { return proto.CompactTextString(m) } +func (*FeatureSetMeta) ProtoMessage() {} +func (*FeatureSetMeta) Descriptor() ([]byte, []int) { + return fileDescriptor_972fbd278ac19c0c, []int{4} +} + +func (m *FeatureSetMeta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureSetMeta.Unmarshal(m, b) +} +func (m *FeatureSetMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureSetMeta.Marshal(b, m, deterministic) +} +func (m *FeatureSetMeta) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureSetMeta.Merge(m, src) +} +func (m *FeatureSetMeta) XXX_Size() int { + return xxx_messageInfo_FeatureSetMeta.Size(m) +} +func (m *FeatureSetMeta) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureSetMeta.DiscardUnknown(m) +} + +var xxx_messageInfo_FeatureSetMeta proto.InternalMessageInfo + +func (m *FeatureSetMeta) GetCreatedTimestamp() *timestamp.Timestamp { + if m != nil { + return m.CreatedTimestamp + } + return nil +} + +func (m *FeatureSetMeta) GetStatus() FeatureSetStatus { + if m != nil { + return m.Status + } + return FeatureSetStatus_STATUS_INVALID +} + func init() { + proto.RegisterEnum("feast.core.FeatureSetStatus", FeatureSetStatus_name, FeatureSetStatus_value) + proto.RegisterType((*FeatureSet)(nil), "feast.core.FeatureSet") proto.RegisterType((*FeatureSetSpec)(nil), 
"feast.core.FeatureSetSpec") proto.RegisterType((*EntitySpec)(nil), "feast.core.EntitySpec") proto.RegisterType((*FeatureSpec)(nil), "feast.core.FeatureSpec") + proto.RegisterType((*FeatureSetMeta)(nil), "feast.core.FeatureSetMeta") } func init() { proto.RegisterFile("feast/core/FeatureSet.proto", fileDescriptor_972fbd278ac19c0c) } var fileDescriptor_972fbd278ac19c0c = []byte{ - // 357 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x52, 0x4d, 0x6f, 0xe2, 0x30, - 0x10, 0x55, 0xf8, 0x08, 0x30, 0x48, 0xac, 0xe4, 0xc3, 0x92, 0x5d, 0xa4, 0x55, 0xc4, 0x29, 0xda, - 0x83, 0x2d, 0x85, 0x5b, 0x6f, 0x45, 0x6d, 0x8f, 0x55, 0x15, 0xaa, 0x1e, 0xaa, 0x56, 0xc8, 0x84, - 0x21, 0x4d, 0x21, 0x71, 0x14, 0x3b, 0x08, 0x7e, 0x41, 0xff, 0x76, 0x15, 0x1b, 0x37, 0x39, 0xf4, - 0xd8, 0x9b, 0xc7, 0xef, 0xcd, 0xc7, 0x7b, 0x33, 0x30, 0xdb, 0x21, 0x97, 0x8a, 0xc5, 0xa2, 0x44, - 0x76, 0x87, 0x5c, 0x55, 0x25, 0xae, 0x50, 0xd1, 0xa2, 0x14, 0x4a, 0x10, 0xd0, 0x20, 0xad, 0xc1, - 0xbf, 0x53, 0x43, 0x54, 0xe7, 0x02, 0x25, 0x7b, 0xe2, 0x87, 0x0a, 0x0d, 0xc9, 0x02, 0xba, 0xc2, - 0x4a, 0x54, 0x65, 0x6c, 0x81, 0x7f, 0x89, 0x10, 0xc9, 0x01, 0x99, 0x8e, 0x36, 0xd5, 0x8e, 0x6d, - 0xab, 0x92, 0xab, 0x54, 0xe4, 0x06, 0x9f, 0x7f, 0x74, 0x60, 0xd2, 0xb4, 0x5c, 0x15, 0x18, 0x13, - 0x02, 0xbd, 0x9c, 0x67, 0xe8, 0x39, 0xbe, 0x13, 0x8c, 0x22, 0xfd, 0x26, 0x1e, 0x0c, 0x8e, 0x58, - 0xca, 0x54, 0xe4, 0x5e, 0xc7, 0x77, 0x82, 0x7e, 0x64, 0x43, 0x12, 0xc2, 0x10, 0x73, 0x95, 0xaa, - 0x14, 0xa5, 0xd7, 0xf5, 0xbb, 0xc1, 0x38, 0xfc, 0x4d, 0x9b, 0x89, 0xe9, 0x6d, 0x8d, 0x9d, 0xeb, - 0xba, 0xd1, 0x17, 0x8f, 0x2c, 0x60, 0xb8, 0x33, 0x3d, 0xa5, 0xd7, 0xd3, 0x39, 0xd3, 0x76, 0x8e, - 0x9d, 0x47, 0x27, 0x59, 0x22, 0x09, 0x61, 0x90, 0xf1, 0xd3, 0x9a, 0x27, 0xe8, 0xf5, 0x7d, 0x27, - 0x18, 0x87, 0x7f, 0xa8, 0xd1, 0x46, 0xad, 0x36, 0x7a, 0x73, 0xd1, 0x16, 0xb9, 0x19, 0x3f, 0x5d, - 0x27, 0x48, 0xfe, 0x83, 0x2b, 0xb5, 0x1b, 0x9e, 0xab, 0x53, 0x48, 0xbb, 0x8d, 0xf1, 0x29, 0xba, - 
0x30, 0xe6, 0x2f, 0x00, 0xcd, 0xb0, 0xdf, 0x9a, 0x70, 0x05, 0x70, 0xac, 0x3d, 0x5f, 0xd7, 0xfe, - 0x6b, 0x1f, 0x26, 0xe1, 0xec, 0x52, 0x51, 0xaf, 0x84, 0xea, 0x95, 0x3c, 0x9e, 0x8b, 0x5a, 0x77, - 0x95, 0x45, 0xa3, 0xa3, 0x8d, 0xe7, 0xaf, 0x30, 0x6e, 0xc9, 0xfa, 0xe9, 0xf2, 0xcb, 0x7b, 0x68, - 0x9d, 0xc9, 0xf2, 0x57, 0xb3, 0xd1, 0x87, 0xda, 0x9b, 0x67, 0x96, 0xa4, 0xea, 0xad, 0xda, 0xd0, - 0x58, 0x64, 0x2c, 0x11, 0xef, 0xb8, 0x67, 0xe6, 0x5e, 0xe4, 0x76, 0xcf, 0x12, 0x61, 0x8e, 0x43, - 0xb2, 0xe6, 0x86, 0x36, 0xae, 0xfe, 0x5a, 0x7c, 0x06, 0x00, 0x00, 0xff, 0xff, 0xdb, 0x96, 0x15, - 0x04, 0x9a, 0x02, 0x00, 0x00, + // 510 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x93, 0x4f, 0x6f, 0xda, 0x30, + 0x18, 0xc6, 0x07, 0xa5, 0x50, 0x5e, 0x26, 0x96, 0xf9, 0xb0, 0x66, 0xed, 0xb4, 0x21, 0x4e, 0xa8, + 0x07, 0x5b, 0x4a, 0x77, 0xda, 0x8d, 0x0a, 0x56, 0x21, 0x75, 0xa8, 0x72, 0x58, 0xa5, 0x4d, 0x9b, + 0x90, 0x09, 0x2f, 0x59, 0x5a, 0x82, 0xa3, 0xd8, 0x41, 0xe5, 0x53, 0xec, 0x33, 0xec, 0x9b, 0x4e, + 0x71, 0x12, 0x92, 0xa1, 0x6e, 0xa7, 0xdd, 0x62, 0x3f, 0x3f, 0xbf, 0x79, 0xde, 0x7f, 0x70, 0xbe, + 0x42, 0xa1, 0x34, 0xf3, 0x64, 0x8c, 0xec, 0x23, 0x0a, 0x9d, 0xc4, 0xe8, 0xa2, 0xa6, 0x51, 0x2c, + 0xb5, 0x24, 0x60, 0x44, 0x9a, 0x8a, 0x67, 0xa7, 0x19, 0xa8, 0x77, 0x11, 0x2a, 0x76, 0x27, 0xd6, + 0x09, 0x66, 0x50, 0x21, 0x98, 0x08, 0xae, 0x4c, 0x62, 0xaf, 0x10, 0xde, 0xfa, 0x52, 0xfa, 0x6b, + 0x64, 0xe6, 0xb4, 0x48, 0x56, 0x6c, 0x99, 0xc4, 0x42, 0x07, 0x72, 0x93, 0xeb, 0xef, 0x0e, 0x75, + 0x1d, 0x84, 0xa8, 0xb4, 0x08, 0xa3, 0x0c, 0xe8, 0xaf, 0x01, 0x4a, 0x4b, 0x84, 0x42, 0x43, 0x45, + 0xe8, 0xd9, 0xb5, 0x5e, 0x6d, 0xd0, 0x71, 0xce, 0x68, 0xe9, 0x8d, 0x96, 0x94, 0x1b, 0xa1, 0xc7, + 0x0d, 0x97, 0xf2, 0x21, 0x6a, 0x61, 0xd7, 0xff, 0xc5, 0x7f, 0x42, 0x2d, 0xb8, 0xe1, 0xfa, 0xbf, + 0xea, 0xd0, 0xfd, 0x33, 0x10, 0xb1, 0xa1, 0x15, 0xc5, 0xf2, 0x1e, 0x3d, 0x6d, 0xb7, 0x7a, 0xb5, + 0x41, 0x9b, 0x17, 0x47, 0x42, 0xa0, 0xb1, 0x11, 0x21, 
0x1a, 0x33, 0x6d, 0x6e, 0xbe, 0x53, 0x7a, + 0x8b, 0xb1, 0x0a, 0xe4, 0xc6, 0xfc, 0xf3, 0x98, 0x17, 0x47, 0xe2, 0xc0, 0x09, 0x6e, 0x74, 0xa0, + 0x03, 0x54, 0xf6, 0x51, 0xef, 0x68, 0xd0, 0x71, 0x5e, 0x55, 0xed, 0x8c, 0x53, 0x6d, 0x67, 0xac, + 0xef, 0x39, 0x72, 0x09, 0x27, 0xab, 0xcc, 0x8d, 0xb2, 0x1b, 0xe6, 0xcd, 0xe9, 0x53, 0x29, 0x98, + 0x47, 0x05, 0x48, 0x1c, 0x68, 0x85, 0xe2, 0x71, 0x2e, 0x7c, 0xb4, 0x8f, 0x4d, 0xda, 0xaf, 0x69, + 0x56, 0x64, 0x5a, 0x14, 0x99, 0x8e, 0xf2, 0x26, 0xf0, 0x66, 0x28, 0x1e, 0x87, 0x3e, 0x92, 0x0b, + 0x68, 0x2a, 0xd3, 0x36, 0xbb, 0x69, 0x9e, 0x90, 0xea, 0x6f, 0xb2, 0x86, 0xf2, 0x9c, 0xe8, 0x7f, + 0x03, 0x28, 0xcd, 0x3e, 0x59, 0x84, 0x0f, 0x00, 0xdb, 0x74, 0x38, 0xe6, 0xe9, 0xa0, 0x98, 0x3a, + 0x74, 0x9d, 0xf3, 0x3c, 0xa2, 0x99, 0x1d, 0x6a, 0x66, 0x67, 0xb6, 0x8b, 0xd2, 0xbc, 0x93, 0x90, + 0xb7, 0xb7, 0xc5, 0xb9, 0xff, 0x1d, 0x3a, 0x95, 0xb4, 0xfe, 0x7b, 0xf8, 0x9f, 0xb5, 0x6a, 0x83, + 0xd3, 0xce, 0x93, 0x6b, 0x78, 0xe9, 0xc5, 0x28, 0x34, 0x2e, 0xe7, 0xfb, 0xe1, 0xdb, 0x0f, 0xd8, + 0x61, 0xe5, 0x66, 0x05, 0xc1, 0xad, 0xfc, 0xd1, 0xfe, 0x86, 0xbc, 0x87, 0xa6, 0xd2, 0x42, 0x27, + 0x2a, 0xf7, 0xf4, 0xe6, 0x2f, 0xe3, 0x69, 0x18, 0x9e, 0xb3, 0x17, 0x37, 0x60, 0x1d, 0x6a, 0x84, + 0x40, 0xd7, 0x9d, 0x0d, 0x67, 0x9f, 0xdd, 0xf9, 0x64, 0x7a, 0x37, 0xbc, 0x99, 0x8c, 0xac, 0x67, + 0x95, 0xbb, 0xdb, 0xf1, 0x74, 0x34, 0x99, 0x5e, 0x5b, 0x35, 0x62, 0xc1, 0xf3, 0xfc, 0x8e, 0x8f, + 0x87, 0xa3, 0x2f, 0x56, 0xfd, 0x6a, 0x0a, 0x95, 0x7d, 0xbd, 0x7a, 0x51, 0x46, 0xbe, 0x4d, 0x33, + 0xf8, 0xca, 0xfc, 0x40, 0xff, 0x48, 0x16, 0xd4, 0x93, 0x21, 0xf3, 0xe5, 0x3d, 0x3e, 0xb0, 0x6c, + 0x71, 0xd5, 0xf2, 0x81, 0xf9, 0x32, 0xdb, 0x42, 0xc5, 0xca, 0x65, 0x5e, 0x34, 0xcd, 0xd5, 0xe5, + 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb8, 0xd5, 0xf0, 0x13, 0x23, 0x04, 0x00, 0x00, } diff --git a/sdk/go/protos/feast/core/Store.pb.go b/sdk/go/protos/feast/core/Store.pb.go index 5dafb63d94..9120edcb42 100644 --- a/sdk/go/protos/feast/core/Store.pb.go +++ 
b/sdk/go/protos/feast/core/Store.pb.go @@ -391,15 +391,27 @@ func (m *Store_CassandraConfig) GetPort() int32 { } type Store_Subscription struct { - // Name of featureSet to subscribe to. This field supports any valid basic POSIX regex, - // e.g. customer_.* or .* - // https://www.regular-expressions.info/posix.html + // Name of project that the feature sets belongs to. This can be one of + // - [project_name] + // - * + // If an asterisk is provided, filtering on projects will be disabled. All projects will + // be matched. It is NOT possible to provide an asterisk with a string in order to do + // pattern matching. + Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` + // Name of the desired feature set. Asterisks can be used as wildcards in the name. + // Matching on names is only permitted if a specific project is defined. It is disallowed + // If the project name is set to "*" + // e.g. + // - * can be used to match all feature sets + // - my-feature-set* can be used to match all features prefixed by "my-feature-set" + // - my-feature-set-6 can be used to select a single feature set Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Versions of the given featureSet that will be ingested into this store. + // Versions of the given feature sets that will be returned. // Valid options for version: - // latest: only subscribe to latest version of feature set - // [version number]: pin to a specific version - // >[version number]: subscribe to all versions larger than or equal to [version number] + // "latest": only the latest version is returned. + // "*": Subscribe to all versions + // [version number]: pin to a specific version. Project and feature set name must be + // explicitly defined if a specific version is pinned. 
Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -431,6 +443,13 @@ func (m *Store_Subscription) XXX_DiscardUnknown() { var xxx_messageInfo_Store_Subscription proto.InternalMessageInfo +func (m *Store_Subscription) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + func (m *Store_Subscription) GetName() string { if m != nil { return m.Name @@ -457,33 +476,34 @@ func init() { func init() { proto.RegisterFile("feast/core/Store.proto", fileDescriptor_4b177bc9ccf64875) } var fileDescriptor_4b177bc9ccf64875 = []byte{ - // 442 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcf, 0x6f, 0xd3, 0x30, - 0x14, 0xc7, 0x97, 0xae, 0xdd, 0x96, 0x97, 0xfe, 0x88, 0x7c, 0x40, 0x51, 0xd1, 0x50, 0xd8, 0xa9, - 0xa7, 0x58, 0x2a, 0xe2, 0x80, 0xc4, 0x81, 0xa6, 0x9d, 0x20, 0x02, 0x55, 0xcc, 0x05, 0x24, 0xb8, - 0x4c, 0xf9, 0xe1, 0x65, 0xde, 0xb4, 0x38, 0xd8, 0x2e, 0x52, 0xff, 0x3a, 0xfe, 0x35, 0x64, 0x27, - 0x69, 0x53, 0xda, 0xc3, 0x2e, 0x91, 0xfd, 0x7d, 0xdf, 0xf7, 0xc9, 0xd3, 0xf3, 0x7b, 0xf0, 0xe2, - 0x8e, 0xc6, 0x52, 0xe1, 0x94, 0x0b, 0x8a, 0x57, 0x8a, 0x0b, 0x1a, 0x94, 0x82, 0x2b, 0x8e, 0xc0, - 0xe8, 0x81, 0xd6, 0xaf, 0xfe, 0xf6, 0xa0, 0x67, 0x62, 0x08, 0x41, 0xb7, 0x88, 0x9f, 0xa8, 0x67, - 0xf9, 0xd6, 0xc4, 0x26, 0xe6, 0x8c, 0x30, 0x74, 0xd5, 0xa6, 0xa4, 0x5e, 0xc7, 0xb7, 0x26, 0xc3, - 0xe9, 0xcb, 0x60, 0x97, 0x18, 0x54, 0x40, 0xf3, 0xfd, 0xb6, 0x29, 0x29, 0x31, 0x46, 0xb4, 0x80, - 0x81, 0x5c, 0x27, 0x32, 0x15, 0xac, 0x54, 0x8c, 0x17, 0xd2, 0xeb, 0xfa, 0xa7, 0x13, 0x67, 0xfa, - 0xea, 0x48, 0x66, 0xcb, 0x46, 0xf6, 0x93, 0x50, 0x08, 0x7d, 0x41, 0x33, 0x26, 0x6f, 0x53, 0x5e, - 0xdc, 0xb1, 0xdc, 0x73, 0x7c, 0x6b, 0xe2, 0x4c, 0x2f, 0x0f, 0x21, 0x44, 0xbb, 0xe6, 0xc6, 0xf4, - 0xe9, 0x84, 0x38, 0x62, 0x77, 0x45, 0x9f, 0x61, 0x94, 0xb0, 0xfc, 0xf7, 0x9a, 0x8a, 0x4d, 0x83, - 0xe9, 0x1b, 
0x8c, 0x7f, 0x88, 0x09, 0x59, 0x7e, 0xa3, 0x8d, 0x5b, 0xd2, 0xb0, 0x49, 0xad, 0x61, - 0x4b, 0x70, 0xd3, 0x58, 0xca, 0xb8, 0xc8, 0x44, 0xdc, 0xd0, 0x06, 0x86, 0xf6, 0xfa, 0x90, 0x36, - 0x6f, 0x9c, 0x5b, 0xdc, 0x28, 0xdd, 0x97, 0xc6, 0x6f, 0xc1, 0x69, 0x95, 0xae, 0x5b, 0x7f, 0xcf, - 0xa5, 0x6a, 0x5a, 0xaf, 0xcf, 0x5a, 0x2b, 0xb9, 0x50, 0xa6, 0xf5, 0x3d, 0x62, 0xce, 0xe3, 0x25, - 0x0c, 0xf7, 0x4b, 0x45, 0x97, 0x00, 0xa5, 0xe0, 0x0f, 0x34, 0x55, 0xb7, 0x2c, 0xab, 0xf3, 0xed, - 0x5a, 0x89, 0x32, 0x1d, 0xce, 0x62, 0x15, 0x4b, 0x6a, 0xc2, 0x9d, 0x2a, 0x5c, 0x2b, 0x51, 0x36, - 0x7e, 0x07, 0xa3, 0xff, 0x8a, 0x7d, 0x76, 0x29, 0xef, 0xa1, 0xdf, 0x7e, 0xc1, 0xa3, 0xd3, 0xe3, - 0xc1, 0xf9, 0x1f, 0x2a, 0x24, 0xe3, 0x45, 0xfd, 0xeb, 0xe6, 0x7a, 0xf5, 0x01, 0xec, 0xed, 0xe4, - 0x20, 0x07, 0xce, 0xa3, 0xe5, 0x8f, 0xd9, 0x97, 0x68, 0xe1, 0x9e, 0x20, 0x1b, 0x7a, 0xe4, 0x7a, - 0x11, 0xad, 0x5c, 0x0b, 0xf5, 0xe1, 0x22, 0x8c, 0x3e, 0xde, 0x7c, 0xbf, 0x26, 0x3f, 0xdd, 0x0e, - 0x1a, 0x80, 0x3d, 0x9f, 0xad, 0x56, 0xb3, 0xe5, 0x82, 0xcc, 0xdc, 0xd3, 0xf0, 0x02, 0xce, 0xaa, - 0x77, 0x08, 0x23, 0x68, 0xcd, 0x73, 0x08, 0x86, 0xfb, 0x55, 0xcf, 0xf9, 0x2f, 0x9c, 0x33, 0x75, - 0xbf, 0x4e, 0x82, 0x94, 0x3f, 0xe1, 0x9c, 0x3f, 0xd0, 0x47, 0x5c, 0x2d, 0x84, 0xcc, 0x1e, 0x71, - 0xce, 0xb1, 0x59, 0x06, 0x89, 0x77, 0x4b, 0x92, 0x9c, 0x19, 0xe9, 0xcd, 0xbf, 0x00, 0x00, 0x00, - 0xff, 0xff, 0xdc, 0xaf, 0xad, 0x8c, 0x39, 0x03, 0x00, 0x00, + // 450 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x97, 0xfe, 0x59, 0x97, 0x37, 0xfd, 0x13, 0xf9, 0x80, 0xa2, 0xa2, 0xa1, 0xb0, 0x53, + 0x4f, 0xb1, 0x54, 0xc4, 0x81, 0x1b, 0x4d, 0x3b, 0x41, 0x04, 0xaa, 0x98, 0x0b, 0x93, 0xe0, 0x32, + 0xa5, 0x89, 0x97, 0x79, 0xd3, 0xe2, 0x60, 0xbb, 0x48, 0xfd, 0xa8, 0x7c, 0x1b, 0x64, 0x27, 0x69, + 0x53, 0xda, 0xc3, 0x2e, 0x91, 0xfd, 0xbc, 0xcf, 0xf3, 0xcb, 0x2b, 0xdb, 0x2f, 0xbc, 0xba, 0xa7, + 0xb1, 0x54, 0x38, 0xe1, 0x82, 0xe2, 
0x95, 0xe2, 0x82, 0x06, 0x85, 0xe0, 0x8a, 0x23, 0x30, 0x7a, + 0xa0, 0xf5, 0xab, 0xbf, 0x5d, 0xe8, 0x9a, 0x1a, 0x42, 0xd0, 0xc9, 0xe3, 0x67, 0xea, 0x59, 0xbe, + 0x35, 0xb1, 0x89, 0x59, 0x23, 0x0c, 0x1d, 0xb5, 0x2d, 0xa8, 0xd7, 0xf2, 0xad, 0xc9, 0x70, 0xfa, + 0x3a, 0xd8, 0x07, 0x83, 0x12, 0x68, 0xbe, 0xdf, 0xb7, 0x05, 0x25, 0xc6, 0x88, 0x16, 0x30, 0x90, + 0x9b, 0xb5, 0x4c, 0x04, 0x2b, 0x14, 0xe3, 0xb9, 0xf4, 0x3a, 0x7e, 0x7b, 0xe2, 0x4c, 0xdf, 0x9c, + 0x48, 0x36, 0x6c, 0xe4, 0x30, 0x84, 0x42, 0xe8, 0x0b, 0x9a, 0x32, 0x79, 0x97, 0xf0, 0xfc, 0x9e, + 0x65, 0x9e, 0xe3, 0x5b, 0x13, 0x67, 0x7a, 0x79, 0x0c, 0x21, 0xda, 0x35, 0x37, 0xa6, 0xcf, 0x67, + 0xc4, 0x11, 0xfb, 0x2d, 0xfa, 0x02, 0xa3, 0x35, 0xcb, 0x7e, 0x6f, 0xa8, 0xd8, 0xd6, 0x98, 0xbe, + 0xc1, 0xf8, 0xc7, 0x98, 0x90, 0x65, 0x37, 0xda, 0xb8, 0x23, 0x0d, 0xeb, 0x68, 0x05, 0x5b, 0x82, + 0x9b, 0xc4, 0x52, 0xc6, 0x79, 0x2a, 0xe2, 0x9a, 0x36, 0x30, 0xb4, 0xb7, 0xc7, 0xb4, 0x79, 0xed, + 0xdc, 0xe1, 0x46, 0xc9, 0xa1, 0x34, 0x7e, 0x0f, 0x4e, 0xa3, 0x75, 0x7d, 0xf4, 0x0f, 0x5c, 0xaa, + 0xfa, 0xe8, 0xf5, 0x5a, 0x6b, 0x05, 0x17, 0xca, 0x1c, 0x7d, 0x97, 0x98, 0xf5, 0x78, 0x09, 0xc3, + 0xc3, 0x56, 0xd1, 0x25, 0x40, 0x21, 0xf8, 0x23, 0x4d, 0xd4, 0x1d, 0x4b, 0xab, 0xbc, 0x5d, 0x29, + 0x51, 0xaa, 0xcb, 0x69, 0xac, 0x62, 0x49, 0x4d, 0xb9, 0x55, 0x96, 0x2b, 0x25, 0x4a, 0xc7, 0x1f, + 0x60, 0xf4, 0x5f, 0xb3, 0x2f, 0x6e, 0xe5, 0x16, 0xfa, 0xcd, 0x1b, 0x44, 0x1e, 0xf4, 0xaa, 0xdf, + 0x7a, 0x6d, 0x13, 0xad, 0xb7, 0x27, 0xdf, 0x95, 0x07, 0xbd, 0x3f, 0x54, 0x48, 0xc6, 0xf3, 0xaa, + 0xa9, 0x7a, 0x7b, 0xf5, 0x11, 0xec, 0xdd, 0x9b, 0x42, 0x0e, 0xf4, 0xa2, 0xe5, 0xed, 0xec, 0x6b, + 0xb4, 0x70, 0xcf, 0x90, 0x0d, 0x5d, 0x72, 0xbd, 0x88, 0x56, 0xae, 0x85, 0xfa, 0x70, 0x11, 0x46, + 0x9f, 0x6e, 0x7e, 0x5c, 0x93, 0x9f, 0x6e, 0x0b, 0x0d, 0xc0, 0x9e, 0xcf, 0x56, 0xab, 0xd9, 0x72, + 0x41, 0x66, 0x6e, 0x3b, 0xbc, 0x80, 0xf3, 0xf2, 0x86, 0xc2, 0x08, 0x1a, 0x2f, 0x3d, 0x04, 0xc3, + 0xfd, 0xa6, 0x27, 0xe0, 0x17, 0xce, 0x98, 0x7a, 0xd8, 0xac, 0x83, 0x84, 
0x3f, 0xe3, 0x8c, 0x3f, + 0xd2, 0x27, 0x5c, 0x8e, 0x8a, 0x4c, 0x9f, 0x70, 0xc6, 0xb1, 0x19, 0x13, 0x89, 0xf7, 0xe3, 0xb3, + 0x3e, 0x37, 0xd2, 0xbb, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x56, 0xfe, 0x58, 0x14, 0x53, 0x03, + 0x00, 0x00, } diff --git a/sdk/go/protos/feast/serving/ServingService.pb.go b/sdk/go/protos/feast/serving/ServingService.pb.go index 49c730ad3c..212e8606ce 100644 --- a/sdk/go/protos/feast/serving/ServingService.pb.go +++ b/sdk/go/protos/feast/serving/ServingService.pb.go @@ -231,13 +231,13 @@ func (m *GetFeastServingInfoResponse) GetJobStagingLocation() string { return "" } -type FeatureSetRequest struct { - // Feature set name - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Feature set version - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` - // Features that should be retrieved from this feature set - FeatureNames []string `protobuf:"bytes,3,rep,name=feature_names,json=featureNames,proto3" json:"feature_names,omitempty"` +type FeatureReference struct { + // Project name + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // Feature name + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // Feature version + Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` // The features will be retrieved if: // entity_timestamp - max_age <= event_timestamp <= entity_timestamp // @@ -249,53 +249,53 @@ type FeatureSetRequest struct { XXX_sizecache int32 `json:"-"` } -func (m *FeatureSetRequest) Reset() { *m = FeatureSetRequest{} } -func (m *FeatureSetRequest) String() string { return proto.CompactTextString(m) } -func (*FeatureSetRequest) ProtoMessage() {} -func (*FeatureSetRequest) Descriptor() ([]byte, []int) { +func (m *FeatureReference) Reset() { *m = FeatureReference{} } +func (m *FeatureReference) String() string { return proto.CompactTextString(m) } +func (*FeatureReference) 
ProtoMessage() {} +func (*FeatureReference) Descriptor() ([]byte, []int) { return fileDescriptor_0c1ba93cf29a8d9d, []int{2} } -func (m *FeatureSetRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FeatureSetRequest.Unmarshal(m, b) +func (m *FeatureReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeatureReference.Unmarshal(m, b) } -func (m *FeatureSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FeatureSetRequest.Marshal(b, m, deterministic) +func (m *FeatureReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeatureReference.Marshal(b, m, deterministic) } -func (m *FeatureSetRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_FeatureSetRequest.Merge(m, src) +func (m *FeatureReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeatureReference.Merge(m, src) } -func (m *FeatureSetRequest) XXX_Size() int { - return xxx_messageInfo_FeatureSetRequest.Size(m) +func (m *FeatureReference) XXX_Size() int { + return xxx_messageInfo_FeatureReference.Size(m) } -func (m *FeatureSetRequest) XXX_DiscardUnknown() { - xxx_messageInfo_FeatureSetRequest.DiscardUnknown(m) +func (m *FeatureReference) XXX_DiscardUnknown() { + xxx_messageInfo_FeatureReference.DiscardUnknown(m) } -var xxx_messageInfo_FeatureSetRequest proto.InternalMessageInfo +var xxx_messageInfo_FeatureReference proto.InternalMessageInfo -func (m *FeatureSetRequest) GetName() string { +func (m *FeatureReference) GetProject() string { if m != nil { - return m.Name + return m.Project } return "" } -func (m *FeatureSetRequest) GetVersion() int32 { +func (m *FeatureReference) GetName() string { if m != nil { - return m.Version + return m.Name } - return 0 + return "" } -func (m *FeatureSetRequest) GetFeatureNames() []string { +func (m *FeatureReference) GetVersion() int32 { if m != nil { - return m.FeatureNames + return m.Version } - return nil + return 0 } -func (m *FeatureSetRequest) 
GetMaxAge() *duration.Duration { +func (m *FeatureReference) GetMaxAge() *duration.Duration { if m != nil { return m.MaxAge } @@ -303,8 +303,8 @@ func (m *FeatureSetRequest) GetMaxAge() *duration.Duration { } type GetOnlineFeaturesRequest struct { - // List of feature sets and their features that are being retrieved - FeatureSets []*FeatureSetRequest `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` + // List of features that are being retrieved + Features []*FeatureReference `protobuf:"bytes,4,rep,name=features,proto3" json:"features,omitempty"` // List of entity rows, containing entity id and timestamp data. // Used during retrieval of feature rows and for joining feature // rows into a final dataset @@ -342,9 +342,9 @@ func (m *GetOnlineFeaturesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetOnlineFeaturesRequest proto.InternalMessageInfo -func (m *GetOnlineFeaturesRequest) GetFeatureSets() []*FeatureSetRequest { +func (m *GetOnlineFeaturesRequest) GetFeatures() []*FeatureReference { if m != nil { - return m.FeatureSets + return m.Features } return nil } @@ -414,8 +414,8 @@ func (m *GetOnlineFeaturesRequest_EntityRow) GetFields() map[string]*types.Value } type GetBatchFeaturesRequest struct { - // List of feature sets and their features that are being retrieved. - FeatureSets []*FeatureSetRequest `protobuf:"bytes,1,rep,name=feature_sets,json=featureSets,proto3" json:"feature_sets,omitempty"` + // List of features that are being retrieved + Features []*FeatureReference `protobuf:"bytes,3,rep,name=features,proto3" json:"features,omitempty"` // Source of the entity dataset containing the timestamps and entity keys to retrieve // features for. 
DatasetSource *DatasetSource `protobuf:"bytes,2,opt,name=dataset_source,json=datasetSource,proto3" json:"dataset_source,omitempty"` @@ -449,9 +449,9 @@ func (m *GetBatchFeaturesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetBatchFeaturesRequest proto.InternalMessageInfo -func (m *GetBatchFeaturesRequest) GetFeatureSets() []*FeatureSetRequest { +func (m *GetBatchFeaturesRequest) GetFeatures() []*FeatureReference { if m != nil { - return m.FeatureSets + return m.Features } return nil } @@ -870,7 +870,7 @@ func init() { proto.RegisterEnum("feast.serving.DataFormat", DataFormat_name, DataFormat_value) proto.RegisterType((*GetFeastServingInfoRequest)(nil), "feast.serving.GetFeastServingInfoRequest") proto.RegisterType((*GetFeastServingInfoResponse)(nil), "feast.serving.GetFeastServingInfoResponse") - proto.RegisterType((*FeatureSetRequest)(nil), "feast.serving.FeatureSetRequest") + proto.RegisterType((*FeatureReference)(nil), "feast.serving.FeatureReference") proto.RegisterType((*GetOnlineFeaturesRequest)(nil), "feast.serving.GetOnlineFeaturesRequest") proto.RegisterType((*GetOnlineFeaturesRequest_EntityRow)(nil), "feast.serving.GetOnlineFeaturesRequest.EntityRow") proto.RegisterMapType((map[string]*types.Value)(nil), "feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry") @@ -889,77 +889,76 @@ func init() { func init() { proto.RegisterFile("feast/serving/ServingService.proto", fileDescriptor_0c1ba93cf29a8d9d) } var fileDescriptor_0c1ba93cf29a8d9d = []byte{ - // 1105 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4f, 0x73, 0xda, 0x46, - 0x14, 0x8f, 0xc0, 0xc6, 0xe1, 0x11, 0x63, 0x79, 0xed, 0xda, 0xb2, 0xe2, 0x24, 0x0c, 0xed, 0xd4, - 0x94, 0x83, 0x68, 0x49, 0x9b, 0x69, 0xd3, 0xe9, 0x4c, 0xc0, 0x08, 0x82, 0xc7, 0x11, 0x9e, 0x05, - 0x3b, 0x6d, 0x2f, 0x1a, 0x01, 0x0b, 0x96, 0x0d, 0x5a, 0x57, 0xbb, 0x38, 0xf1, 0xd7, 0xe8, 0xb1, - 0x87, 0x5e, 0x7a, 0xee, 0xb5, 0x9f, 0xa4, 0x9f, 0xa0, 0xb7, 
0x7e, 0x83, 0x1e, 0x3b, 0x5a, 0xad, - 0x30, 0xff, 0x9c, 0xd8, 0x9d, 0x69, 0x4e, 0xda, 0x7d, 0xef, 0xf7, 0xfe, 0xee, 0x7b, 0x4f, 0x0f, - 0xb2, 0x3d, 0xe2, 0x30, 0x5e, 0x60, 0xc4, 0xbf, 0x74, 0xbd, 0x7e, 0xa1, 0x19, 0x7e, 0xc5, 0xa7, - 0x43, 0x8c, 0x0b, 0x9f, 0x72, 0x8a, 0x56, 0x05, 0xc6, 0x90, 0x18, 0xfd, 0x49, 0x9f, 0xd2, 0xfe, - 0x80, 0x14, 0x04, 0xb3, 0x3d, 0xea, 0x15, 0xb8, 0x3b, 0x24, 0x8c, 0x3b, 0xc3, 0x8b, 0x10, 0xaf, - 0x3f, 0x9e, 0x05, 0x74, 0x47, 0xbe, 0xc3, 0x5d, 0xea, 0x49, 0xfe, 0x76, 0x68, 0x93, 0x5f, 0x5d, - 0x10, 0x56, 0x38, 0x71, 0x06, 0x23, 0x69, 0x28, 0xbb, 0x0b, 0x7a, 0x8d, 0xf0, 0x6a, 0xc0, 0x95, - 0x8e, 0xd4, 0xbd, 0x1e, 0xc5, 0xe4, 0xa7, 0x11, 0x61, 0x3c, 0xfb, 0xab, 0x02, 0x0f, 0x17, 0xb2, - 0xd9, 0x05, 0xf5, 0x18, 0x41, 0x1a, 0xac, 0x5c, 0x12, 0x9f, 0xb9, 0xd4, 0xd3, 0x94, 0x8c, 0x92, - 0x4b, 0xe2, 0xe8, 0x8a, 0x9e, 0xc2, 0x52, 0x60, 0x4c, 0x8b, 0x65, 0x94, 0x5c, 0xba, 0xf8, 0xc4, - 0x98, 0x8a, 0xc7, 0x98, 0x54, 0xd8, 0xba, 0xba, 0x20, 0x58, 0x80, 0xd1, 0xe7, 0xb0, 0x79, 0x46, - 0xdb, 0x36, 0xe3, 0x4e, 0xdf, 0xf5, 0xfa, 0xf6, 0x80, 0x76, 0x44, 0x0c, 0x1a, 0x08, 0xdd, 0xe8, - 0x8c, 0xb6, 0x9b, 0x21, 0xeb, 0x50, 0x72, 0xb2, 0xbf, 0x28, 0xb0, 0x5e, 0x25, 0x0e, 0x1f, 0xf9, - 0xa4, 0x49, 0xb8, 0x74, 0x1b, 0x21, 0x58, 0xf2, 0x9c, 0x21, 0x91, 0x3e, 0x89, 0xf3, 0xa4, 0xab, - 0x81, 0x4f, 0xcb, 0xd7, 0xae, 0x7e, 0x0c, 0x41, 0xb6, 0x03, 0x15, 0x76, 0x80, 0x64, 0x5a, 0x3c, - 0x13, 0xcf, 0x25, 0xf1, 0x03, 0x49, 0xb4, 0x02, 0x1a, 0x2a, 0xc2, 0xca, 0xd0, 0x79, 0x6b, 0x3b, - 0x7d, 0xa2, 0x2d, 0x65, 0x94, 0x5c, 0xaa, 0xb8, 0x63, 0x84, 0x29, 0x37, 0xa2, 0x94, 0x1b, 0x15, - 0x99, 0x72, 0x9c, 0x18, 0x3a, 0x6f, 0x4b, 0x7d, 0x92, 0xfd, 0x3b, 0x0e, 0x5a, 0x8d, 0xf0, 0x86, - 0x37, 0x70, 0x3d, 0x22, 0xbd, 0x64, 0x91, 0x8f, 0xfb, 0x10, 0x19, 0xb0, 0x19, 0xe1, 0x4c, 0x53, - 0x32, 0xf1, 0x5c, 0xaa, 0x98, 0x99, 0x4f, 0xd4, 0x74, 0x6c, 0x38, 0xd5, 0x1b, 0x93, 0x18, 0xc2, - 0x90, 0x22, 0x1e, 0x77, 0xf9, 0x95, 0xed, 0xd3, 0x37, 0x4c, 0x8b, 0x09, 0x1d, 0x5f, 0xcc, 0xe8, - 
0xb8, 0xc9, 0x05, 0xc3, 0x14, 0xa2, 0x98, 0xbe, 0xc1, 0x40, 0xa2, 0x23, 0x43, 0xdf, 0xc0, 0x0e, - 0x1d, 0xba, 0xdc, 0x16, 0x24, 0x97, 0x30, 0xdb, 0xf5, 0x6c, 0x5f, 0x3e, 0xb8, 0x16, 0xcf, 0x28, - 0xb9, 0xfb, 0x78, 0x2b, 0x00, 0x98, 0x92, 0x5f, 0xf7, 0xa2, 0x72, 0xd0, 0xff, 0x51, 0x20, 0x39, - 0x56, 0x8a, 0x4c, 0x50, 0xa5, 0x73, 0xe3, 0x6a, 0x15, 0x2f, 0x92, 0x2a, 0xea, 0x73, 0xb9, 0x6b, - 0x45, 0x08, 0xbc, 0x16, 0xca, 0x8c, 0x09, 0xe8, 0x18, 0x12, 0x3d, 0x97, 0x0c, 0xba, 0x51, 0x78, - 0xdf, 0xdd, 0x39, 0x3c, 0xa3, 0x2a, 0xe4, 0x4d, 0x8f, 0xfb, 0x57, 0x58, 0x2a, 0xd3, 0x5f, 0x41, - 0x6a, 0x82, 0x8c, 0x54, 0x88, 0x9f, 0x93, 0x2b, 0x59, 0x31, 0xc1, 0x11, 0xe5, 0x60, 0xf9, 0x32, - 0x68, 0x14, 0x51, 0x2e, 0xa9, 0x22, 0x92, 0x66, 0x45, 0x0b, 0x19, 0xa2, 0x85, 0x70, 0x08, 0x78, - 0x1e, 0xfb, 0x5a, 0xc9, 0xfe, 0xa6, 0xc0, 0x76, 0x8d, 0xf0, 0xb2, 0xc3, 0x3b, 0xa7, 0xff, 0xcb, - 0x53, 0xef, 0x43, 0xba, 0xeb, 0x70, 0x87, 0x11, 0x6e, 0x33, 0x3a, 0xf2, 0x3b, 0x91, 0x5f, 0xbb, - 0x33, 0x6a, 0x2a, 0x21, 0xa8, 0x29, 0x30, 0x78, 0xb5, 0x3b, 0x79, 0xcd, 0xfe, 0x1e, 0x83, 0x9d, - 0x05, 0xf9, 0x92, 0xdd, 0xfc, 0x1a, 0x1e, 0x88, 0xe4, 0xd8, 0x22, 0xac, 0xc8, 0xcf, 0x2f, 0xdf, - 0x9f, 0xef, 0x50, 0x3e, 0x4c, 0xb3, 0xc8, 0x0c, 0xc3, 0xa9, 0xde, 0xf5, 0x45, 0xff, 0x43, 0x91, - 0xc9, 0x0e, 0xef, 0xe8, 0xfb, 0xf1, 0x93, 0x86, 0x26, 0x5e, 0xfc, 0x17, 0x13, 0x1f, 0xe2, 0x55, - 0x5f, 0x88, 0x06, 0x9e, 0x79, 0x54, 0x99, 0xad, 0x4f, 0x20, 0x7e, 0x46, 0xdb, 0xb2, 0xa2, 0xd1, - 0x4c, 0x04, 0x07, 0xb4, 0x8d, 0x03, 0x76, 0xf6, 0x2b, 0x58, 0xad, 0x11, 0x1e, 0x5c, 0x65, 0x31, - 0xdc, 0x4e, 0xec, 0x19, 0xa4, 0x23, 0xb1, 0x3b, 0x99, 0xfb, 0x4b, 0x81, 0xf8, 0x01, 0x6d, 0xa3, - 0x34, 0xc4, 0xdc, 0xae, 0x8c, 0x3b, 0xe6, 0x76, 0x51, 0x7e, 0x6a, 0x1c, 0x6f, 0xcd, 0x8b, 0x4f, - 0x4d, 0xe1, 0x04, 0xe3, 0x0e, 0x1f, 0x31, 0xd1, 0xed, 0xe9, 0xa2, 0x36, 0x8f, 0x6e, 0x0a, 0x3e, - 0x96, 0x38, 0xb4, 0x09, 0xcb, 0xc4, 0xf7, 0xa9, 0x2f, 0x46, 0x63, 0x12, 0x87, 0x17, 0xf4, 0x10, - 0x92, 0x3d, 0x77, 0x40, 0xec, 0x91, 
0xef, 0x32, 0x6d, 0x59, 0xcc, 0xd4, 0xfb, 0x01, 0xe1, 0xd8, - 0x77, 0x19, 0x7a, 0x0e, 0xa9, 0xa0, 0x34, 0xed, 0x1e, 0xf5, 0x87, 0x0e, 0xd7, 0x12, 0xc2, 0xd2, - 0xce, 0x82, 0x5a, 0xae, 0x0a, 0x00, 0x86, 0xee, 0xf8, 0x9c, 0xfd, 0x53, 0x81, 0xd5, 0xa9, 0x32, - 0x47, 0x07, 0x90, 0x12, 0xa6, 0x64, 0x67, 0x84, 0x49, 0xda, 0x7b, 0x57, 0x67, 0x18, 0x55, 0x77, - 0x40, 0xc2, 0xe3, 0xcb, 0x7b, 0x18, 0x7a, 0xe3, 0x9b, 0x4e, 0x00, 0xae, 0x79, 0xd3, 0x41, 0x28, - 0xef, 0x0e, 0x22, 0x76, 0x87, 0x20, 0xca, 0xea, 0x6c, 0x3f, 0xe7, 0x29, 0xa8, 0xb3, 0xff, 0x45, - 0xf4, 0x18, 0xf4, 0xaa, 0x59, 0x6a, 0xb6, 0xec, 0xa6, 0x89, 0x4f, 0xea, 0x56, 0xcd, 0x6e, 0xfd, - 0x70, 0x64, 0xda, 0x75, 0xeb, 0xa4, 0x74, 0x58, 0xaf, 0xa8, 0xf7, 0xd0, 0x23, 0xd8, 0x59, 0xc0, - 0x6f, 0x58, 0x87, 0x75, 0xcb, 0x54, 0x15, 0xb4, 0x0b, 0xda, 0x02, 0x76, 0xb9, 0xd4, 0xda, 0x7f, - 0xa9, 0xc6, 0xf2, 0xcf, 0x60, 0x45, 0xbe, 0x3c, 0xda, 0x04, 0xf5, 0xa0, 0x51, 0x9e, 0xd5, 0xfe, - 0x11, 0xac, 0x8f, 0xa9, 0x95, 0xc6, 0x6b, 0xeb, 0xb0, 0x51, 0xaa, 0xa8, 0x4a, 0xfe, 0x14, 0x92, - 0xe3, 0x1a, 0x40, 0x5b, 0x80, 0x02, 0x4c, 0xb3, 0x55, 0x6a, 0x1d, 0x37, 0x27, 0x64, 0xa7, 0xe9, - 0x47, 0xa6, 0x55, 0xa9, 0x5b, 0x35, 0x55, 0x99, 0xa1, 0xe3, 0x63, 0xcb, 0x0a, 0xe8, 0x31, 0xb4, - 0x01, 0x6b, 0x13, 0xf4, 0x4a, 0xc3, 0x32, 0xd5, 0x78, 0xfe, 0x5b, 0x80, 0xeb, 0xf4, 0xa1, 0x6d, - 0xd8, 0xa8, 0x94, 0x5a, 0x25, 0xbb, 0xda, 0xc0, 0xaf, 0x4a, 0xad, 0x09, 0x5b, 0x9b, 0xa0, 0x4e, - 0x32, 0x4a, 0x27, 0xb8, 0xa1, 0x2a, 0xc5, 0x9f, 0xe3, 0x90, 0x9e, 0x5e, 0xae, 0xd0, 0x00, 0x36, - 0x16, 0xac, 0x33, 0xe8, 0xb3, 0xf9, 0xf9, 0x73, 0xc3, 0x46, 0xa4, 0xe7, 0x6f, 0x03, 0x95, 0x2d, - 0xdb, 0x83, 0xf5, 0xb9, 0x49, 0x86, 0xf6, 0x6e, 0xf9, 0xfb, 0xd2, 0x73, 0xb7, 0x1d, 0x8a, 0xa8, - 0x03, 0xea, 0xec, 0x94, 0x42, 0x9f, 0xce, 0x4b, 0x2f, 0xfa, 0x37, 0xe9, 0x7b, 0xef, 0xc5, 0x49, - 0x23, 0x26, 0x24, 0xc2, 0x89, 0x84, 0x76, 0xe7, 0x45, 0xae, 0xe7, 0x9b, 0xfe, 0xe8, 0x06, 0x6e, - 0xa8, 0xa6, 0xdc, 0x82, 0xe9, 0xd5, 0xb6, 0xbc, 0x26, 0x33, 0x57, 0x3a, 
0xaa, 0x1f, 0x05, 0xdb, - 0xc0, 0x8f, 0xc5, 0xbe, 0xcb, 0x4f, 0x47, 0x6d, 0xa3, 0x43, 0x87, 0x85, 0x3e, 0x3d, 0x23, 0xe7, - 0x05, 0xb9, 0x2f, 0x77, 0xcf, 0x0b, 0x7d, 0x1a, 0x6e, 0xb8, 0xac, 0x30, 0xb5, 0x43, 0xb7, 0x13, - 0x82, 0xfa, 0xf4, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa8, 0xc6, 0xa7, 0xc9, 0x5b, 0x0b, 0x00, - 0x00, + // 1101 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x51, 0x73, 0xda, 0xc6, + 0x13, 0x8f, 0xc0, 0xc6, 0x66, 0xf9, 0x1b, 0x2b, 0x67, 0xff, 0x6d, 0x59, 0x71, 0x12, 0x86, 0xe9, + 0xd4, 0x94, 0x07, 0xd1, 0x92, 0x36, 0xd3, 0x26, 0xd3, 0x99, 0x40, 0x10, 0x04, 0x8f, 0x03, 0x9e, + 0x03, 0x3b, 0x6d, 0x5f, 0x34, 0x02, 0x4e, 0x58, 0x36, 0xe8, 0xa8, 0xee, 0x70, 0xe2, 0x2f, 0xd1, + 0x87, 0xbe, 0x76, 0xa6, 0xdf, 0xa0, 0xaf, 0xfd, 0x24, 0xfd, 0x02, 0xed, 0xa7, 0xe8, 0x63, 0x47, + 0xa7, 0x03, 0x23, 0xc0, 0x8e, 0xdd, 0x87, 0x3e, 0xe9, 0x6e, 0xf7, 0xb7, 0xb7, 0xb7, 0xbf, 0xdb, + 0x5d, 0x2d, 0x64, 0x1d, 0x62, 0x33, 0x5e, 0x60, 0xc4, 0xbf, 0x74, 0xbd, 0x7e, 0xa1, 0x15, 0x7e, + 0xc5, 0xa7, 0x4b, 0x8c, 0x91, 0x4f, 0x39, 0x45, 0x1b, 0x02, 0x63, 0x48, 0x8c, 0xfe, 0xb4, 0x4f, + 0x69, 0x7f, 0x40, 0x0a, 0x42, 0xd9, 0x19, 0x3b, 0x05, 0xee, 0x0e, 0x09, 0xe3, 0xf6, 0x70, 0x14, + 0xe2, 0xf5, 0x27, 0xf3, 0x80, 0xde, 0xd8, 0xb7, 0xb9, 0x4b, 0x3d, 0xa9, 0xdf, 0x0d, 0x7d, 0xf2, + 0xab, 0x11, 0x61, 0x85, 0x53, 0x7b, 0x30, 0x96, 0x8e, 0xb2, 0xfb, 0xa0, 0xd7, 0x08, 0xaf, 0x06, + 0x5a, 0x79, 0x91, 0xba, 0xe7, 0x50, 0x4c, 0x7e, 0x1c, 0x13, 0xc6, 0xb3, 0xbf, 0x2a, 0xf0, 0x68, + 0xa9, 0x9a, 0x8d, 0xa8, 0xc7, 0x08, 0xd2, 0x60, 0xed, 0x92, 0xf8, 0xcc, 0xa5, 0x9e, 0xa6, 0x64, + 0x94, 0x5c, 0x12, 0x4f, 0xb6, 0xe8, 0x19, 0xac, 0x04, 0xce, 0xb4, 0x58, 0x46, 0xc9, 0xa5, 0x8b, + 0x4f, 0x8d, 0x48, 0x3c, 0xc6, 0xec, 0x81, 0xed, 0xab, 0x11, 0xc1, 0x02, 0x8c, 0x3e, 0x87, 0xed, + 0x73, 0xda, 0xb1, 0x18, 0xb7, 0xfb, 0xae, 0xd7, 0xb7, 0x06, 0xb4, 0x2b, 0x62, 0xd0, 0x40, 0x9c, + 0x8d, 0xce, 0x69, 0xa7, 0x15, 0xaa, 0x8e, 0xa4, 0x26, 
0xfb, 0x93, 0x02, 0x6a, 0x95, 0xd8, 0x7c, + 0xec, 0x13, 0x4c, 0x1c, 0xe2, 0x13, 0xaf, 0x2b, 0x6e, 0x35, 0xf2, 0xe9, 0x39, 0xe9, 0xf2, 0xc9, + 0xad, 0xe4, 0x16, 0x21, 0x58, 0xf1, 0xec, 0x61, 0x78, 0xab, 0x24, 0x16, 0xeb, 0xd9, 0x18, 0xe2, + 0x19, 0x25, 0xb7, 0x7a, 0x1d, 0x43, 0x11, 0xd6, 0x86, 0xf6, 0x07, 0xcb, 0xee, 0x13, 0x6d, 0x25, + 0xa3, 0xe4, 0x52, 0xc5, 0x3d, 0x23, 0xa4, 0xd9, 0x98, 0xd0, 0x6c, 0x54, 0x24, 0xcd, 0x38, 0x31, + 0xb4, 0x3f, 0x94, 0xfa, 0x24, 0xfb, 0x67, 0x1c, 0xb4, 0x1a, 0xe1, 0x4d, 0x6f, 0xe0, 0x7a, 0x44, + 0xde, 0x8c, 0x49, 0x3a, 0xd1, 0x4b, 0x58, 0x77, 0xa4, 0x48, 0x5b, 0xc9, 0xc4, 0x73, 0xa9, 0x65, + 0xc4, 0x44, 0x62, 0xc1, 0x53, 0x03, 0x84, 0x21, 0x45, 0x3c, 0xee, 0xf2, 0x2b, 0xcb, 0xa7, 0xef, + 0x99, 0x16, 0x13, 0xf6, 0x5f, 0xcc, 0xd9, 0xdf, 0xe4, 0xda, 0x30, 0x85, 0x29, 0xa6, 0xef, 0x31, + 0x90, 0xc9, 0x92, 0xa1, 0x6f, 0x60, 0x8f, 0x0e, 0x5d, 0x6e, 0x09, 0x91, 0x4b, 0x98, 0xe5, 0x7a, + 0x96, 0x2f, 0x1f, 0x57, 0xb0, 0xb1, 0x8e, 0x77, 0x02, 0x80, 0x29, 0xf5, 0x75, 0x6f, 0xf2, 0xf4, + 0xfa, 0xdf, 0x0a, 0x24, 0xa7, 0x87, 0x22, 0x13, 0x54, 0x79, 0xb9, 0x69, 0x66, 0x0a, 0xee, 0x53, + 0x45, 0x7d, 0x81, 0xb3, 0xf6, 0x04, 0x81, 0x37, 0x43, 0x9b, 0xa9, 0x00, 0x9d, 0x40, 0xc2, 0x71, + 0xc9, 0xa0, 0x37, 0x09, 0xef, 0xdb, 0x7b, 0x87, 0x67, 0x54, 0x85, 0xbd, 0xe9, 0x71, 0xff, 0x0a, + 0xcb, 0xc3, 0xf4, 0xb7, 0x90, 0x9a, 0x11, 0x23, 0x15, 0xe2, 0x17, 0xe4, 0x4a, 0xe6, 0x46, 0xb0, + 0x44, 0x39, 0x58, 0xbd, 0x0c, 0x8a, 0x42, 0x24, 0x46, 0xaa, 0x88, 0xa4, 0x5b, 0x51, 0x2e, 0x86, + 0x28, 0x17, 0x1c, 0x02, 0x5e, 0xc4, 0xbe, 0x56, 0xb2, 0xbf, 0x28, 0xb0, 0x5b, 0x23, 0xbc, 0x6c, + 0xf3, 0xee, 0xd9, 0x6d, 0x4f, 0x1c, 0xbf, 0xef, 0x13, 0xbf, 0x86, 0x74, 0xcf, 0xe6, 0x36, 0x23, + 0xdc, 0x62, 0x74, 0xec, 0x77, 0x27, 0xf7, 0xd9, 0x9f, 0x3b, 0xa2, 0x12, 0x82, 0x5a, 0x02, 0x83, + 0x37, 0x7a, 0xb3, 0xdb, 0xec, 0x6f, 0x31, 0xd8, 0x5b, 0xc2, 0x93, 0xac, 0xd8, 0x77, 0xf0, 0x3f, + 0x41, 0x8a, 0x25, 0xc2, 0x61, 0x9a, 0x22, 0xee, 0xf8, 0xe5, 0xc7, 0x79, 0x0e, 0xed, 0x43, 
0x7a, + 0x05, 0x23, 0x0c, 0xa7, 0x9c, 0xeb, 0x8d, 0xfe, 0xbb, 0x22, 0x49, 0x0e, 0xf7, 0xe8, 0xbb, 0xe9, + 0x53, 0x86, 0x2e, 0x5e, 0xfd, 0x1b, 0x17, 0xff, 0xc5, 0x6b, 0xbe, 0x12, 0x05, 0x3b, 0xf7, 0x98, + 0x92, 0xad, 0x4f, 0x20, 0x7e, 0x4e, 0x3b, 0x32, 0x93, 0xd1, 0x5c, 0x04, 0x87, 0xb4, 0x83, 0x03, + 0x75, 0xf6, 0x2b, 0xd8, 0xa8, 0x11, 0x1e, 0x6c, 0x65, 0x12, 0xdc, 0xcd, 0xec, 0x39, 0xa4, 0x27, + 0x66, 0xf7, 0x72, 0xf7, 0x97, 0x02, 0xf1, 0x43, 0xda, 0x41, 0x69, 0x88, 0xb9, 0x3d, 0x19, 0x77, + 0xcc, 0xed, 0xa1, 0x7c, 0xa4, 0xe5, 0xee, 0x2c, 0x9a, 0x47, 0x3a, 0x6d, 0x82, 0x71, 0x9b, 0x8f, + 0x99, 0xa8, 0xf2, 0x74, 0x51, 0x5b, 0x44, 0xb7, 0x84, 0x1e, 0x4b, 0x1c, 0xda, 0x86, 0x55, 0xe2, + 0xfb, 0xd4, 0x17, 0xad, 0x30, 0x89, 0xc3, 0x0d, 0x7a, 0x04, 0x49, 0xc7, 0x1d, 0x10, 0x6b, 0xec, + 0xbb, 0x4c, 0x5b, 0xcd, 0xc4, 0x73, 0x49, 0xbc, 0x1e, 0x08, 0x4e, 0x7c, 0x97, 0xa1, 0x17, 0x90, + 0x0a, 0x52, 0xd3, 0x72, 0xa8, 0x3f, 0xb4, 0xb9, 0x96, 0x10, 0x9e, 0xf6, 0x96, 0xe4, 0x72, 0x55, + 0x00, 0x30, 0xf4, 0xa6, 0xeb, 0xec, 0x1f, 0x0a, 0x6c, 0x44, 0xd2, 0x1c, 0x1d, 0x42, 0x4a, 0xb8, + 0x92, 0x95, 0x11, 0x92, 0x74, 0x70, 0x5b, 0x65, 0x18, 0x55, 0x77, 0x40, 0xc2, 0xe5, 0x9b, 0x07, + 0x18, 0x9c, 0xe9, 0x4e, 0x27, 0x00, 0xd7, 0xba, 0x68, 0x10, 0xca, 0xed, 0x41, 0xc4, 0xee, 0x11, + 0x44, 0x59, 0x9d, 0xaf, 0xe7, 0x3c, 0x15, 0xbf, 0xab, 0xc8, 0xbf, 0x0f, 0x3d, 0x01, 0xbd, 0x6a, + 0x96, 0x5a, 0x6d, 0xab, 0x65, 0xe2, 0xd3, 0x7a, 0xa3, 0x66, 0xb5, 0xbf, 0x3f, 0x36, 0xad, 0x7a, + 0xe3, 0xb4, 0x74, 0x54, 0xaf, 0xa8, 0x0f, 0xd0, 0x63, 0xd8, 0x5b, 0xa2, 0x6f, 0x36, 0x8e, 0xea, + 0x0d, 0x53, 0x55, 0xd0, 0x3e, 0x68, 0x4b, 0xd4, 0xe5, 0x52, 0xfb, 0xf5, 0x1b, 0x35, 0x96, 0x7f, + 0x0e, 0x6b, 0xf2, 0xe5, 0xd1, 0x36, 0xa8, 0x87, 0xcd, 0xf2, 0xfc, 0xe9, 0xff, 0x87, 0x87, 0x53, + 0x69, 0xa5, 0xf9, 0xae, 0x71, 0xd4, 0x2c, 0x55, 0x54, 0x25, 0x7f, 0x06, 0xc9, 0x69, 0x0e, 0xa0, + 0x1d, 0x40, 0x01, 0xa6, 0xd5, 0x2e, 0xb5, 0x4f, 0x5a, 0x33, 0xb6, 0x51, 0xf9, 0xb1, 0xd9, 0xa8, + 0xd4, 0x1b, 0x35, 0x55, 0x99, 
0x93, 0xe3, 0x93, 0x46, 0x23, 0x90, 0xc7, 0xd0, 0x16, 0x6c, 0xce, + 0xc8, 0x2b, 0xcd, 0x86, 0xa9, 0xc6, 0xf3, 0x2f, 0x01, 0xae, 0xe9, 0x43, 0xbb, 0xb0, 0x55, 0x29, + 0xb5, 0x4b, 0x56, 0xb5, 0x89, 0xdf, 0x96, 0xda, 0x33, 0xbe, 0xb6, 0x41, 0x9d, 0x55, 0x94, 0x4e, + 0x71, 0x53, 0x55, 0x8a, 0x3f, 0xc7, 0x21, 0x1d, 0x1d, 0xa0, 0xd0, 0x00, 0xb6, 0x96, 0x8c, 0x2c, + 0xe8, 0xb3, 0xc5, 0xfe, 0x73, 0xc3, 0xd4, 0xa3, 0xe7, 0xef, 0x02, 0x95, 0x25, 0xeb, 0xc0, 0xc3, + 0x85, 0x4e, 0x86, 0x0e, 0xee, 0xf8, 0xdb, 0xd2, 0x73, 0x77, 0x6d, 0x8a, 0xa8, 0x0b, 0xea, 0x7c, + 0x97, 0x42, 0x9f, 0x2e, 0x5a, 0x2f, 0xfb, 0x27, 0xe9, 0x07, 0x1f, 0xc5, 0x49, 0x27, 0x26, 0x24, + 0xc2, 0x8e, 0x84, 0xf6, 0x17, 0x4d, 0xae, 0xfb, 0x9b, 0xfe, 0xf8, 0x06, 0x6d, 0x78, 0x4c, 0xb9, + 0x0d, 0xd1, 0xf1, 0xb5, 0xbc, 0x29, 0x99, 0x2b, 0x1d, 0xd7, 0x8f, 0x83, 0x29, 0xe0, 0x87, 0x62, + 0xdf, 0xe5, 0x67, 0xe3, 0x8e, 0xd1, 0xa5, 0xc3, 0x42, 0x9f, 0x9e, 0x93, 0x8b, 0x82, 0x9c, 0x89, + 0x7b, 0x17, 0x85, 0x3e, 0x0d, 0xa7, 0x58, 0x56, 0x88, 0xcc, 0xc9, 0x9d, 0x84, 0x90, 0x3e, 0xfb, + 0x27, 0x00, 0x00, 0xff, 0xff, 0x36, 0xb8, 0x08, 0x14, 0x3f, 0x0b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. diff --git a/sdk/go/request.go b/sdk/go/request.go index 5ffacea2e8..9e97dffb72 100644 --- a/sdk/go/request.go +++ b/sdk/go/request.go @@ -8,23 +8,30 @@ import ( ) var ( - ErrInvalidFeatureName = "invalid feature name %s provided, feature names must be in the format featureSet:version:featureName" + ErrInvalidFeatureName = "invalid feature ids %s provided, feature names must be in the format /:" ) // OnlineFeaturesRequest wrapper on feast.serving.GetOnlineFeaturesRequest. type OnlineFeaturesRequest struct { - - // Features is the list of features to obtain from Feast. Each feature must be given by its fully qualified ID, - // in the format featureSet:version:featureName. - Features []string + // Features is the list of features to obtain from Feast. 
Each feature can be given as + // + // : + // / + // /: + // The only required components are the feature name and project. + Features []string // Entities is the list of entity rows to retrieve features on. Each row is a map of entity name to entity value. - Entities []Row + Entities []Row + + // Project is the default project to use when looking up features. This is only used when a project is not found + // within the feature id. + Project string } // Builds the feast-specified request payload from the wrapper. func (r OnlineFeaturesRequest) buildRequest() (*serving.GetOnlineFeaturesRequest, error) { - featureSets, err := buildFeatureSets(r.Features) + features, err := buildFeatures(r.Features, r.Project) if err != nil { return nil, err } @@ -33,55 +40,62 @@ func (r OnlineFeaturesRequest) buildRequest() (*serving.GetOnlineFeaturesRequest for i := range r.Entities { entityRows[i] = &serving.GetOnlineFeaturesRequest_EntityRow{ - Fields: r.Entities[i], + Fields: r.Entities[i], } } return &serving.GetOnlineFeaturesRequest{ - FeatureSets: featureSets, - EntityRows: entityRows, + Features: features, + EntityRows: entityRows, }, nil } -// buildFeatureSets create a slice of FeatureSetRequest object from -// a slice of "feature_set:version:feature_name" string. -// -// It returns an error when "feature_set:version:feature_name" string -// has an invalid format. -func buildFeatureSets(features []string) ([]*serving.FeatureSetRequest, error) { - var requests []*serving.FeatureSetRequest - - // Map of "feature_set_name:version" to "FeatureSetRequest" pointer - // to reference existing FeatureSetRequest, if any. 
- fsNameVersionToRequest := make(map[string]*serving.FeatureSetRequest) - - for _, feature := range features { - splits := strings.Split(feature, ":") - if len(splits) != 3 { - return nil, fmt.Errorf(ErrInvalidFeatureName, feature) - } +// buildFeatures create a slice of FeatureReferences from a slice of "/:" +// It returns an error when the format is invalid +func buildFeatures(featureReferences []string, defaultProject string) ([]*serving.FeatureReference, error) { + var features []*serving.FeatureReference - featureSetName, featureSetVersionString, featureName := splits[0], splits[1], splits[2] - featureSetVersion, err := strconv.Atoi(featureSetVersionString) - if err != nil { - return nil, fmt.Errorf(ErrInvalidFeatureName, feature) + for _, featureRef := range featureReferences { + var project string + var name string + var version int + var featureSplit []string + + projectSplit := strings.Split(featureRef, "/") + + if len(projectSplit) == 2 { + project = projectSplit[0] + featureSplit = strings.Split(projectSplit[1], ":") + } else if len(projectSplit) == 1 { + project = defaultProject + featureSplit = strings.Split(projectSplit[0], ":") + } else { + return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) } - fsNameVersion := featureSetName + ":" + featureSetVersionString - if request, ok := fsNameVersionToRequest[fsNameVersion]; !ok { - request = &serving.FeatureSetRequest{ - Name: featureSetName, - Version: int32(featureSetVersion), - FeatureNames: []string{featureName}, + if len(featureSplit) == 2 { + name = featureSplit[0] + v, err := strconv.Atoi(featureSplit[1]) + if err != nil { + return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) } - fsNameVersionToRequest[fsNameVersion] = request - // Adding FeatureSetRequest in this step ensures the order of - // FeatureSetRequest in the slice follows the order of feature sets - // in the "features" argument in buildFeatureSets method. 
- requests = append(requests, request) + version = v + } else if len(featureSplit) == 1 { + name = featureSplit[0] } else { - request.FeatureNames = append(request.FeatureNames, featureName) + return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) } + + + if project == "" || name == "" || version < 0 { + return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) + } + + features = append(features, &serving.FeatureReference{ + Name: name, + Version: int32(version), + Project: project, + }) } - return requests, nil -} \ No newline at end of file + return features, nil +} diff --git a/sdk/go/request_test.go b/sdk/go/request_test.go index 3da4fd465b..2e403f0bd3 100644 --- a/sdk/go/request_test.go +++ b/sdk/go/request_test.go @@ -20,24 +20,40 @@ func TestGetOnlineFeaturesRequest(t *testing.T) { { name: "valid", req: OnlineFeaturesRequest{ - Features: []string{"fs:1:feature1", "fs:1:feature2", "fs:2:feature1"}, + Features: []string{"my_project_1/feature1:1", "my_project_2/feature1:1", "my_project_4/feature3", "feature2:2", "feature2"}, Entities: []Row{ {"entity1": Int64Val(1), "entity2": StrVal("bob")}, {"entity1": Int64Val(1), "entity2": StrVal("annie")}, {"entity1": Int64Val(1), "entity2": StrVal("jane")}, }, + Project: "my_project_3", }, want: &serving.GetOnlineFeaturesRequest{ - FeatureSets: []*serving.FeatureSetRequest{ + Features: []*serving.FeatureReference{ { - Name: "fs", - Version: 1, - FeatureNames: []string{"feature1", "feature2"}, + Project: "my_project_1", + Name: "feature1", + Version: 1, }, { - Name: "fs", - Version: 2, - FeatureNames: []string{"feature1"}, + Project: "my_project_2", + Name: "feature1", + Version: 1, + }, + { + Project: "my_project_4", + Name: "feature3", + Version: 0, + }, + { + Project: "my_project_3", + Name: "feature2", + Version: 2, + }, + { + Project: "my_project_3", + Name: "feature2", + Version: 0, }, }, EntityRows: []*serving.GetOnlineFeaturesRequest_EntityRow{ @@ -63,38 +79,72 @@ func TestGetOnlineFeaturesRequest(t *testing.T) 
{ OmitEntitiesInResponse: false, }, wantErr: false, - err: nil, + err: nil, + }, + { + name: "valid_project_in_name", + req: OnlineFeaturesRequest{ + Features: []string{"project/feature1"}, + Entities: []Row{}, + }, + want: &serving.GetOnlineFeaturesRequest{ + Features: []*serving.FeatureReference{ + { + Project: "project", + Name: "feature1", + Version: 0, + }, + }, + EntityRows: []*serving.GetOnlineFeaturesRequest_EntityRow{ + }, + OmitEntitiesInResponse: false, + }, + wantErr: false, + err: nil, + }, + { + name: "no_project", + req: OnlineFeaturesRequest{ + Features: []string{"feature1"}, + Entities: []Row{}, + }, + wantErr: true, + err: fmt.Errorf(ErrInvalidFeatureName, "feature1"), }, { name: "invalid_feature_name/wrong_format", req: OnlineFeaturesRequest{ - Features: []string{"fs1:feature1"}, - Entities: []Row{}, + Features: []string{"fs1:3:feature1"}, + Entities: []Row{}, + Project: "my_project", }, wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureName, "fs1:feature1"), + err: fmt.Errorf(ErrInvalidFeatureName, "fs1:3:feature1"), }, { name: "invalid_feature_name/invalid_version", req: OnlineFeaturesRequest{ - Features: []string{"fs:a:feature1"}, - Entities: []Row{}, + Features: []string{"project/a:feature1"}, + Entities: []Row{}, }, wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureName, "fs:a:feature1"), + err: fmt.Errorf(ErrInvalidFeatureName, "project/a:feature1"), }, } for _, tc := range tt { t.Run(tc.name, func(t *testing.T) { got, err := tc.req.buildRequest() + if (err != nil) != tc.wantErr { t.Errorf("error = %v, wantErr %v", err, tc.wantErr) return } + if tc.wantErr && err.Error() != tc.err.Error() { t.Errorf("error = %v, expected err = %v", err, tc.err) return } + if !cmp.Equal(got, tc.want) { m := json.Marshaler{} gotJson, _ := m.MarshalToString(got) diff --git a/sdk/go/response_test.go b/sdk/go/response_test.go index 9975457c14..882c1695d5 100644 --- a/sdk/go/response_test.go +++ b/sdk/go/response_test.go @@ -13,14 +13,14 @@ RawResponse: 
&serving.GetOnlineFeaturesResponse{ FieldValues: []*serving.GetOnlineFeaturesResponse_FieldValues{ { Fields: map[string]*types.Value{ - "fs:1:feature1": Int64Val(1), - "fs:1:feature2": &types.Value{}, + "project1/feature1": Int64Val(1), + "project1/feature2": &types.Value{}, }, }, { Fields: map[string]*types.Value{ - "fs:1:feature1": Int64Val(2), - "fs:1:feature2": Int64Val(2), + "project1/feature1": Int64Val(2), + "project1/feature2": Int64Val(2), }, }, }, @@ -30,8 +30,8 @@ RawResponse: &serving.GetOnlineFeaturesResponse{ func TestOnlineFeaturesResponseToRow(t *testing.T) { actual := response.Rows() expected := []Row{ - {"fs:1:feature1": Int64Val(1), "fs:1:feature2": &types.Value{}}, - {"fs:1:feature1": Int64Val(2), "fs:1:feature2": Int64Val(2)}, + {"project1/feature1": Int64Val(1), "project1/feature2": &types.Value{}}, + {"project1/feature1": Int64Val(2), "project1/feature2": Int64Val(2)}, } if !cmp.Equal(actual, expected) { t.Errorf("expected: %v, got: %v", expected, actual) @@ -53,7 +53,7 @@ func TestOnlineFeaturesResponseToInt64Array(t *testing.T) { { name: "valid", args: args{ - order: []string{"fs:1:feature2", "fs:1:feature1"}, + order: []string{"project1/feature2", "project1/feature1" }, fillNa: []int64{-1, -1}, }, want: [][]int64{{-1, 1}, {2, 2}}, @@ -72,12 +72,12 @@ func TestOnlineFeaturesResponseToInt64Array(t *testing.T) { { name: "length mismatch", args: args{ - order: []string{"fs:1:feature2", "fs:1:feature3"}, + order: []string{"project1/feature2", "project1/feature3" }, fillNa: []int64{-1, -1}, }, want: nil, wantErr: true, - err: fmt.Errorf(ErrFeatureNotFound, "fs:1:feature3"), + err: fmt.Errorf(ErrFeatureNotFound, "project1/feature3"), }, } for _, tc := range tt { diff --git a/sdk/java/src/main/java/com/gojek/feast/FeastClient.java b/sdk/java/src/main/java/com/gojek/feast/FeastClient.java index 91ddd2a442..91ba572efc 100644 --- a/sdk/java/src/main/java/com/gojek/feast/FeastClient.java +++ b/sdk/java/src/main/java/com/gojek/feast/FeastClient.java @@ 
-16,7 +16,7 @@ */ package com.gojek.feast; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.FeatureReference; import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; @@ -59,42 +59,46 @@ public GetFeastServingInfoResponse getFeastServingInfo() { /** * Get online features from Feast. * - *

See {@link #getOnlineFeatures(List, List, boolean)} + *

See {@link #getOnlineFeatures(List, List, str)} * - * @param featureIds list of feature id to retrieve, feature id follows this format - * [feature_set_name]:[version]:[feature_name] + * @param features list of string feature references to retrieve, feature reference follows this format + * [project]/[name]:[version] * @param rows list of {@link Row} to select the entities to retrieve the features for + * @param defaultProject {@link String} Default project to find features in if not provided in + * feature reference. * @return list of {@link Row} containing features */ - public List getOnlineFeatures(List featureIds, List rows) { - return getOnlineFeatures(featureIds, rows, false); + public List getOnlineFeatures(List features, List rows, String defaultProject) { + return getOnlineFeatures(features, rows, defaultProject, false); } /** * Get online features from Feast. * - *

Example of retrieving online features for driver feature set, version 1, with features - * driver_id and driver_name + *

Example of retrieving online features for the driver project, with features + * driver_id and driver_name, both version 1 * *

{@code
    * FeastClient client = FeastClient.create("localhost", 6566);
-   * List requestedFeatureIds = Arrays.asList("driver:1:driver_id", "driver:1:driver_name");
+   * List requestedFeatureIds = Arrays.asList("driver/driver_id:1", "driver/driver_name:1");
    * List requestedRows =
    *         Arrays.asList(Row.create().set("driver_id", 123), Row.create().set("driver_id", 456));
    * List retrievedFeatures = client.getOnlineFeatures(requestedFeatureIds, requestedRows);
    * retrievedFeatures.forEach(System.out::println);
    * }
* - * @param featureIds list of feature id to retrieve, feature id follows this format - * [feature_set_name]:[version]:[feature_name] + * @param featureRefStrings list of feature refs to retrieve, feature refs follow this format + * [project]/[name]:[version] * @param rows list of {@link Row} to select the entities to retrieve the features for + * @param defaultProject {@link String} Default project to find features in if not provided in + * feature reference. * @param omitEntitiesInResponse if true, the returned {@link Row} will not contain field and * value for the entity * @return list of {@link Row} containing features */ public List getOnlineFeatures( - List featureIds, List rows, boolean omitEntitiesInResponse) { - List featureSets = RequestUtil.createFeatureSets(featureIds); + List featureRefStrings, List rows, String defaultProject, boolean omitEntitiesInResponse) { + List features = RequestUtil.createFeatureRefs(featureRefStrings, defaultProject); List entityRows = rows.stream() .map( @@ -108,7 +112,7 @@ public List getOnlineFeatures( GetOnlineFeaturesResponse response = stub.getOnlineFeatures( GetOnlineFeaturesRequest.newBuilder() - .addAllFeatureSets(featureSets) + .addAllFeatures(features) .addAllEntityRows(entityRows) .setOmitEntitiesInResponse(omitEntitiesInResponse) .build()); diff --git a/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java b/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java index e80b40bad9..530af05e76 100644 --- a/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java +++ b/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java @@ -16,60 +16,73 @@ */ package com.gojek.feast; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.FeatureReference; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; -import org.apache.commons.lang3.tuple.ImmutablePair; -import org.apache.commons.lang3.tuple.Pair; 
@SuppressWarnings("WeakerAccess") public class RequestUtil { - public static List createFeatureSets(List featureIds) { - if (featureIds == null) { - throw new IllegalArgumentException("featureIds cannot be null"); + + public static List createFeatureRefs(List featureRefStrings, + String defaultProject) { + if (featureRefStrings == null) { + throw new IllegalArgumentException("featureRefs cannot be null"); } - // featureSetMap is a map of pair of feature set name and version -> a list of feature names - Map, List> featureSetMap = new HashMap<>(); + List featureRefs = new ArrayList<>(); + + for (String featureRefString : featureRefStrings) { + String project; + String name; + int version = 0; + String[] featureSplit; + String[] projectSplit = featureRefString.split("/"); - for (String featureId : featureIds) { - String[] parts = featureId.split(":"); - if (parts.length < 3) { + if (projectSplit.length == 2) { + project = projectSplit[0]; + featureSplit = projectSplit[1].split(":"); + } else if (projectSplit.length == 1) { + project = defaultProject; + featureSplit = projectSplit[0].split(":"); + } else { throw new IllegalArgumentException( String.format( - "Feature id '%s' has invalid format. Expected format: ::.", - featureId)); + "Feature id '%s' has invalid format. Expected format: ::.", + featureRefString)); } - String featureSetName = parts[0]; - int featureSetVersion; - try { - featureSetVersion = Integer.parseInt(parts[1]); - } catch (NumberFormatException e) { + + if (featureSplit.length == 2) { + name = featureSplit[0]; + try { + version = Integer.parseInt(featureSplit[1]); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + String.format( + "Feature id '%s' contains invalid version. Expected format: /:.", + featureRefString)); + } + } else if (projectSplit.length == 1) { + name = featureSplit[0]; + } else { throw new IllegalArgumentException( String.format( - "Feature id '%s' contains invalid version. 
Expected format: ::.", - parts[1])); + "Feature id '%s' has invalid format. Expected format: /:.", + featureRefString)); } - Pair key = new ImmutablePair<>(featureSetName, featureSetVersion); - if (!featureSetMap.containsKey(key)) { - featureSetMap.put(key, new ArrayList<>()); + if (project.isEmpty() || name.isEmpty() || version < 0) { + throw new IllegalArgumentException( + String.format( + "Feature id '%s' has invalid format. Expected format: /:.", + featureRefString)); } - String featureName = parts[2]; - featureSetMap.get(key).add(featureName); + + featureRefs.add( + FeatureReference.newBuilder().setName(name).setProject(project).setVersion(version) + .build()); } - return featureSetMap.entrySet().stream() - .map( - entry -> - FeatureSetRequest.newBuilder() - .setName(entry.getKey().getKey()) - .setVersion(entry.getKey().getValue()) - .addAllFeatureNames(entry.getValue()) - .build()) - .collect(Collectors.toList()); + ; return featureRefs; + } } diff --git a/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java b/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java index 21c8bde15e..47505518ae 100644 --- a/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java +++ b/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java @@ -20,7 +20,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.protobuf.TextFormat; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.FeatureReference; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; @@ -36,43 +36,56 @@ class RequestUtilTest { private static Stream provideValidFeatureIds() { return Stream.of( Arguments.of( - Collections.singletonList("driver:1:driver_id"), + Collections.singletonList("driver_project/driver_id:1"), Collections.singletonList( - FeatureSetRequest.newBuilder() - .setName("driver") - .setVersion(1) - .addFeatureNames("driver_id"))), + FeatureReference.newBuilder() + 
.setProject("driver_project") + .setName("driver_id") + .setVersion(1).build() + ) + ), Arguments.of( - Arrays.asList("driver:1:driver_id", "driver:1:driver_name"), - Collections.singletonList( - FeatureSetRequest.newBuilder() - .setName("driver") + Arrays.asList("driver_project/driver_id:1", "driver_project/driver_name:1"), + Arrays.asList( + FeatureReference.newBuilder() + .setProject("driver_project") + .setName("driver_id") + .setVersion(1) + .build(), + FeatureReference.newBuilder() + .setProject("driver_project") + .setName("driver_name") .setVersion(1) - .addAllFeatureNames(Arrays.asList("driver_id", "driver_name")) - .build())), + .build()) + ), Arguments.of( - Arrays.asList("driver:1:driver_id", "driver:1:driver_name", "booking:2:booking_id"), + Arrays.asList("driver_project/driver_id:1", "driver_project/driver_name:1", "booking_project/driver_name:1"), Arrays.asList( - FeatureSetRequest.newBuilder() - .setName("driver") + FeatureReference.newBuilder() + .setProject("driver_project") + .setVersion(1) + .setName("driver_id") + .build(), + FeatureReference.newBuilder() + .setProject("driver_project") .setVersion(1) - .addAllFeatureNames(Arrays.asList("driver_id", "driver_name")) + .setName("driver_name") .build(), - FeatureSetRequest.newBuilder() - .setName("booking") - .setVersion(2) - .addFeatureNames("booking_id") + FeatureReference.newBuilder() + .setProject("booking_project") + .setVersion(1) + .setName("driver_name") .build()))); } @ParameterizedTest @MethodSource("provideValidFeatureIds") void createFeatureSets_ShouldReturnFeatureSetsForValidFeatureIds( - List input, List expected) { - List actual = RequestUtil.createFeatureSets(input); + List input, List expected) { + List actual = RequestUtil.createFeatureRefs(input, "my-project"); // Order of the actual and expected featureSets do no not matter - actual.sort(Comparator.comparing(FeatureSetRequest::getName)); - expected.sort(Comparator.comparing(FeatureSetRequest::getName)); + 
actual.sort(Comparator.comparing(FeatureReference::getName)); + expected.sort(Comparator.comparing(FeatureReference::getName)); assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { String expectedString = TextFormat.printer().printToString(expected.get(i)); @@ -81,23 +94,21 @@ void createFeatureSets_ShouldReturnFeatureSetsForValidFeatureIds( } } - private static Stream provideInvalidFeatureIds() { + private static Stream provideInvalidFeatureRefs() { return Stream.of( - Arguments.of(Collections.singletonList("feature_set_only")), - Arguments.of(Collections.singletonList("missing:feature_name")), - Arguments.of(Collections.singletonList("invalid:version:value")), + Arguments.of(Collections.singletonList("missing:bad_version")), Arguments.of(Collections.singletonList(""))); } @ParameterizedTest - @MethodSource("provideInvalidFeatureIds") - void createFeatureSets_ShouldThrowExceptionForInvalidFeatureIds(List input) { - assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureSets(input)); + @MethodSource("provideInvalidFeatureRefs") + void createFeatureSets_ShouldThrowExceptionForInvalidFeatureRefs(List input) { + assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input, "my-project")); } @ParameterizedTest @NullSource - void createFeatureSets_ShouldThrowExceptionForNullFeatureIds(List input) { - assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureSets(input)); + void createFeatureSets_ShouldThrowExceptionForNullFeatureRefs(List input) { + assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input, "my-project")); } } diff --git a/sdk/python/feast/__init__.py b/sdk/python/feast/__init__.py index 5f62ab496f..adcac0cd24 100644 --- a/sdk/python/feast/__init__.py +++ b/sdk/python/feast/__init__.py @@ -1,4 +1,5 @@ from pkg_resources import get_distribution, DistributionNotFound + try: __version__ = get_distribution(__name__).version 
except DistributionNotFound: diff --git a/sdk/python/feast/client.py b/sdk/python/feast/client.py index 719022ea7a..640e587cc9 100644 --- a/sdk/python/feast/client.py +++ b/sdk/python/feast/client.py @@ -20,7 +20,8 @@ import time from collections import OrderedDict from math import ceil -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Tuple, Union, Optional +from typing import List from urllib.parse import urlparse import fastavro @@ -36,11 +37,18 @@ ApplyFeatureSetResponse, GetFeatureSetRequest, GetFeatureSetResponse, + CreateProjectRequest, + CreateProjectResponse, + ArchiveProjectRequest, + ArchiveProjectResponse, + ListProjectsRequest, + ListProjectsResponse, ) from feast.core.CoreService_pb2_grpc import CoreServiceStub from feast.core.FeatureSet_pb2 import FeatureSetStatus from feast.feature_set import FeatureSet, Entity from feast.job import Job +from feast.serving.ServingService_pb2 import FeatureReference from feast.loaders.abstract_producer import get_producer from feast.loaders.file import export_source_to_staging_location from feast.loaders.ingest import KAFKA_CHUNK_PRODUCTION_TIMEOUT @@ -53,7 +61,6 @@ GetOnlineFeaturesResponse, DatasetSource, DataFormat, - FeatureSetRequest, FeastServingType, ) from feast.serving.ServingService_pb2_grpc import ServingServiceStub @@ -64,6 +71,7 @@ GRPC_CONNECTION_TIMEOUT_APPLY = 600 # type: int FEAST_SERVING_URL_ENV_KEY = "FEAST_SERVING_URL" # type: str FEAST_CORE_URL_ENV_KEY = "FEAST_CORE_URL" # type: str +FEAST_PROJECT_ENV_KEY = "FEAST_PROJECT" # type: str BATCH_FEATURE_REQUEST_WAIT_TIME_SECONDS = 300 CPU_COUNT = os.cpu_count() # type: int @@ -74,7 +82,7 @@ class Client: """ def __init__( - self, core_url: str = None, serving_url: str = None, verbose: bool = False + self, core_url: str = None, serving_url: str = None, project: str = None ): """ The Feast Client should be initialized with at least one service url @@ -82,11 +90,11 @@ def __init__( Args: core_url: Feast Core URL. 
Used to manage features serving_url: Feast Serving URL. Used to retrieve features - verbose: Enable verbose logging + project: Sets the active project. This field is optional. """ self._core_url = core_url self._serving_url = serving_url - self._verbose = verbose + self._project = project self.__core_channel: grpc.Channel = None self.__serving_channel: grpc.Channel = None self._core_service_stub: CoreServiceStub = None @@ -96,6 +104,9 @@ def __init__( def core_url(self) -> str: """ Retrieve Feast Core URL + + Returns: + Feast Core URL string """ if self._core_url is not None: @@ -109,8 +120,8 @@ def core_url(self, value: str): """ Set the Feast Core URL - Returns: - Feast Core URL string + Args: + value: Feast Core URL """ self._core_url = value @@ -118,6 +129,9 @@ def core_url(self, value: str): def serving_url(self) -> str: """ Retrieve Serving Core URL + + Returns: + Feast Serving URL string """ if self._serving_url is not None: return self._serving_url @@ -130,8 +144,8 @@ def serving_url(self, value: str): """ Set the Feast Serving URL - Returns: - Feast Serving URL string + Args: + value: Feast Serving URL """ self._serving_url = value @@ -214,6 +228,74 @@ def _connect_serving(self, skip_if_connected=True): else: self._serving_service_stub = ServingServiceStub(self.__serving_channel) + @property + def project(self) -> Union[str, None]: + """ + Retrieve currently active project + + Returns: + Project name + """ + if self._project is not None: + return self._project + if os.getenv(FEAST_PROJECT_ENV_KEY) is not None: + return os.getenv(FEAST_PROJECT_ENV_KEY) + return None + + def set_project(self, project: str): + """ + Set currently active Feast project + + Args: + project: Project to set as active + """ + self._project = project + + def list_projects(self) -> List[str]: + """ + List all active Feast projects + + Returns: + List of project names + + """ + self._connect_core() + response = self._core_service_stub.ListProjects( + ListProjectsRequest(), 
timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ) # type: ListProjectsResponse + return list(response.projects) + + def create_project(self, project): + """ + Creates a Feast project + + Args: + project: Name of project + """ + + self._connect_core() + self._core_service_stub.CreateProject( + CreateProjectRequest(name=project), timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ) # type: CreateProjectResponse + + def archive_project(self, project): + """ + Archives a project. Project will still continue to function for + ingestion and retrieval, but will be in a read-only state. It will + also not be visible from the Core API for management purposes. + + Args: + project: Name of project to archive + """ + + self._connect_core() + self._core_service_stub.ArchiveProject( + ArchiveProjectRequest(name=project), timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + ) # type: ArchiveProjectResponse + + if self._project == project: + self._project = "" + def apply(self, feature_sets: Union[List[FeatureSet], FeatureSet]): """ Idempotently registers feature set(s) with Feast Core. Either a single @@ -240,15 +322,27 @@ def _apply_feature_set(self, feature_set: FeatureSet): feature_set: Feature set that will be registered """ self._connect_core() - feature_set._client = self feature_set.is_valid() + feature_set_proto = feature_set.to_proto() + if len(feature_set_proto.spec.project) == 0: + if self.project is None: + raise ValueError( + f"No project found in feature set {feature_set.name}. " + f"Please set the project within the feature set or within " + f"your Feast Client." 
+ ) + else: + feature_set_proto.spec.project = self.project # Convert the feature set to a request and send to Feast Core - apply_fs_response = self._core_service_stub.ApplyFeatureSet( - ApplyFeatureSetRequest(feature_set=feature_set.to_proto()), - timeout=GRPC_CONNECTION_TIMEOUT_APPLY, - ) # type: ApplyFeatureSetResponse + try: + apply_fs_response = self._core_service_stub.ApplyFeatureSet( + ApplyFeatureSetRequest(feature_set=feature_set_proto), + timeout=GRPC_CONNECTION_TIMEOUT_APPLY, + ) # type: ApplyFeatureSetResponse + except grpc.RpcError as e: + raise grpc.RpcError(e.details()) # Extract the returned feature set applied_fs = FeatureSet.from_proto(apply_fs_response.feature_set) @@ -266,18 +360,41 @@ def _apply_feature_set(self, feature_set: FeatureSet): # Deep copy from the returned feature set to the local feature set feature_set._update_from_feature_set(applied_fs) - def list_feature_sets(self) -> List[FeatureSet]: + def list_feature_sets( + self, project: str = None, name: str = None, version: str = None + ) -> List[FeatureSet]: """ Retrieve a list of feature sets from Feast Core + Args: + project: Filter feature sets based on project name + name: Filter feature sets based on feature set name + version: Filter feature sets based on version number + Returns: List of feature sets """ self._connect_core() + if project is None: + if self.project is not None: + project = self.project + else: + project = "*" + + if name is None: + name = "*" + + if version is None: + version = "*" + + filter = ListFeatureSetsRequest.Filter( + project=project, feature_set_name=name, feature_set_version=version + ) + # Get latest feature sets from Feast Core feature_set_protos = self._core_service_stub.ListFeatureSets( - ListFeatureSetsRequest() + ListFeatureSetsRequest(filter=filter) ) # type: ListFeatureSetsResponse # Extract feature sets and return @@ -289,13 +406,14 @@ def list_feature_sets(self) -> List[FeatureSet]: return feature_sets def get_feature_set( - self, name: str, 
version: int = None + self, name: str, version: int = None, project: str = None ) -> Union[FeatureSet, None]: """ Retrieves a feature set. If no version is specified then the latest version will be returned. Args: + project: Feast project that this feature set belongs to name: Name of feature set version: Version of feature set @@ -305,11 +423,23 @@ def get_feature_set( """ self._connect_core() + if project is None: + if self.project is not None: + project = self.project + else: + raise ValueError("No project has been configured.") + if version is None: version = 0 - get_feature_set_response = self._core_service_stub.GetFeatureSet( - GetFeatureSetRequest(name=name.strip(), version=int(version)) - ) # type: GetFeatureSetResponse + + try: + get_feature_set_response = self._core_service_stub.GetFeatureSet( + GetFeatureSetRequest( + project=project, name=name.strip(), version=int(version) + ) + ) # type: GetFeatureSetResponse + except grpc.RpcError as e: + raise grpc.RpcError(e.details()) return FeatureSet.from_proto(get_feature_set_response.feature_set) def list_entities(self) -> Dict[str, Entity]: @@ -326,22 +456,26 @@ def list_entities(self) -> Dict[str, Entity]: return entities_dict def get_batch_features( - self, feature_ids: List[str], entity_rows: Union[pd.DataFrame, str] + self, + feature_refs: List[str], + entity_rows: Union[pd.DataFrame, str], + default_project: str = None, ) -> Job: """ Retrieves historical features from a Feast Serving deployment. Args: - feature_ids (List[str]): - List of feature ids that will be returned for each entity. - Each feature id should have the following format - "feature_set_name:version:feature_name". + feature_refs (List[str]): + List of feature references that will be returned for each entity. + Each feature reference should have the following format + "project/feature:version". entity_rows (Union[pd.DataFrame, str]): Pandas dataframe containing entities and a 'datetime' column. 
Each entity in a feature set must be present as a column in this dataframe. The datetime column must contain timestamps in datetime64 format. + default_project: Default project where feature values will be found. Returns: feast.job.Job: @@ -354,27 +488,29 @@ def get_batch_features( >>> from datetime import datetime >>> >>> feast_client = Client(core_url="localhost:6565", serving_url="localhost:6566") - >>> feature_ids = ["customer:1:bookings_7d"] + >>> feature_refs = ["my_project/bookings_7d:1", "booking_14d"] >>> entity_rows = pd.DataFrame( >>> { >>> "datetime": [pd.datetime.now() for _ in range(3)], >>> "customer": [1001, 1002, 1003], >>> } >>> ) - >>> feature_retrieval_job = feast_client.get_batch_features(feature_ids, entity_rows) + >>> feature_retrieval_job = feast_client.get_batch_features( + >>> feature_refs, entity_rows, default_project="my_project") >>> df = feature_retrieval_job.to_dataframe() >>> print(df) """ self._connect_serving() - fs_request = _build_feature_set_request(feature_ids) + feature_references = _build_feature_references( + feature_refs=feature_refs, default_project=default_project + ) # Retrieve serving information to determine store type and # staging location serving_info = self._serving_service_stub.GetFeastServingInfo( - GetFeastServingInfoRequest(), - timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT + GetFeastServingInfoRequest(), timeout=GRPC_CONNECTION_TIMEOUT_DEFAULT ) # type: GetFeastServingInfoResponse if serving_info.type != FeastServingType.FEAST_SERVING_TYPE_BATCH: @@ -385,35 +521,25 @@ def get_batch_features( if isinstance(entity_rows, pd.DataFrame): # Pandas DataFrame detected - # Validate entity rows to based on entities in Feast Core - self._validate_dataframe_for_batch_retrieval( - entity_rows=entity_rows, - feature_sets_request=fs_request - ) # Remove timezone from datetime column if isinstance( - entity_rows["datetime"].dtype, - pd.core.dtypes.dtypes.DatetimeTZDtype + entity_rows["datetime"].dtype, 
pd.core.dtypes.dtypes.DatetimeTZDtype ): entity_rows["datetime"] = pd.DatetimeIndex( entity_rows["datetime"] ).tz_localize(None) elif isinstance(entity_rows, str): # String based source - if entity_rows.endswith((".avro", "*")): - # Validate Avro entity rows to based on entities in Feast Core - self._validate_avro_for_batch_retrieval( - source=entity_rows, - feature_sets_request=fs_request - ) - else: + if not entity_rows.endswith((".avro", "*")): raise Exception( f"Only .avro and wildcard paths are accepted as entity_rows" ) else: - raise Exception(f"Only pandas.DataFrame and str types are allowed" - f" as entity_rows, but got {type(entity_rows)}.") + raise Exception( + f"Only pandas.DataFrame and str types are allowed" + f" as entity_rows, but got {type(entity_rows)}." + ) # Export and upload entity row DataFrame to staging location # provided by Feast @@ -422,11 +548,10 @@ def get_batch_features( ) # type: List[str] request = GetBatchFeaturesRequest( - feature_sets=fs_request, + features=feature_references, dataset_source=DatasetSource( file_source=DatasetSource.FileSource( - file_uris=staged_files, - data_format=DataFormat.DATA_FORMAT_AVRO + file_uris=staged_files, data_format=DataFormat.DATA_FORMAT_AVRO ) ), ) @@ -435,165 +560,57 @@ def get_batch_features( response = self._serving_service_stub.GetBatchFeatures(request) return Job(response.job, self._serving_service_stub) - def _validate_dataframe_for_batch_retrieval( - self, entity_rows: pd.DataFrame, feature_sets_request - ): - """ - Validate whether an the entity rows in a DataFrame contains the correct - information for batch retrieval. - - Datetime column must be present in the DataFrame. - - Args: - entity_rows (pd.DataFrame): - Pandas DataFrame containing entities and datetime column. Each - entity in a feature set must be present as a column in this - DataFrame. - - feature_sets_request: - Feature sets that will be requested. 
- """ - - self._validate_columns( - columns=entity_rows.columns, - feature_sets_request=feature_sets_request, - datetime_field="datetime" - ) - - def _validate_avro_for_batch_retrieval( - self, source: str, feature_sets_request - ): - """ - Validate whether the entity rows in an Avro source file contains the - correct information for batch retrieval. - - Only gs:// and local files (file://) uri schemes are allowed. - - Avro file must have a column named "event_timestamp". - - No checks will be done if a GCS path is provided. - - Args: - source (str): - File path to Avro. - - feature_sets_request: - Feature sets that will be requested. - """ - p = urlparse(source) - - if p.scheme == "gs": - # GCS path provided (Risk is delegated to user) - # No validation if GCS path is provided - return - elif p.scheme == "file" or not p.scheme: - # Local file (file://) provided - file_path = os.path.abspath(os.path.join(p.netloc, p.path)) - else: - raise Exception(f"Unsupported uri scheme provided {p.scheme}, only " - f"local files (file://), and gs:// schemes are " - f"allowed") - - with open(file_path, "rb") as f: - reader = fastavro.reader(f) - schema = json.loads(reader.metadata["avro.schema"]) - columns = [x["name"] for x in schema["fields"]] - self._validate_columns( - columns=columns, - feature_sets_request=feature_sets_request, - datetime_field="event_timestamp" - ) - - def _validate_columns( - self, columns: List[str], - feature_sets_request, - datetime_field: str - ) -> None: - """ - Check if the required column contains the correct values for batch - retrieval. - - Args: - columns (List[str]): - List of columns to validate against feature_sets_request. - - feature_sets_request (): - Feature sets that will be requested. - - datetime_field (str): - Name of the datetime field that must be enforced and present as - a column in the data source. 
- - Returns: - None: - None - """ - # Ensure datetime column exists - if datetime_field not in columns: - raise ValueError( - f'Entity rows does not contain "{datetime_field}" column in ' - f'columns {columns}' - ) - - # Validate Avro columns based on feature set entities - for feature_set in feature_sets_request: - fs = self.get_feature_set( - name=feature_set.name, version=feature_set.version - ) - if fs is None: - raise ValueError( - f'Feature set "{feature_set.name}:{feature_set.version}" ' - f"could not be found" - ) - for entity_type in fs.entities: - if entity_type.name not in columns: - raise ValueError( - f'Input does not contain entity' - f' "{entity_type.name}" column in columns "{columns}"' - ) def get_online_features( self, - feature_ids: List[str], + feature_refs: List[str], entity_rows: List[GetOnlineFeaturesRequest.EntityRow], + default_project: Optional[str] = None, ) -> GetOnlineFeaturesResponse: """ Retrieves the latest online feature data from Feast Serving Args: - feature_ids: List of feature Ids in the following format - [feature_set_name]:[version]:[feature_name] + feature_refs: List of feature references in the following format + [project]/[feature_name]:[version]. Only the feature name + is a required component in the reference. example: - ["feature_set_1:6:my_feature_1", - "feature_set_1:6:my_feature_2",] + ["my_project/my_feature_1:3", + "my_project3/my_feature_4:1",] entity_rows: List of GetFeaturesRequest.EntityRow where each row contains entities. Timestamp should not be set for online retrieval. 
All entity types within a feature + default_project: This project will be used if the project name is + not provided in the feature reference Returns: Returns a list of maps where each item in the list contains the latest feature values for the provided entities """ - self._connect_serving() return self._serving_service_stub.GetOnlineFeatures( GetOnlineFeaturesRequest( - feature_sets=_build_feature_set_request(feature_ids), + features=_build_feature_references( + feature_refs=feature_refs, + default_project=( + default_project if not self.project else self.project + ), + ), entity_rows=entity_rows, ) ) # type: GetOnlineFeaturesResponse def ingest( - self, - feature_set: Union[str, FeatureSet], - source: Union[pd.DataFrame, str], - chunk_size: int = 10000, - version: int = None, - force_update: bool = False, - max_workers: int = max(CPU_COUNT - 1, 1), - disable_progress_bar: bool = False, - timeout: int = KAFKA_CHUNK_PRODUCTION_TIMEOUT + self, + feature_set: Union[str, FeatureSet], + source: Union[pd.DataFrame, str], + chunk_size: int = 10000, + version: int = None, + force_update: bool = False, + max_workers: int = max(CPU_COUNT - 1, 1), + disable_progress_bar: bool = False, + timeout: int = KAFKA_CHUNK_PRODUCTION_TIMEOUT, ) -> None: """ Loads feature data into Feast for a specific feature set. 
@@ -644,9 +661,7 @@ def ingest( raise Exception(f"Feature set name must be provided") # Read table and get row count - dir_path, dest_path = _read_table_from_source( - source, chunk_size, max_workers - ) + dir_path, dest_path = _read_table_from_source(source, chunk_size, max_workers) pq_file = pq.ParquetFile(dest_path) @@ -657,7 +672,7 @@ def ingest( feature_set.infer_fields_from_pa( table=pq_file.read_row_group(0), discard_unused_fields=True, - replace_existing_features=True + replace_existing_features=True, ) self.apply(feature_set) current_time = time.time() @@ -690,10 +705,11 @@ def ingest( # Transform and push data to Kafka if feature_set.source.source_type == "Kafka": for chunk in get_feature_row_chunks( - file=dest_path, - row_groups=list(range(pq_file.num_row_groups)), - fs=feature_set, - max_workers=max_workers): + file=dest_path, + row_groups=list(range(pq_file.num_row_groups)), + fs=feature_set, + max_workers=max_workers, + ): # Push FeatureRow one chunk at a time to kafka for serialized_row in chunk: @@ -722,39 +738,62 @@ def ingest( return None -def _build_feature_set_request( - feature_ids: List[str] -) -> List[FeatureSetRequest]: +def _build_feature_references( + feature_refs: List[str], default_project: str = None +) -> List[FeatureReference]: """ Builds a list of FeatureSet objects from feature set ids in order to retrieve feature data from Feast Serving Args: - feature_ids: List of feature ids - ("feature_set_name:version:feature_name") + feature_refs: List of feature reference strings + ("project/feature:version") + default_project: This project will be used if the project name is + not provided in the feature reference """ - feature_set_request = dict() # type: Dict[str, FeatureSetRequest] - for feature_id in feature_ids: - fid_parts = feature_id.split(":") - if len(fid_parts) == 3: - feature_set, version, feature = fid_parts + + features = [] + + for feature_ref in feature_refs: + project_split = feature_ref.split("/") + version = 0 + + if 
len(project_split) == 2: + project, feature_version = project_split + elif len(project_split) == 1: + feature_version = project_split[0] + if default_project is None: + raise ValueError( + f"No project specified in {feature_ref} and no default project provided" + ) + project = default_project + else: + raise ValueError( + f'Could not parse feature ref {feature_ref}, expecting "project/feature:version"' + ) + + feature_split = feature_version.split(":") + if len(feature_split) == 2: + name, version = feature_split + version = int(version) + elif len(feature_split) == 1: + name = feature_split[0] else: raise ValueError( - f"Could not parse feature id ${feature_id}, needs 2 colons" + f'Could not parse feature ref {feature_ref}, expecting "project/feature:version"' ) - if feature_set not in feature_set_request: - feature_set_request[feature_set] = FeatureSetRequest( - name=feature_set, version=int(version) + if len(project) == 0 or len(name) == 0 or version < 0: + raise ValueError( + f'Could not parse feature ref {feature_ref}, expecting "project/feature:version"' ) - feature_set_request[feature_set].feature_names.append(feature) - return list(feature_set_request.values()) + + features.append(FeatureReference(project=project, name=name, version=version)) + return features def _read_table_from_source( - source: Union[pd.DataFrame, str], - chunk_size: int, - max_workers: int + source: Union[pd.DataFrame, str], chunk_size: int, max_workers: int ) -> Tuple[str, str]: """ Infers a data source type (path or Pandas DataFrame) and reads it in as @@ -804,8 +843,7 @@ def _read_table_from_source( else: table = pq.read_table(file_path) else: - raise ValueError( - f"Unknown data source provided for ingestion: {source}") + raise ValueError(f"Unknown data source provided for ingestion: {source}") # Ensure that PyArrow table is initialised assert isinstance(table, pa.lib.Table) @@ -814,7 +852,7 @@ def _read_table_from_source( dir_path = tempfile.mkdtemp() tmp_table_name = 
f"{int(time.time())}.parquet" dest_path = f"{dir_path}/{tmp_table_name}" - row_group_size = min(ceil(table.num_rows/max_workers), chunk_size) + row_group_size = min(ceil(table.num_rows / max_workers), chunk_size) pq.write_table(table=table, where=dest_path, row_group_size=row_group_size) # Remove table from memory diff --git a/sdk/python/feast/core/CoreService_pb2.py b/sdk/python/feast/core/CoreService_pb2.py index 3185bece28..858703d7f3 100644 --- a/sdk/python/feast/core/CoreService_pb2.py +++ b/sdk/python/feast/core/CoreService_pb2.py @@ -22,7 +22,7 @@ package='feast.core', syntax='proto3', serialized_options=_b('\n\nfeast.coreB\020CoreServiceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), - serialized_pb=_b('\n\x1c\x66\x65\x61st/core/CoreService.proto\x12\nfeast.core\x1a\x1b\x66\x65\x61st/core/FeatureSet.proto\x1a\x16\x66\x65\x61st/core/Store.proto\"5\n\x14GetFeatureSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\"D\n\x15GetFeatureSetResponse\x12+\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\"\x94\x01\n\x16ListFeatureSetsRequest\x12\x39\n\x06\x66ilter\x18\x01 \x01(\x0b\x32).feast.core.ListFeatureSetsRequest.Filter\x1a?\n\x06\x46ilter\x12\x18\n\x10\x66\x65\x61ture_set_name\x18\x01 \x01(\t\x12\x1b\n\x13\x66\x65\x61ture_set_version\x18\x02 \x01(\t\"G\n\x17ListFeatureSetsResponse\x12,\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32\x16.feast.core.FeatureSet\"a\n\x11ListStoresRequest\x12\x34\n\x06\x66ilter\x18\x01 \x01(\x0b\x32$.feast.core.ListStoresRequest.Filter\x1a\x16\n\x06\x46ilter\x12\x0c\n\x04name\x18\x01 \x01(\t\"6\n\x12ListStoresResponse\x12 \n\x05store\x18\x01 \x03(\x0b\x32\x11.feast.core.Store\"E\n\x16\x41pplyFeatureSetRequest\x12+\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\"\xb3\x01\n\x17\x41pplyFeatureSetResponse\x12+\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\x12:\n\x06status\x18\x02 
\x01(\x0e\x32*.feast.core.ApplyFeatureSetResponse.Status\"/\n\x06Status\x12\r\n\tNO_CHANGE\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"\x1c\n\x1aGetFeastCoreVersionRequest\".\n\x1bGetFeastCoreVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"6\n\x12UpdateStoreRequest\x12 \n\x05store\x18\x01 \x01(\x0b\x32\x11.feast.core.Store\"\x95\x01\n\x13UpdateStoreResponse\x12 \n\x05store\x18\x01 \x01(\x0b\x32\x11.feast.core.Store\x12\x36\n\x06status\x18\x02 \x01(\x0e\x32&.feast.core.UpdateStoreResponse.Status\"$\n\x06Status\x12\r\n\tNO_CHANGE\x10\x00\x12\x0b\n\x07UPDATED\x10\x01\x32\xa0\x04\n\x0b\x43oreService\x12\x66\n\x13GetFeastCoreVersion\x12&.feast.core.GetFeastCoreVersionRequest\x1a\'.feast.core.GetFeastCoreVersionResponse\x12T\n\rGetFeatureSet\x12 .feast.core.GetFeatureSetRequest\x1a!.feast.core.GetFeatureSetResponse\x12Z\n\x0fListFeatureSets\x12\".feast.core.ListFeatureSetsRequest\x1a#.feast.core.ListFeatureSetsResponse\x12K\n\nListStores\x12\x1d.feast.core.ListStoresRequest\x1a\x1e.feast.core.ListStoresResponse\x12Z\n\x0f\x41pplyFeatureSet\x12\".feast.core.ApplyFeatureSetRequest\x1a#.feast.core.ApplyFeatureSetResponse\x12N\n\x0bUpdateStore\x12\x1e.feast.core.UpdateStoreRequest\x1a\x1f.feast.core.UpdateStoreResponseBO\n\nfeast.coreB\x10\x43oreServiceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') + serialized_pb=_b('\n\x1c\x66\x65\x61st/core/CoreService.proto\x12\nfeast.core\x1a\x1b\x66\x65\x61st/core/FeatureSet.proto\x1a\x16\x66\x65\x61st/core/Store.proto\"F\n\x14GetFeatureSetRequest\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\"D\n\x15GetFeatureSetResponse\x12+\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\"\xa5\x01\n\x16ListFeatureSetsRequest\x12\x39\n\x06\x66ilter\x18\x01 \x01(\x0b\x32).feast.core.ListFeatureSetsRequest.Filter\x1aP\n\x06\x46ilter\x12\x0f\n\x07project\x18\x03 
\x01(\t\x12\x18\n\x10\x66\x65\x61ture_set_name\x18\x01 \x01(\t\x12\x1b\n\x13\x66\x65\x61ture_set_version\x18\x02 \x01(\t\"G\n\x17ListFeatureSetsResponse\x12,\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32\x16.feast.core.FeatureSet\"a\n\x11ListStoresRequest\x12\x34\n\x06\x66ilter\x18\x01 \x01(\x0b\x32$.feast.core.ListStoresRequest.Filter\x1a\x16\n\x06\x46ilter\x12\x0c\n\x04name\x18\x01 \x01(\t\"6\n\x12ListStoresResponse\x12 \n\x05store\x18\x01 \x03(\x0b\x32\x11.feast.core.Store\"E\n\x16\x41pplyFeatureSetRequest\x12+\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\"\xb3\x01\n\x17\x41pplyFeatureSetResponse\x12+\n\x0b\x66\x65\x61ture_set\x18\x01 \x01(\x0b\x32\x16.feast.core.FeatureSet\x12:\n\x06status\x18\x02 \x01(\x0e\x32*.feast.core.ApplyFeatureSetResponse.Status\"/\n\x06Status\x12\r\n\tNO_CHANGE\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"\x1c\n\x1aGetFeastCoreVersionRequest\".\n\x1bGetFeastCoreVersionResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\"6\n\x12UpdateStoreRequest\x12 \n\x05store\x18\x01 \x01(\x0b\x32\x11.feast.core.Store\"\x95\x01\n\x13UpdateStoreResponse\x12 \n\x05store\x18\x01 \x01(\x0b\x32\x11.feast.core.Store\x12\x36\n\x06status\x18\x02 \x01(\x0e\x32&.feast.core.UpdateStoreResponse.Status\"$\n\x06Status\x12\r\n\tNO_CHANGE\x10\x00\x12\x0b\n\x07UPDATED\x10\x01\"$\n\x14\x43reateProjectRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x17\n\x15\x43reateProjectResponse\"%\n\x15\x41rchiveProjectRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x18\n\x16\x41rchiveProjectResponse\"\x15\n\x13ListProjectsRequest\"(\n\x14ListProjectsResponse\x12\x10\n\x08projects\x18\x01 \x03(\t2\xa2\x06\n\x0b\x43oreService\x12\x66\n\x13GetFeastCoreVersion\x12&.feast.core.GetFeastCoreVersionRequest\x1a\'.feast.core.GetFeastCoreVersionResponse\x12T\n\rGetFeatureSet\x12 
.feast.core.GetFeatureSetRequest\x1a!.feast.core.GetFeatureSetResponse\x12Z\n\x0fListFeatureSets\x12\".feast.core.ListFeatureSetsRequest\x1a#.feast.core.ListFeatureSetsResponse\x12K\n\nListStores\x12\x1d.feast.core.ListStoresRequest\x1a\x1e.feast.core.ListStoresResponse\x12Z\n\x0f\x41pplyFeatureSet\x12\".feast.core.ApplyFeatureSetRequest\x1a#.feast.core.ApplyFeatureSetResponse\x12N\n\x0bUpdateStore\x12\x1e.feast.core.UpdateStoreRequest\x1a\x1f.feast.core.UpdateStoreResponse\x12T\n\rCreateProject\x12 .feast.core.CreateProjectRequest\x1a!.feast.core.CreateProjectResponse\x12W\n\x0e\x41rchiveProject\x12!.feast.core.ArchiveProjectRequest\x1a\".feast.core.ArchiveProjectResponse\x12Q\n\x0cListProjects\x12\x1f.feast.core.ListProjectsRequest\x1a .feast.core.ListProjectsResponseBO\n\nfeast.coreB\x10\x43oreServiceProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') , dependencies=[feast_dot_core_dot_FeatureSet__pb2.DESCRIPTOR,feast_dot_core_dot_Store__pb2.DESCRIPTOR,]) @@ -49,8 +49,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=805, - serialized_end=852, + serialized_start=839, + serialized_end=886, ) _sym_db.RegisterEnumDescriptor(_APPLYFEATURESETRESPONSE_STATUS) @@ -71,8 +71,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1102, - serialized_end=1138, + serialized_start=1136, + serialized_end=1172, ) _sym_db.RegisterEnumDescriptor(_UPDATESTORERESPONSE_STATUS) @@ -85,14 +85,21 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='name', full_name='feast.core.GetFeatureSetRequest.name', index=0, + name='project', full_name='feast.core.GetFeatureSetRequest.project', index=0, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', 
full_name='feast.core.GetFeatureSetRequest.name', index=1, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='version', full_name='feast.core.GetFeatureSetRequest.version', index=1, + name='version', full_name='feast.core.GetFeatureSetRequest.version', index=2, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -111,7 +118,7 @@ oneofs=[ ], serialized_start=97, - serialized_end=150, + serialized_end=167, ) @@ -141,8 +148,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=152, - serialized_end=220, + serialized_start=169, + serialized_end=237, ) @@ -154,14 +161,21 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='feature_set_name', full_name='feast.core.ListFeatureSetsRequest.Filter.feature_set_name', index=0, + name='project', full_name='feast.core.ListFeatureSetsRequest.Filter.project', index=0, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='feature_set_name', full_name='feast.core.ListFeatureSetsRequest.Filter.feature_set_name', index=1, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='feature_set_version', full_name='feast.core.ListFeatureSetsRequest.Filter.feature_set_version', index=1, + name='feature_set_version', 
full_name='feast.core.ListFeatureSetsRequest.Filter.feature_set_version', index=2, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, @@ -179,8 +193,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=308, - serialized_end=371, + serialized_start=325, + serialized_end=405, ) _LISTFEATURESETSREQUEST = _descriptor.Descriptor( @@ -209,8 +223,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=223, - serialized_end=371, + serialized_start=240, + serialized_end=405, ) @@ -240,8 +254,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=373, - serialized_end=444, + serialized_start=407, + serialized_end=478, ) @@ -271,8 +285,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=521, - serialized_end=543, + serialized_start=555, + serialized_end=577, ) _LISTSTORESREQUEST = _descriptor.Descriptor( @@ -301,8 +315,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=446, - serialized_end=543, + serialized_start=480, + serialized_end=577, ) @@ -332,8 +346,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=545, - serialized_end=599, + serialized_start=579, + serialized_end=633, ) @@ -363,8 +377,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=601, - serialized_end=670, + serialized_start=635, + serialized_end=704, ) @@ -402,8 +416,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=673, - serialized_end=852, + serialized_start=707, + serialized_end=886, ) @@ -426,8 +440,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=854, - serialized_end=882, + serialized_start=888, + serialized_end=916, ) @@ -457,8 +471,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=884, - serialized_end=930, + serialized_start=918, + serialized_end=964, ) @@ -488,8 +502,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=932, - serialized_end=986, + serialized_start=966, + serialized_end=1020, ) @@ -527,8 +541,173 @@ extension_ranges=[], 
oneofs=[ ], - serialized_start=989, - serialized_end=1138, + serialized_start=1023, + serialized_end=1172, +) + + +_CREATEPROJECTREQUEST = _descriptor.Descriptor( + name='CreateProjectRequest', + full_name='feast.core.CreateProjectRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.CreateProjectRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1174, + serialized_end=1210, +) + + +_CREATEPROJECTRESPONSE = _descriptor.Descriptor( + name='CreateProjectResponse', + full_name='feast.core.CreateProjectResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1212, + serialized_end=1235, +) + + +_ARCHIVEPROJECTREQUEST = _descriptor.Descriptor( + name='ArchiveProjectRequest', + full_name='feast.core.ArchiveProjectRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.ArchiveProjectRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1237, + serialized_end=1274, +) + + +_ARCHIVEPROJECTRESPONSE = _descriptor.Descriptor( + name='ArchiveProjectResponse', + full_name='feast.core.ArchiveProjectResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1276, + serialized_end=1300, +) + + +_LISTPROJECTSREQUEST = _descriptor.Descriptor( + name='ListProjectsRequest', + full_name='feast.core.ListProjectsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1302, + serialized_end=1323, +) + + +_LISTPROJECTSRESPONSE = _descriptor.Descriptor( + name='ListProjectsResponse', + full_name='feast.core.ListProjectsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='projects', full_name='feast.core.ListProjectsResponse.projects', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1325, + serialized_end=1365, ) _GETFEATURESETRESPONSE.fields_by_name['feature_set'].message_type = feast_dot_core_dot_FeatureSet__pb2._FEATURESET @@ -558,6 +737,12 @@ DESCRIPTOR.message_types_by_name['GetFeastCoreVersionResponse'] = _GETFEASTCOREVERSIONRESPONSE 
DESCRIPTOR.message_types_by_name['UpdateStoreRequest'] = _UPDATESTOREREQUEST DESCRIPTOR.message_types_by_name['UpdateStoreResponse'] = _UPDATESTORERESPONSE +DESCRIPTOR.message_types_by_name['CreateProjectRequest'] = _CREATEPROJECTREQUEST +DESCRIPTOR.message_types_by_name['CreateProjectResponse'] = _CREATEPROJECTRESPONSE +DESCRIPTOR.message_types_by_name['ArchiveProjectRequest'] = _ARCHIVEPROJECTREQUEST +DESCRIPTOR.message_types_by_name['ArchiveProjectResponse'] = _ARCHIVEPROJECTRESPONSE +DESCRIPTOR.message_types_by_name['ListProjectsRequest'] = _LISTPROJECTSREQUEST +DESCRIPTOR.message_types_by_name['ListProjectsResponse'] = _LISTPROJECTSRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) GetFeatureSetRequest = _reflection.GeneratedProtocolMessageType('GetFeatureSetRequest', (_message.Message,), { @@ -660,6 +845,48 @@ }) _sym_db.RegisterMessage(UpdateStoreResponse) +CreateProjectRequest = _reflection.GeneratedProtocolMessageType('CreateProjectRequest', (_message.Message,), { + 'DESCRIPTOR' : _CREATEPROJECTREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.CreateProjectRequest) + }) +_sym_db.RegisterMessage(CreateProjectRequest) + +CreateProjectResponse = _reflection.GeneratedProtocolMessageType('CreateProjectResponse', (_message.Message,), { + 'DESCRIPTOR' : _CREATEPROJECTRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.CreateProjectResponse) + }) +_sym_db.RegisterMessage(CreateProjectResponse) + +ArchiveProjectRequest = _reflection.GeneratedProtocolMessageType('ArchiveProjectRequest', (_message.Message,), { + 'DESCRIPTOR' : _ARCHIVEPROJECTREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ArchiveProjectRequest) + }) +_sym_db.RegisterMessage(ArchiveProjectRequest) + +ArchiveProjectResponse = _reflection.GeneratedProtocolMessageType('ArchiveProjectResponse', (_message.Message,), { + 'DESCRIPTOR' : 
_ARCHIVEPROJECTRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ArchiveProjectResponse) + }) +_sym_db.RegisterMessage(ArchiveProjectResponse) + +ListProjectsRequest = _reflection.GeneratedProtocolMessageType('ListProjectsRequest', (_message.Message,), { + 'DESCRIPTOR' : _LISTPROJECTSREQUEST, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListProjectsRequest) + }) +_sym_db.RegisterMessage(ListProjectsRequest) + +ListProjectsResponse = _reflection.GeneratedProtocolMessageType('ListProjectsResponse', (_message.Message,), { + 'DESCRIPTOR' : _LISTPROJECTSRESPONSE, + '__module__' : 'feast.core.CoreService_pb2' + # @@protoc_insertion_point(class_scope:feast.core.ListProjectsResponse) + }) +_sym_db.RegisterMessage(ListProjectsResponse) + DESCRIPTOR._options = None @@ -669,8 +896,8 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=1141, - serialized_end=1685, + serialized_start=1368, + serialized_end=2170, methods=[ _descriptor.MethodDescriptor( name='GetFeastCoreVersion', @@ -726,6 +953,33 @@ output_type=_UPDATESTORERESPONSE, serialized_options=None, ), + _descriptor.MethodDescriptor( + name='CreateProject', + full_name='feast.core.CoreService.CreateProject', + index=6, + containing_service=None, + input_type=_CREATEPROJECTREQUEST, + output_type=_CREATEPROJECTRESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='ArchiveProject', + full_name='feast.core.CoreService.ArchiveProject', + index=7, + containing_service=None, + input_type=_ARCHIVEPROJECTREQUEST, + output_type=_ARCHIVEPROJECTRESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name='ListProjects', + full_name='feast.core.CoreService.ListProjects', + index=8, + containing_service=None, + input_type=_LISTPROJECTSREQUEST, + output_type=_LISTPROJECTSRESPONSE, + serialized_options=None, + ), ]) 
_sym_db.RegisterServiceDescriptor(_CORESERVICE) diff --git a/sdk/python/feast/core/CoreService_pb2.pyi b/sdk/python/feast/core/CoreService_pb2.pyi index 5cd6eaf671..645226982a 100644 --- a/sdk/python/feast/core/CoreService_pb2.pyi +++ b/sdk/python/feast/core/CoreService_pb2.pyi @@ -15,6 +15,7 @@ from google.protobuf.descriptor import ( from google.protobuf.internal.containers import ( RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, ) from google.protobuf.message import ( @@ -37,11 +38,13 @@ from typing_extensions import ( class GetFeatureSetRequest(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + project = ... # type: typing___Text name = ... # type: typing___Text version = ... # type: int def __init__(self, *, + project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, version : typing___Optional[int] = None, ) -> None: ... @@ -50,9 +53,9 @@ class GetFeatureSetRequest(google___protobuf___message___Message): def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"name",u"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"project",u"version"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"version",b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"project",b"project",u"version",b"version"]) -> None: ... class GetFeatureSetResponse(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
@@ -79,11 +82,13 @@ class ListFeatureSetsRequest(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... class Filter(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + project = ... # type: typing___Text feature_set_name = ... # type: typing___Text feature_set_version = ... # type: typing___Text def __init__(self, *, + project : typing___Optional[typing___Text] = None, feature_set_name : typing___Optional[typing___Text] = None, feature_set_version : typing___Optional[typing___Text] = None, ) -> None: ... @@ -92,9 +97,9 @@ class ListFeatureSetsRequest(google___protobuf___message___Message): def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"feature_set_name",u"feature_set_version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set_name",u"feature_set_version",u"project"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"feature_set_name",b"feature_set_name",u"feature_set_version",b"feature_set_version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature_set_name",b"feature_set_name",u"feature_set_version",b"feature_set_version",u"project",b"project"]) -> None: ... @property @@ -341,3 +346,84 @@ class UpdateStoreResponse(google___protobuf___message___Message): else: def HasField(self, field_name: typing_extensions___Literal[u"store",b"store"]) -> bool: ... def ClearField(self, field_name: typing_extensions___Literal[u"status",b"status",u"store",b"store"]) -> None: ... + +class CreateProjectRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... 
# type: typing___Text + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CreateProjectRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> None: ... + +class CreateProjectResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CreateProjectResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class ArchiveProjectRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + name = ... # type: typing___Text + + def __init__(self, + *, + name : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ArchiveProjectRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name"]) -> None: ... + +class ArchiveProjectResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ArchiveProjectResponse: ... 
+ def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class ListProjectsRequest(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListProjectsRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class ListProjectsResponse(google___protobuf___message___Message): + DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + projects = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + *, + projects : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ListProjectsResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"projects"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[u"projects",b"projects"]) -> None: ... 
diff --git a/sdk/python/feast/core/CoreService_pb2_grpc.py b/sdk/python/feast/core/CoreService_pb2_grpc.py index c4d2808779..0e17d0552a 100644 --- a/sdk/python/feast/core/CoreService_pb2_grpc.py +++ b/sdk/python/feast/core/CoreService_pb2_grpc.py @@ -44,6 +44,21 @@ def __init__(self, channel): request_serializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreRequest.SerializeToString, response_deserializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreResponse.FromString, ) + self.CreateProject = channel.unary_unary( + '/feast.core.CoreService/CreateProject', + request_serializer=feast_dot_core_dot_CoreService__pb2.CreateProjectRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.CreateProjectResponse.FromString, + ) + self.ArchiveProject = channel.unary_unary( + '/feast.core.CoreService/ArchiveProject', + request_serializer=feast_dot_core_dot_CoreService__pb2.ArchiveProjectRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.ArchiveProjectResponse.FromString, + ) + self.ListProjects = channel.unary_unary( + '/feast.core.CoreService/ListProjects', + request_serializer=feast_dot_core_dot_CoreService__pb2.ListProjectsRequest.SerializeToString, + response_deserializer=feast_dot_core_dot_CoreService__pb2.ListProjectsResponse.FromString, + ) class CoreServiceServicer(object): @@ -108,6 +123,32 @@ def UpdateStore(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def CreateProject(self, request, context): + """Creates a project. Projects serve as namespaces within which resources like features will be + created. Both feature set names as well as field names must be unique within a project. Project + names themselves must be globally unique. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ArchiveProject(self, request, context): + """Archives a project. Archived projects will continue to exist and function, but won't be visible + through the Core API. Any existing ingestion or serving requests will continue to function, + but will result in warning messages being logged. It is not possible to unarchive a project + through the Core API + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListProjects(self, request, context): + """Lists all projects active projects. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_CoreServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -141,6 +182,21 @@ def add_CoreServiceServicer_to_server(servicer, server): request_deserializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreRequest.FromString, response_serializer=feast_dot_core_dot_CoreService__pb2.UpdateStoreResponse.SerializeToString, ), + 'CreateProject': grpc.unary_unary_rpc_method_handler( + servicer.CreateProject, + request_deserializer=feast_dot_core_dot_CoreService__pb2.CreateProjectRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.CreateProjectResponse.SerializeToString, + ), + 'ArchiveProject': grpc.unary_unary_rpc_method_handler( + servicer.ArchiveProject, + request_deserializer=feast_dot_core_dot_CoreService__pb2.ArchiveProjectRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.ArchiveProjectResponse.SerializeToString, + ), + 'ListProjects': grpc.unary_unary_rpc_method_handler( + servicer.ListProjects, + 
request_deserializer=feast_dot_core_dot_CoreService__pb2.ListProjectsRequest.FromString, + response_serializer=feast_dot_core_dot_CoreService__pb2.ListProjectsResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'feast.core.CoreService', rpc_method_handlers) diff --git a/sdk/python/feast/core/FeatureSet_pb2.py b/sdk/python/feast/core/FeatureSet_pb2.py index 0cb77f1a70..991220ccae 100644 --- a/sdk/python/feast/core/FeatureSet_pb2.py +++ b/sdk/python/feast/core/FeatureSet_pb2.py @@ -25,7 +25,7 @@ package='feast.core', syntax='proto3', serialized_options=_b('\n\nfeast.coreB\017FeatureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), - serialized_pb=_b('\n\x1b\x66\x65\x61st/core/FeatureSet.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x17\x66\x65\x61st/core/Source.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"`\n\nFeatureSet\x12(\n\x04spec\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\x12(\n\x04meta\x18\x02 \x01(\x0b\x32\x1a.feast.core.FeatureSetMeta\"\xd4\x01\n\x0e\x46\x65\x61tureSetSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12(\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x16.feast.core.EntitySpec\x12)\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x17.feast.core.FeatureSpec\x12*\n\x07max_age\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\"\n\x06source\x18\x06 \x01(\x0b\x32\x12.feast.core.Source\"K\n\nEntitySpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"L\n\x0b\x46\x65\x61tureSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"u\n\x0e\x46\x65\x61tureSetMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x06status\x18\x02 
\x01(\x0e\x32\x1c.feast.core.FeatureSetStatus*L\n\x10\x46\x65\x61tureSetStatus\x12\x12\n\x0eSTATUS_INVALID\x10\x00\x12\x12\n\x0eSTATUS_PENDING\x10\x01\x12\x10\n\x0cSTATUS_READY\x10\x02\x42N\n\nfeast.coreB\x0f\x46\x65\x61tureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') + serialized_pb=_b('\n\x1b\x66\x65\x61st/core/FeatureSet.proto\x12\nfeast.core\x1a\x17\x66\x65\x61st/types/Value.proto\x1a\x17\x66\x65\x61st/core/Source.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"`\n\nFeatureSet\x12(\n\x04spec\x18\x01 \x01(\x0b\x32\x1a.feast.core.FeatureSetSpec\x12(\n\x04meta\x18\x02 \x01(\x0b\x32\x1a.feast.core.FeatureSetMeta\"\xe5\x01\n\x0e\x46\x65\x61tureSetSpec\x12\x0f\n\x07project\x18\x07 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12(\n\x08\x65ntities\x18\x03 \x03(\x0b\x32\x16.feast.core.EntitySpec\x12)\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x17.feast.core.FeatureSpec\x12*\n\x07max_age\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\"\n\x06source\x18\x06 \x01(\x0b\x32\x12.feast.core.Source\"K\n\nEntitySpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"L\n\x0b\x46\x65\x61tureSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\nvalue_type\x18\x02 \x01(\x0e\x32\x1b.feast.types.ValueType.Enum\"u\n\x0e\x46\x65\x61tureSetMeta\x12\x35\n\x11\x63reated_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x06status\x18\x02 \x01(\x0e\x32\x1c.feast.core.FeatureSetStatus*L\n\x10\x46\x65\x61tureSetStatus\x12\x12\n\x0eSTATUS_INVALID\x10\x00\x12\x12\n\x0eSTATUS_PENDING\x10\x01\x12\x10\n\x0cSTATUS_READY\x10\x02\x42N\n\nfeast.coreB\x0f\x46\x65\x61tureSetProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') , 
dependencies=[feast_dot_types_dot_Value__pb2.DESCRIPTOR,feast_dot_core_dot_Source__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) @@ -50,8 +50,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=745, - serialized_end=821, + serialized_start=762, + serialized_end=838, ) _sym_db.RegisterEnumDescriptor(_FEATURESETSTATUS) @@ -108,42 +108,49 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='name', full_name='feast.core.FeatureSetSpec.name', index=0, + name='project', full_name='feast.core.FeatureSetSpec.project', index=0, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.FeatureSetSpec.name', index=1, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='version', full_name='feast.core.FeatureSetSpec.version', index=1, + name='version', full_name='feast.core.FeatureSetSpec.version', index=2, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='entities', full_name='feast.core.FeatureSetSpec.entities', index=2, + name='entities', full_name='feast.core.FeatureSetSpec.entities', index=3, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='features', full_name='feast.core.FeatureSetSpec.features', index=3, + name='features', full_name='feast.core.FeatureSetSpec.features', index=4, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='max_age', full_name='feast.core.FeatureSetSpec.max_age', index=4, + name='max_age', full_name='feast.core.FeatureSetSpec.max_age', index=5, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='source', full_name='feast.core.FeatureSetSpec.source', index=5, + name='source', full_name='feast.core.FeatureSetSpec.source', index=6, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -162,7 +169,7 @@ oneofs=[ ], serialized_start=257, - serialized_end=469, + serialized_end=486, ) @@ -199,8 +206,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=471, - serialized_end=546, + serialized_start=488, + serialized_end=563, ) @@ -237,8 +244,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=548, - serialized_end=624, + serialized_start=565, + serialized_end=641, ) @@ -275,8 +282,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=626, - serialized_end=743, + serialized_start=643, + serialized_end=760, ) _FEATURESET.fields_by_name['spec'].message_type = _FEATURESETSPEC diff --git a/sdk/python/feast/core/FeatureSet_pb2.pyi b/sdk/python/feast/core/FeatureSet_pb2.pyi index 6fa03ed359..c663c70c68 100644 --- a/sdk/python/feast/core/FeatureSet_pb2.pyi +++ 
b/sdk/python/feast/core/FeatureSet_pb2.pyi @@ -89,6 +89,7 @@ class FeatureSet(google___protobuf___message___Message): class FeatureSetSpec(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + project = ... # type: typing___Text name = ... # type: typing___Text version = ... # type: int @@ -106,6 +107,7 @@ class FeatureSetSpec(google___protobuf___message___Message): def __init__(self, *, + project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, version : typing___Optional[int] = None, entities : typing___Optional[typing___Iterable[EntitySpec]] = None, @@ -119,10 +121,10 @@ class FeatureSetSpec(google___protobuf___message___Message): def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def HasField(self, field_name: typing_extensions___Literal[u"max_age",u"source"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"features",u"max_age",u"name",u"source",u"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"features",u"max_age",u"name",u"project",u"source",u"version"]) -> None: ... else: def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age",u"source",b"source"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"entities",b"entities",u"features",b"features",u"max_age",b"max_age",u"name",b"name",u"source",b"source",u"version",b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entities",b"entities",u"features",b"features",u"max_age",b"max_age",u"name",b"name",u"project",b"project",u"source",b"source",u"version",b"version"]) -> None: ... class EntitySpec(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... 
diff --git a/sdk/python/feast/core/Store_pb2.py b/sdk/python/feast/core/Store_pb2.py index c7f9e07d87..716a597b9a 100644 --- a/sdk/python/feast/core/Store_pb2.py +++ b/sdk/python/feast/core/Store_pb2.py @@ -20,7 +20,7 @@ package='feast.core', syntax='proto3', serialized_options=_b('\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/core'), - serialized_pb=_b('\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xb9\x04\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12;\n\x0f\x62igquery_config\x18\x0c \x01(\x0b\x32 .feast.core.Store.BigQueryConfigH\x00\x12=\n\x10\x63\x61ssandra_config\x18\r \x01(\x0b\x32!.feast.core.Store.CassandraConfigH\x00\x1a)\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a\x38\n\x0e\x42igQueryConfig\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x1a-\n\x0f\x43\x61ssandraConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a-\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"@\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x0c\n\x08\x42IGQUERY\x10\x02\x12\r\n\tCASSANDRA\x10\x03\x42\x08\n\x06\x63onfigBI\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') + serialized_pb=_b('\n\x16\x66\x65\x61st/core/Store.proto\x12\nfeast.core\"\xca\x04\n\x05Store\x12\x0c\n\x04name\x18\x01 \x01(\t\x12)\n\x04type\x18\x02 \x01(\x0e\x32\x1b.feast.core.Store.StoreType\x12\x35\n\rsubscriptions\x18\x04 \x03(\x0b\x32\x1e.feast.core.Store.Subscription\x12\x35\n\x0credis_config\x18\x0b \x01(\x0b\x32\x1d.feast.core.Store.RedisConfigH\x00\x12;\n\x0f\x62igquery_config\x18\x0c \x01(\x0b\x32 
.feast.core.Store.BigQueryConfigH\x00\x12=\n\x10\x63\x61ssandra_config\x18\r \x01(\x0b\x32!.feast.core.Store.CassandraConfigH\x00\x1a)\n\x0bRedisConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a\x38\n\x0e\x42igQueryConfig\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x1a-\n\x0f\x43\x61ssandraConfig\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x1a>\n\x0cSubscription\x12\x0f\n\x07project\x18\x03 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\"@\n\tStoreType\x12\x0b\n\x07INVALID\x10\x00\x12\t\n\x05REDIS\x10\x01\x12\x0c\n\x08\x42IGQUERY\x10\x02\x12\r\n\tCASSANDRA\x10\x03\x42\x08\n\x06\x63onfigBI\n\nfeast.coreB\nStoreProtoZ/github.com/gojek/feast/sdk/go/protos/feast/coreb\x06proto3') ) @@ -50,8 +50,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=534, - serialized_end=598, + serialized_start=551, + serialized_end=615, ) _sym_db.RegisterEnumDescriptor(_STORE_STORETYPE) @@ -175,14 +175,21 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='name', full_name='feast.core.Store.Subscription.name', index=0, + name='project', full_name='feast.core.Store.Subscription.project', index=0, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='feast.core.Store.Subscription.name', index=1, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='version', full_name='feast.core.Store.Subscription.version', index=1, + name='version', 
full_name='feast.core.Store.Subscription.version', index=2, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, @@ -201,7 +208,7 @@ oneofs=[ ], serialized_start=487, - serialized_end=532, + serialized_end=549, ) _STORE = _descriptor.Descriptor( @@ -270,7 +277,7 @@ index=0, containing_type=None, fields=[]), ], serialized_start=39, - serialized_end=608, + serialized_end=625, ) _STORE_REDISCONFIG.containing_type = _STORE diff --git a/sdk/python/feast/core/Store_pb2.pyi b/sdk/python/feast/core/Store_pb2.pyi index 726a9d5443..541bcd329b 100644 --- a/sdk/python/feast/core/Store_pb2.pyi +++ b/sdk/python/feast/core/Store_pb2.pyi @@ -109,11 +109,13 @@ class Store(google___protobuf___message___Message): class Subscription(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + project = ... # type: typing___Text name = ... # type: typing___Text version = ... # type: typing___Text def __init__(self, *, + project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, version : typing___Optional[typing___Text] = None, ) -> None: ... @@ -122,9 +124,9 @@ class Store(google___protobuf___message___Message): def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"name",u"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"project",u"version"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"version",b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"name",b"name",u"project",b"project",u"version",b"version"]) -> None: ... name = ... 
# type: typing___Text type = ... # type: Store.StoreType diff --git a/sdk/python/feast/exceptions.py b/sdk/python/feast/exceptions.py deleted file mode 100644 index ca5f20694c..0000000000 --- a/sdk/python/feast/exceptions.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2019 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def format_grpc_exception(method: str, code: str, details: str) -> str: - """ - Consistent format for use in printing gRPC exceptions - """ - return f'{method} failed with code "{code}"\n{details}' diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py index b402ef3acd..d557660751 100644 --- a/sdk/python/feast/feature_set.py +++ b/sdk/python/feast/feature_set.py @@ -30,7 +30,11 @@ from feast.type_map import pa_to_feast_value_type from feast.type_map import python_type_to_feast_value_type from google.protobuf import json_format +from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto +from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto +from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto from google.protobuf.duration_pb2 import Duration +from feast.type_map import python_type_to_feast_value_type from google.protobuf.json_format import MessageToJson from pandas.api.types import is_datetime64_ns_dtype from pyarrow.lib import TimestampType @@ -44,12 +48,14 @@ class FeatureSet: def __init__( self, name: str, + project: str = None, features: 
List[Feature] = None, entities: List[Entity] = None, source: Source = None, - max_age: Optional[Duration] = None + max_age: Optional[Duration] = None, ): self._name = name + self._project = project self._fields = OrderedDict() # type: Dict[str, Field] if features is not None: self.features = features @@ -61,7 +67,6 @@ def __init__( self._source = source self._max_age = max_age self._version = None - self._client = None self._status = None self._created_timestamp = None @@ -73,7 +78,11 @@ def __eq__(self, other): if key not in other.fields.keys() or self.fields[key] != other.fields[key]: return False - if self.name != other.name or self.max_age != other.max_age: + if ( + self.name != other.name + or self.project != other.project + or self.max_age != other.max_age + ): return False return True @@ -81,10 +90,14 @@ def __str__(self): return str(MessageToJson(self.to_proto())) def __repr__(self): - shortname = "" + self._name - if self._version: - shortname += ":" + str(self._version).strip() - return shortname + ref = "" + if self.project: + ref += self.project + "/" + if self.name: + ref += self.name + if self.version: + ref += ":" + str(self.version).strip() + return ref @property def fields(self) -> Dict[str, Field]: @@ -159,6 +172,20 @@ def name(self, name): """ self._name = name + @property + def project(self): + """ + Returns the project that this feature set belongs to + """ + return self._project + + @project.setter + def project(self, project): + """ + Sets the project that this feature set belongs to + """ + self._project = project + @property def source(self): """ @@ -410,12 +437,13 @@ def infer_fields_from_df( print(output_log) def infer_fields_from_pa( - self, table: pa.lib.Table, - entities: Optional[List[Entity]] = None, - features: Optional[List[Feature]] = None, - replace_existing_features: bool = False, - replace_existing_entities: bool = False, - discard_unused_fields: bool = False + self, + table: pa.lib.Table, + entities: Optional[List[Entity]] = 
None, + features: Optional[List[Feature]] = None, + replace_existing_features: bool = False, + replace_existing_entities: bool = False, + discard_unused_fields: bool = False, ) -> None: """ Adds fields (Features or Entities) to a feature set based on the schema @@ -514,22 +542,21 @@ def infer_fields_from_pa( # Only overwrite conflicting fields if replacement is allowed if column in new_fields: if ( - isinstance(self._fields[column], Feature) - and not replace_existing_features + isinstance(self._fields[column], Feature) + and not replace_existing_features ): continue if ( - isinstance(self._fields[column], Entity) - and not replace_existing_entities + isinstance(self._fields[column], Entity) + and not replace_existing_entities ): continue # Store this fields as a feature # TODO: (Minor) Change the parameter name from dtype to patype new_fields[column] = Feature( - name=column, - dtype=self._infer_pa_column_type(table.column(column)) + name=column, dtype=self._infer_pa_column_type(table.column(column)) ) output_log += f"{type(new_fields[column]).__name__} {new_fields[column].name} ({new_fields[column].dtype}) added from PyArrow Table.\n" @@ -598,6 +625,7 @@ def _update_from_feature_set(self, feature_set): """ self.name = feature_set.name + self.project = feature_set.project self.version = feature_set.version self.source = feature_set.source self.max_age = feature_set.max_age @@ -629,6 +657,9 @@ def is_valid(self): if feature set is invalid. 
""" + if not self.name: + raise ValueError(f"No name found in feature set.") + if len(self.entities) == 0: raise ValueError(f"No entities found in feature set {self.name}") @@ -691,7 +722,10 @@ def from_proto(cls, feature_set_proto: FeatureSetProto): None if feature_set_proto.spec.source.type == 0 else Source.from_proto(feature_set_proto.spec.source) - ) + ), + project=feature_set_proto.spec.project + if len(feature_set_proto.spec.project) == 0 + else feature_set_proto.spec.project, ) feature_set._version = feature_set_proto.spec.version feature_set._status = feature_set_proto.meta.status @@ -713,6 +747,7 @@ def to_proto(self) -> FeatureSetProto: spec = FeatureSetSpecProto( name=self.name, version=self.version, + project=self.project, max_age=self.max_age, source=self.source.to_proto() if self.source is not None else None, features=[ diff --git a/sdk/python/feast/job.py b/sdk/python/feast/job.py index 26f6181ee2..f849f6630d 100644 --- a/sdk/python/feast/job.py +++ b/sdk/python/feast/job.py @@ -132,10 +132,7 @@ def result(self, timeout_sec: int = DEFAULT_TIMEOUT_SEC): for record in avro_reader: yield record - def to_dataframe( - self, - timeout_sec: int = DEFAULT_TIMEOUT_SEC - ) -> pd.DataFrame: + def to_dataframe(self, timeout_sec: int = DEFAULT_TIMEOUT_SEC) -> pd.DataFrame: """ Wait until a job is done to get an iterable rows of result. This method will split the response into chunked DataFrame of a specified size to @@ -157,9 +154,7 @@ def to_dataframe( return pd.DataFrame.from_records(records) def to_chunked_dataframe( - self, - max_chunk_size: int = -1, - timeout_sec: int = DEFAULT_TIMEOUT_SEC + self, max_chunk_size: int = -1, timeout_sec: int = DEFAULT_TIMEOUT_SEC ) -> pd.DataFrame: """ Wait until a job is done to get an iterable rows of result. 
This method diff --git a/sdk/python/feast/loaders/abstract_producer.py b/sdk/python/feast/loaders/abstract_producer.py index 884ae49984..d0ddabf1e5 100644 --- a/sdk/python/feast/loaders/abstract_producer.py +++ b/sdk/python/feast/loaders/abstract_producer.py @@ -22,12 +22,7 @@ class AbstractProducer: Abstract class for Kafka producers """ - def __init__( - self, - brokers: str, - row_count: int, - disable_progress_bar: bool - ): + def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): self.brokers = brokers self.row_count = row_count self.error_count = 0 @@ -35,20 +30,15 @@ def __init__( # Progress bar will always display average rate self.pbar = tqdm( - total=row_count, - unit="rows", - smoothing=0, - disable=disable_progress_bar + total=row_count, unit="rows", smoothing=0, disable=disable_progress_bar ) def produce(self, topic: str, data: str): - message = "{} should implement a produce method".format( - self.__class__.__name__) + message = "{} should implement a produce method".format(self.__class__.__name__) raise NotImplementedError(message) def flush(self, timeout: int): - message = "{} should implement a flush method".format( - self.__class__.__name__) + message = "{} should implement a flush method".format(self.__class__.__name__) raise NotImplementedError(message) def _inc_pbar(self, meta): @@ -98,13 +88,9 @@ class ConfluentProducer(AbstractProducer): Concrete implementation of Confluent Kafka producer (confluent-kafka) """ - def __init__( - self, - brokers: str, - row_count: int, - disable_progress_bar: bool - ): + def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): from confluent_kafka import Producer + self.producer = Producer({"bootstrap.servers": brokers}) super().__init__(brokers, row_count, disable_progress_bar) @@ -122,8 +108,7 @@ def produce(self, topic: str, value: bytes) -> None: """ try: - self.producer.produce( - topic, value=value, callback=self._delivery_callback) + 
self.producer.produce(topic, value=value, callback=self._delivery_callback) # Serve delivery callback queue. # NOTE: Since produce() is an asynchronous API this poll() call # will most likely not serve the delivery callback for the @@ -173,13 +158,9 @@ class KafkaPythonProducer(AbstractProducer): Concrete implementation of Python Kafka producer (kafka-python) """ - def __init__( - self, - brokers: str, - row_count: int, - disable_progress_bar: bool - ): + def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): from kafka import KafkaProducer + self.producer = KafkaProducer(bootstrap_servers=[brokers]) super().__init__(brokers, row_count, disable_progress_bar) @@ -199,8 +180,11 @@ def produce(self, topic: str, value: bytes): KafkaTimeoutError: if unable to fetch topic metadata, or unable to obtain memory buffer prior to configured max_block_ms """ - return self.producer.send(topic, value=value).add_callback( - self._inc_pbar).add_errback(self._set_error) + return ( + self.producer.send(topic, value=value) + .add_callback(self._inc_pbar) + .add_errback(self._set_error) + ) def flush(self, timeout: Optional[int]): """ @@ -220,7 +204,7 @@ def flush(self, timeout: Optional[int]): def get_producer( - brokers: str, row_count: int, disable_progress_bar: bool + brokers: str, row_count: int, disable_progress_bar: bool ) -> Union[ConfluentProducer, KafkaPythonProducer]: """ Simple context helper function that returns a AbstractProducer object when diff --git a/sdk/python/feast/loaders/file.py b/sdk/python/feast/loaders/file.py index 108f2790dd..bb050c07c6 100644 --- a/sdk/python/feast/loaders/file.py +++ b/sdk/python/feast/loaders/file.py @@ -27,7 +27,7 @@ def export_source_to_staging_location( - source: Union[pd.DataFrame, str], staging_location_uri: str + source: Union[pd.DataFrame, str], staging_location_uri: str ) -> List[str]: """ Uploads a DataFrame as an Avro file to a remote staging location. 
@@ -69,38 +69,33 @@ def export_source_to_staging_location( uri_path = None # Remote gs staging location provided by serving - dir_path, file_name, source_path = export_dataframe_to_local( - source, - uri_path - ) + dir_path, file_name, source_path = export_dataframe_to_local(source, uri_path) elif urlparse(source).scheme in ["", "file"]: # Local file provided as a source dir_path = None file_name = os.path.basename(source) - source_path = os.path.abspath(os.path.join( - urlparse(source).netloc, urlparse(source).path)) + source_path = os.path.abspath( + os.path.join(urlparse(source).netloc, urlparse(source).path) + ) elif urlparse(source).scheme == "gs": # Google Cloud Storage path provided input_source_uri = urlparse(source) if "*" in source: # Wildcard path - return _get_files( - bucket=input_source_uri.hostname, - uri=input_source_uri - ) + return _get_files(bucket=input_source_uri.hostname, uri=input_source_uri) else: return [source] else: - raise Exception(f"Only string and DataFrame types are allowed as a " - f"source, {type(source)} was provided.") + raise Exception( + f"Only string and DataFrame types are allowed as a " + f"source, {type(source)} was provided." + ) # Push data to required staging location if uri.scheme == "gs": # Staging location is a Google Cloud Storage path upload_file_to_gcs( - source_path, - uri.hostname, - str(uri.path).strip("/") + "/" + file_name + source_path, uri.hostname, str(uri.path).strip("/") + "/" + file_name ) elif uri.scheme == "file": # Staging location is a file path @@ -120,8 +115,7 @@ def export_source_to_staging_location( def export_dataframe_to_local( - df: pd.DataFrame, - dir_path: Optional[str] = None + df: pd.DataFrame, dir_path: Optional[str] = None ) -> Tuple[str, str, str]: """ Exports a pandas DataFrame to the local filesystem. @@ -149,11 +143,7 @@ def export_dataframe_to_local( # Temporarily rename datetime column to event_timestamp. Ideally we would # force the schema with our avro writer instead. 
- df.columns = [ - "event_timestamp" - if col == "datetime" else col - for col in df.columns - ] + df.columns = ["event_timestamp" if col == "datetime" else col for col in df.columns] try: # Export dataset to file in local path @@ -163,9 +153,7 @@ def export_dataframe_to_local( finally: # Revert event_timestamp column to datetime df.columns = [ - "datetime" - if col == "event_timestamp" else col - for col in df.columns + "datetime" if col == "event_timestamp" else col for col in df.columns ] return dir_path, file_name, dest_path @@ -223,13 +211,14 @@ def _get_files(bucket: str, uri: ParseResult) -> List[str]: if "*" in path: regex = re.compile(path.replace("*", ".*?").strip("/")) blob_list = bucket.list_blobs( - prefix=path.strip("/").split("*")[0], - delimiter="/" + prefix=path.strip("/").split("*")[0], delimiter="/" ) # File path should not be in path (file path must be longer than path) - return [f"{uri.scheme}://{uri.hostname}/{file}" - for file in [x.name for x in blob_list] - if re.match(regex, file) and file not in path] + return [ + f"{uri.scheme}://{uri.hostname}/{file}" + for file in [x.name for x in blob_list] + if re.match(regex, file) and file not in path + ] else: raise Exception(f"{path} is not a wildcard path") diff --git a/sdk/python/feast/loaders/ingest.py b/sdk/python/feast/loaders/ingest.py index cbe80086e6..95b699d000 100644 --- a/sdk/python/feast/loaders/ingest.py +++ b/sdk/python/feast/loaders/ingest.py @@ -7,8 +7,10 @@ import pyarrow.parquet as pq from feast.constants import DATETIME_COLUMN from feast.feature_set import FeatureSet -from feast.type_map import pa_column_to_timestamp_proto_column, \ - pa_column_to_proto_column +from feast.type_map import ( + pa_column_to_timestamp_proto_column, + pa_column_to_proto_column, +) from feast.types import Field_pb2 as FieldProto from feast.types.FeatureRow_pb2 import FeatureRow @@ -22,11 +24,7 @@ KAFKA_CHUNK_PRODUCTION_TIMEOUT = 120 # type: int -def _encode_pa_tables( - file: str, - fs: FeatureSet, - 
row_group_idx: int, -) -> List[bytes]: +def _encode_pa_tables(file: str, fs: FeatureSet, row_group_idx: int) -> List[bytes]: """ Helper function to encode a PyArrow table(s) read from parquet file(s) into FeatureRows. @@ -58,17 +56,15 @@ def _encode_pa_tables( table = pq_file.read_row_group(row_group_idx) # Add datetime column - datetime_col = pa_column_to_timestamp_proto_column( - table.column(DATETIME_COLUMN)) + datetime_col = pa_column_to_timestamp_proto_column(table.column(DATETIME_COLUMN)) # Preprocess the columns by converting all its values to Proto values proto_columns = { - field_name: pa_column_to_proto_column(field.dtype, - table.column(field_name)) + field_name: pa_column_to_proto_column(field.dtype, table.column(field_name)) for field_name, field in fs.fields.items() } - feature_set = f"{fs.name}:{fs.version}" + feature_set = f"{fs.project}/{fs.name}:{fs.version}" # List to store result feature_rows = [] @@ -80,8 +76,9 @@ def _encode_pa_tables( # Iterate through the rows for row_idx in range(table.num_rows): - feature_row = FeatureRow(event_timestamp=datetime_col[row_idx], - feature_set=feature_set) + feature_row = FeatureRow( + event_timestamp=datetime_col[row_idx], feature_set=feature_set + ) # Loop optimization declaration ext = feature_row.fields.extend @@ -96,10 +93,7 @@ def _encode_pa_tables( def get_feature_row_chunks( - file: str, - row_groups: List[int], - fs: FeatureSet, - max_workers: int + file: str, row_groups: List[int], fs: FeatureSet, max_workers: int ) -> Iterable[List[bytes]]: """ Iterator function to encode a PyArrow table read from a parquet file to diff --git a/sdk/python/feast/serving/ServingService_pb2.py b/sdk/python/feast/serving/ServingService_pb2.py index e7258f5a7d..9d0d55f2ab 100644 --- a/sdk/python/feast/serving/ServingService_pb2.py +++ b/sdk/python/feast/serving/ServingService_pb2.py @@ -24,7 +24,7 @@ package='feast.serving', syntax='proto3', 
serialized_options=_b('\n\rfeast.servingB\017ServingAPIProtoZ2github.com/gojek/feast/sdk/go/protos/feast/serving'), - serialized_pb=_b('\n\"feast/serving/ServingService.proto\x12\rfeast.serving\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\x1c\n\x1aGetFeastServingInfoRequest\"{\n\x1bGetFeastServingInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\x12-\n\x04type\x18\x02 \x01(\x0e\x32\x1f.feast.serving.FeastServingType\x12\x1c\n\x14job_staging_location\x18\n \x01(\t\"u\n\x11\x46\x65\x61tureSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x15\n\rfeature_names\x18\x03 \x03(\t\x12*\n\x07max_age\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x93\x03\n\x18GetOnlineFeaturesRequest\x12\x36\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32 .feast.serving.FeatureSetRequest\x12\x46\n\x0b\x65ntity_rows\x18\x02 \x03(\x0b\x32\x31.feast.serving.GetOnlineFeaturesRequest.EntityRow\x12!\n\x19omit_entities_in_response\x18\x03 \x01(\x08\x1a\xd3\x01\n\tEntityRow\x12\x34\n\x10\x65ntity_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12M\n\x06\x66ields\x18\x02 \x03(\x0b\x32=.feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"\x87\x01\n\x17GetBatchFeaturesRequest\x12\x36\n\x0c\x66\x65\x61ture_sets\x18\x01 \x03(\x0b\x32 .feast.serving.FeatureSetRequest\x12\x34\n\x0e\x64\x61taset_source\x18\x02 \x01(\x0b\x32\x1c.feast.serving.DatasetSource\"\x8c\x02\n\x19GetOnlineFeaturesResponse\x12J\n\x0c\x66ield_values\x18\x01 \x03(\x0b\x32\x34.feast.serving.GetOnlineFeaturesResponse.FieldValues\x1a\xa2\x01\n\x0b\x46ieldValues\x12P\n\x06\x66ields\x18\x01 \x03(\x0b\x32@.feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 
\x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\";\n\x18GetBatchFeaturesResponse\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"0\n\rGetJobRequest\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"1\n\x0eGetJobResponse\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"\xb3\x01\n\x03Job\x12\n\n\x02id\x18\x01 \x01(\t\x12$\n\x04type\x18\x02 \x01(\x0e\x32\x16.feast.serving.JobType\x12(\n\x06status\x18\x03 \x01(\x0e\x32\x18.feast.serving.JobStatus\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12.\n\x0b\x64\x61ta_format\x18\x06 \x01(\x0e\x32\x19.feast.serving.DataFormat\"\xb2\x01\n\rDatasetSource\x12>\n\x0b\x66ile_source\x18\x01 \x01(\x0b\x32\'.feast.serving.DatasetSource.FileSourceH\x00\x1aO\n\nFileSource\x12\x11\n\tfile_uris\x18\x01 \x03(\t\x12.\n\x0b\x64\x61ta_format\x18\x02 \x01(\x0e\x32\x19.feast.serving.DataFormatB\x10\n\x0e\x64\x61taset_source*o\n\x10\x46\x65\x61stServingType\x12\x1e\n\x1a\x46\x45\x41ST_SERVING_TYPE_INVALID\x10\x00\x12\x1d\n\x19\x46\x45\x41ST_SERVING_TYPE_ONLINE\x10\x01\x12\x1c\n\x18\x46\x45\x41ST_SERVING_TYPE_BATCH\x10\x02*6\n\x07JobType\x12\x14\n\x10JOB_TYPE_INVALID\x10\x00\x12\x15\n\x11JOB_TYPE_DOWNLOAD\x10\x01*h\n\tJobStatus\x12\x16\n\x12JOB_STATUS_INVALID\x10\x00\x12\x16\n\x12JOB_STATUS_PENDING\x10\x01\x12\x16\n\x12JOB_STATUS_RUNNING\x10\x02\x12\x13\n\x0fJOB_STATUS_DONE\x10\x03*;\n\nDataFormat\x12\x17\n\x13\x44\x41TA_FORMAT_INVALID\x10\x00\x12\x14\n\x10\x44\x41TA_FORMAT_AVRO\x10\x01\x32\x92\x03\n\x0eServingService\x12l\n\x13GetFeastServingInfo\x12).feast.serving.GetFeastServingInfoRequest\x1a*.feast.serving.GetFeastServingInfoResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponse\x12\x63\n\x10GetBatchFeatures\x12&.feast.serving.GetBatchFeaturesRequest\x1a\'.feast.serving.GetBatchFeaturesResponse\x12\x45\n\x06GetJob\x12\x1c.feast.serving.GetJobRequest\x1a\x1d.feast.serving.GetJobResponseBT\n\rfeast.servi
ngB\x0fServingAPIProtoZ2github.com/gojek/feast/sdk/go/protos/feast/servingb\x06proto3') + serialized_pb=_b('\n\"feast/serving/ServingService.proto\x12\rfeast.serving\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x17\x66\x65\x61st/types/Value.proto\"\x1c\n\x1aGetFeastServingInfoRequest\"{\n\x1bGetFeastServingInfoResponse\x12\x0f\n\x07version\x18\x01 \x01(\t\x12-\n\x04type\x18\x02 \x01(\x0e\x32\x1f.feast.serving.FeastServingType\x12\x1c\n\x14job_staging_location\x18\n \x01(\t\"n\n\x10\x46\x65\x61tureReference\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12*\n\x07max_age\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x8e\x03\n\x18GetOnlineFeaturesRequest\x12\x31\n\x08\x66\x65\x61tures\x18\x04 \x03(\x0b\x32\x1f.feast.serving.FeatureReference\x12\x46\n\x0b\x65ntity_rows\x18\x02 \x03(\x0b\x32\x31.feast.serving.GetOnlineFeaturesRequest.EntityRow\x12!\n\x19omit_entities_in_response\x18\x03 \x01(\x08\x1a\xd3\x01\n\tEntityRow\x12\x34\n\x10\x65ntity_timestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12M\n\x06\x66ields\x18\x02 \x03(\x0b\x32=.feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\"\x82\x01\n\x17GetBatchFeaturesRequest\x12\x31\n\x08\x66\x65\x61tures\x18\x03 \x03(\x0b\x32\x1f.feast.serving.FeatureReference\x12\x34\n\x0e\x64\x61taset_source\x18\x02 \x01(\x0b\x32\x1c.feast.serving.DatasetSource\"\x8c\x02\n\x19GetOnlineFeaturesResponse\x12J\n\x0c\x66ield_values\x18\x01 \x03(\x0b\x32\x34.feast.serving.GetOnlineFeaturesResponse.FieldValues\x1a\xa2\x01\n\x0b\x46ieldValues\x12P\n\x06\x66ields\x18\x01 \x03(\x0b\x32@.feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry\x1a\x41\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12!\n\x05value\x18\x02 
\x01(\x0b\x32\x12.feast.types.Value:\x02\x38\x01\";\n\x18GetBatchFeaturesResponse\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"0\n\rGetJobRequest\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"1\n\x0eGetJobResponse\x12\x1f\n\x03job\x18\x01 \x01(\x0b\x32\x12.feast.serving.Job\"\xb3\x01\n\x03Job\x12\n\n\x02id\x18\x01 \x01(\t\x12$\n\x04type\x18\x02 \x01(\x0e\x32\x16.feast.serving.JobType\x12(\n\x06status\x18\x03 \x01(\x0e\x32\x18.feast.serving.JobStatus\x12\r\n\x05\x65rror\x18\x04 \x01(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12.\n\x0b\x64\x61ta_format\x18\x06 \x01(\x0e\x32\x19.feast.serving.DataFormat\"\xb2\x01\n\rDatasetSource\x12>\n\x0b\x66ile_source\x18\x01 \x01(\x0b\x32\'.feast.serving.DatasetSource.FileSourceH\x00\x1aO\n\nFileSource\x12\x11\n\tfile_uris\x18\x01 \x03(\t\x12.\n\x0b\x64\x61ta_format\x18\x02 \x01(\x0e\x32\x19.feast.serving.DataFormatB\x10\n\x0e\x64\x61taset_source*o\n\x10\x46\x65\x61stServingType\x12\x1e\n\x1a\x46\x45\x41ST_SERVING_TYPE_INVALID\x10\x00\x12\x1d\n\x19\x46\x45\x41ST_SERVING_TYPE_ONLINE\x10\x01\x12\x1c\n\x18\x46\x45\x41ST_SERVING_TYPE_BATCH\x10\x02*6\n\x07JobType\x12\x14\n\x10JOB_TYPE_INVALID\x10\x00\x12\x15\n\x11JOB_TYPE_DOWNLOAD\x10\x01*h\n\tJobStatus\x12\x16\n\x12JOB_STATUS_INVALID\x10\x00\x12\x16\n\x12JOB_STATUS_PENDING\x10\x01\x12\x16\n\x12JOB_STATUS_RUNNING\x10\x02\x12\x13\n\x0fJOB_STATUS_DONE\x10\x03*;\n\nDataFormat\x12\x17\n\x13\x44\x41TA_FORMAT_INVALID\x10\x00\x12\x14\n\x10\x44\x41TA_FORMAT_AVRO\x10\x01\x32\x92\x03\n\x0eServingService\x12l\n\x13GetFeastServingInfo\x12).feast.serving.GetFeastServingInfoRequest\x1a*.feast.serving.GetFeastServingInfoResponse\x12\x66\n\x11GetOnlineFeatures\x12\'.feast.serving.GetOnlineFeaturesRequest\x1a(.feast.serving.GetOnlineFeaturesResponse\x12\x63\n\x10GetBatchFeatures\x12&.feast.serving.GetBatchFeaturesRequest\x1a\'.feast.serving.GetBatchFeaturesResponse\x12\x45\n\x06GetJob\x12\x1c.feast.serving.GetJobRequest\x1a\x1d.feast.serving.GetJobResponseBT\n\rfeast.servi
ngB\x0fServingAPIProtoZ2github.com/gojek/feast/sdk/go/protos/feast/servingb\x06proto3') , dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,feast_dot_types_dot_Value__pb2.DESCRIPTOR,]) @@ -49,8 +49,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1757, - serialized_end=1868, + serialized_start=1740, + serialized_end=1851, ) _sym_db.RegisterEnumDescriptor(_FEASTSERVINGTYPE) @@ -72,8 +72,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1870, - serialized_end=1924, + serialized_start=1853, + serialized_end=1907, ) _sym_db.RegisterEnumDescriptor(_JOBTYPE) @@ -103,8 +103,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1926, - serialized_end=2030, + serialized_start=1909, + serialized_end=2013, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS) @@ -126,8 +126,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2032, - serialized_end=2091, + serialized_start=2015, + serialized_end=2074, ) _sym_db.RegisterEnumDescriptor(_DATAFORMAT) @@ -215,36 +215,36 @@ ) -_FEATURESETREQUEST = _descriptor.Descriptor( - name='FeatureSetRequest', - full_name='feast.serving.FeatureSetRequest', +_FEATUREREFERENCE = _descriptor.Descriptor( + name='FeatureReference', + full_name='feast.serving.FeatureReference', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='name', full_name='feast.serving.FeatureSetRequest.name', index=0, + name='project', full_name='feast.serving.FeatureReference.project', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='version', full_name='feast.serving.FeatureSetRequest.version', index=1, - number=2, type=5, cpp_type=1, label=1, - 
has_default_value=False, default_value=0, + name='name', full_name='feast.serving.FeatureReference.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='feature_names', full_name='feast.serving.FeatureSetRequest.feature_names', index=2, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], + name='version', full_name='feast.serving.FeatureReference.version', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='max_age', full_name='feast.serving.FeatureSetRequest.max_age', index=3, + name='max_age', full_name='feast.serving.FeatureReference.max_age', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -263,7 +263,7 @@ oneofs=[ ], serialized_start=298, - serialized_end=415, + serialized_end=408, ) @@ -300,8 +300,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=756, - serialized_end=821, + serialized_start=744, + serialized_end=809, ) _GETONLINEFEATURESREQUEST_ENTITYROW = _descriptor.Descriptor( @@ -337,8 +337,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=610, - serialized_end=821, + serialized_start=598, + serialized_end=809, ) _GETONLINEFEATURESREQUEST = _descriptor.Descriptor( @@ -349,8 +349,8 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='feature_sets', full_name='feast.serving.GetOnlineFeaturesRequest.feature_sets', index=0, - number=1, type=11, cpp_type=10, label=3, + name='features', 
full_name='feast.serving.GetOnlineFeaturesRequest.features', index=0, + number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, @@ -381,8 +381,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=418, - serialized_end=821, + serialized_start=411, + serialized_end=809, ) @@ -394,8 +394,8 @@ containing_type=None, fields=[ _descriptor.FieldDescriptor( - name='feature_sets', full_name='feast.serving.GetBatchFeaturesRequest.feature_sets', index=0, - number=1, type=11, cpp_type=10, label=3, + name='features', full_name='feast.serving.GetBatchFeaturesRequest.features', index=0, + number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, @@ -419,8 +419,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=824, - serialized_end=959, + serialized_start=812, + serialized_end=942, ) @@ -457,8 +457,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=756, - serialized_end=821, + serialized_start=744, + serialized_end=809, ) _GETONLINEFEATURESRESPONSE_FIELDVALUES = _descriptor.Descriptor( @@ -487,8 +487,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1068, - serialized_end=1230, + serialized_start=1051, + serialized_end=1213, ) _GETONLINEFEATURESRESPONSE = _descriptor.Descriptor( @@ -517,8 +517,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=962, - serialized_end=1230, + serialized_start=945, + serialized_end=1213, ) @@ -548,8 +548,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1232, - serialized_end=1291, + serialized_start=1215, + serialized_end=1274, ) @@ -579,8 +579,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1293, - serialized_end=1341, + serialized_start=1276, + serialized_end=1324, ) @@ -610,8 +610,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1343, - 
serialized_end=1392, + serialized_start=1326, + serialized_end=1375, ) @@ -676,8 +676,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1395, - serialized_end=1574, + serialized_start=1378, + serialized_end=1557, ) @@ -714,8 +714,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1658, - serialized_end=1737, + serialized_start=1641, + serialized_end=1720, ) _DATASETSOURCE = _descriptor.Descriptor( @@ -747,20 +747,20 @@ name='dataset_source', full_name='feast.serving.DatasetSource.dataset_source', index=0, containing_type=None, fields=[]), ], - serialized_start=1577, - serialized_end=1755, + serialized_start=1560, + serialized_end=1738, ) _GETFEASTSERVINGINFORESPONSE.fields_by_name['type'].enum_type = _FEASTSERVINGTYPE -_FEATURESETREQUEST.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_FEATUREREFERENCE.fields_by_name['max_age'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE _GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY.containing_type = _GETONLINEFEATURESREQUEST_ENTITYROW _GETONLINEFEATURESREQUEST_ENTITYROW.fields_by_name['entity_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _GETONLINEFEATURESREQUEST_ENTITYROW.fields_by_name['fields'].message_type = _GETONLINEFEATURESREQUEST_ENTITYROW_FIELDSENTRY _GETONLINEFEATURESREQUEST_ENTITYROW.containing_type = _GETONLINEFEATURESREQUEST -_GETONLINEFEATURESREQUEST.fields_by_name['feature_sets'].message_type = _FEATURESETREQUEST +_GETONLINEFEATURESREQUEST.fields_by_name['features'].message_type = _FEATUREREFERENCE _GETONLINEFEATURESREQUEST.fields_by_name['entity_rows'].message_type = _GETONLINEFEATURESREQUEST_ENTITYROW -_GETBATCHFEATURESREQUEST.fields_by_name['feature_sets'].message_type = _FEATURESETREQUEST +_GETBATCHFEATURESREQUEST.fields_by_name['features'].message_type = _FEATUREREFERENCE 
_GETBATCHFEATURESREQUEST.fields_by_name['dataset_source'].message_type = _DATASETSOURCE _GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY.fields_by_name['value'].message_type = feast_dot_types_dot_Value__pb2._VALUE _GETONLINEFEATURESRESPONSE_FIELDVALUES_FIELDSENTRY.containing_type = _GETONLINEFEATURESRESPONSE_FIELDVALUES @@ -781,7 +781,7 @@ _DATASETSOURCE.fields_by_name['file_source'].containing_oneof = _DATASETSOURCE.oneofs_by_name['dataset_source'] DESCRIPTOR.message_types_by_name['GetFeastServingInfoRequest'] = _GETFEASTSERVINGINFOREQUEST DESCRIPTOR.message_types_by_name['GetFeastServingInfoResponse'] = _GETFEASTSERVINGINFORESPONSE -DESCRIPTOR.message_types_by_name['FeatureSetRequest'] = _FEATURESETREQUEST +DESCRIPTOR.message_types_by_name['FeatureReference'] = _FEATUREREFERENCE DESCRIPTOR.message_types_by_name['GetOnlineFeaturesRequest'] = _GETONLINEFEATURESREQUEST DESCRIPTOR.message_types_by_name['GetBatchFeaturesRequest'] = _GETBATCHFEATURESREQUEST DESCRIPTOR.message_types_by_name['GetOnlineFeaturesResponse'] = _GETONLINEFEATURESRESPONSE @@ -810,12 +810,12 @@ }) _sym_db.RegisterMessage(GetFeastServingInfoResponse) -FeatureSetRequest = _reflection.GeneratedProtocolMessageType('FeatureSetRequest', (_message.Message,), { - 'DESCRIPTOR' : _FEATURESETREQUEST, +FeatureReference = _reflection.GeneratedProtocolMessageType('FeatureReference', (_message.Message,), { + 'DESCRIPTOR' : _FEATUREREFERENCE, '__module__' : 'feast.serving.ServingService_pb2' - # @@protoc_insertion_point(class_scope:feast.serving.FeatureSetRequest) + # @@protoc_insertion_point(class_scope:feast.serving.FeatureReference) }) -_sym_db.RegisterMessage(FeatureSetRequest) +_sym_db.RegisterMessage(FeatureReference) GetOnlineFeaturesRequest = _reflection.GeneratedProtocolMessageType('GetOnlineFeaturesRequest', (_message.Message,), { @@ -924,8 +924,8 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=2094, - serialized_end=2496, + serialized_start=2077, + serialized_end=2479, 
methods=[ _descriptor.MethodDescriptor( name='GetFeastServingInfo', diff --git a/sdk/python/feast/serving/ServingService_pb2.pyi b/sdk/python/feast/serving/ServingService_pb2.pyi index d03fa6568f..e10245d6c7 100644 --- a/sdk/python/feast/serving/ServingService_pb2.pyi +++ b/sdk/python/feast/serving/ServingService_pb2.pyi @@ -147,32 +147,32 @@ class GetFeastServingInfoResponse(google___protobuf___message___Message): else: def ClearField(self, field_name: typing_extensions___Literal[u"job_staging_location",b"job_staging_location",u"type",b"type",u"version",b"version"]) -> None: ... -class FeatureSetRequest(google___protobuf___message___Message): +class FeatureReference(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... + project = ... # type: typing___Text name = ... # type: typing___Text version = ... # type: int - feature_names = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] @property def max_age(self) -> google___protobuf___duration_pb2___Duration: ... def __init__(self, *, + project : typing___Optional[typing___Text] = None, name : typing___Optional[typing___Text] = None, version : typing___Optional[int] = None, - feature_names : typing___Optional[typing___Iterable[typing___Text]] = None, max_age : typing___Optional[google___protobuf___duration_pb2___Duration] = None, ) -> None: ... @classmethod - def FromString(cls, s: bytes) -> FeatureSetRequest: ... + def FromString(cls, s: bytes) -> FeatureReference: ... def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def HasField(self, field_name: typing_extensions___Literal[u"max_age"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"feature_names",u"max_age",u"name",u"version"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[u"max_age",u"name",u"project",u"version"]) -> None: ... else: def HasField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"feature_names",b"feature_names",u"max_age",b"max_age",u"name",b"name",u"version",b"version"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"max_age",b"max_age",u"name",b"name",u"project",b"project",u"version",b"version"]) -> None: ... class GetOnlineFeaturesRequest(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @@ -227,14 +227,14 @@ class GetOnlineFeaturesRequest(google___protobuf___message___Message): omit_entities_in_response = ... # type: bool @property - def feature_sets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureSetRequest]: ... + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureReference]: ... @property def entity_rows(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[GetOnlineFeaturesRequest.EntityRow]: ... def __init__(self, *, - feature_sets : typing___Optional[typing___Iterable[FeatureSetRequest]] = None, + features : typing___Optional[typing___Iterable[FeatureReference]] = None, entity_rows : typing___Optional[typing___Iterable[GetOnlineFeaturesRequest.EntityRow]] = None, omit_entities_in_response : typing___Optional[bool] = None, ) -> None: ... @@ -243,22 +243,22 @@ class GetOnlineFeaturesRequest(google___protobuf___message___Message): def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entity_rows",u"feature_sets",u"omit_entities_in_response"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entity_rows",u"features",u"omit_entities_in_response"]) -> None: ... else: - def ClearField(self, field_name: typing_extensions___Literal[u"entity_rows",b"entity_rows",u"feature_sets",b"feature_sets",u"omit_entities_in_response",b"omit_entities_in_response"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entity_rows",b"entity_rows",u"features",b"features",u"omit_entities_in_response",b"omit_entities_in_response"]) -> None: ... class GetBatchFeaturesRequest(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... @property - def feature_sets(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureSetRequest]: ... + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[FeatureReference]: ... @property def dataset_source(self) -> DatasetSource: ... def __init__(self, *, - feature_sets : typing___Optional[typing___Iterable[FeatureSetRequest]] = None, + features : typing___Optional[typing___Iterable[FeatureReference]] = None, dataset_source : typing___Optional[DatasetSource] = None, ) -> None: ... @classmethod @@ -267,10 +267,10 @@ class GetBatchFeaturesRequest(google___protobuf___message___Message): def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... if sys.version_info >= (3,): def HasField(self, field_name: typing_extensions___Literal[u"dataset_source"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",u"feature_sets"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",u"features"]) -> None: ... 
else: def HasField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"feature_sets",b"feature_sets"]) -> None: ... + def ClearField(self, field_name: typing_extensions___Literal[u"dataset_source",b"dataset_source",u"features",b"features"]) -> None: ... class GetOnlineFeaturesResponse(google___protobuf___message___Message): DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index ca13c2573b..af019c3fdb 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -19,10 +19,7 @@ import pandas as pd import pyarrow as pa from feast.constants import DATETIME_COLUMN -from feast.types import ( - FeatureRow_pb2 as FeatureRowProto, - Field_pb2 as FieldProto, -) +from feast.types import FeatureRow_pb2 as FeatureRowProto, Field_pb2 as FieldProto from feast.types.Value_pb2 import ( Value as ProtoValue, ValueType as ProtoValueType, @@ -168,7 +165,7 @@ def convert_series_to_proto_values(row: pd.Series): def convert_dict_to_proto_values( - row: dict, df_datetime_dtype: pd.DataFrame.dtypes, feature_set + row: dict, df_datetime_dtype: pd.DataFrame.dtypes, feature_set ) -> FeatureRowProto.FeatureRow: """ Encode a dictionary describing a feature row into a FeatureRows object. 
@@ -186,7 +183,11 @@ def convert_dict_to_proto_values( event_timestamp=_pd_datetime_to_timestamp_proto( df_datetime_dtype, row[DATETIME_COLUMN] ), - feature_set=feature_set.name + ":" + str(feature_set.version), + feature_set=feature_set.project + + "/" + + feature_set.name + + ":" + + str(feature_set.version), ) for field_name, field in feature_set.fields.items(): @@ -219,14 +220,12 @@ def _pd_datetime_to_timestamp_proto(dtype, value) -> Timestamp: # If timestamp does not contain timezone, we assume it is of local # timezone and adjust it to UTC local_timezone = datetime.now(timezone.utc).astimezone().tzinfo - value = value.tz_localize(local_timezone).tz_convert("UTC").tz_localize( - None) + value = value.tz_localize(local_timezone).tz_convert("UTC").tz_localize(None) return Timestamp(seconds=int(value.timestamp())) if dtype.__str__() == "datetime64[ns, UTC]": return Timestamp(seconds=int(value.timestamp())) else: - return Timestamp( - seconds=np.datetime64(value).astype("int64") // 1000000) + return Timestamp(seconds=np.datetime64(value).astype("int64") // 1000000) def _type_err(item, dtype): @@ -355,6 +354,7 @@ def _python_value_to_proto_value(feast_value_type, value) -> ProtoValue: raise Exception(f"Unsupported data type: ${str(type(value))}") + def pa_to_feast_value_attr(pa_type: object): """ Returns the equivalent Feast ValueType string for the given pa.lib type. 
@@ -424,9 +424,7 @@ def pa_to_value_type(pa_type: object): return type_map[pa_type.__str__()] -def pa_to_feast_value_type( - value: object -) -> ValueType: +def pa_to_feast_value_type(value: object) -> ValueType: type_map = { "timestamp[ms]": ValueType.INT64, "int32": ValueType.INT32, @@ -447,46 +445,45 @@ def pa_to_feast_value_type( return type_map[value.type.__str__()] -def pa_column_to_timestamp_proto_column( - column: pa.lib.ChunkedArray -) -> Timestamp: +def pa_column_to_timestamp_proto_column(column: pa.lib.ChunkedArray) -> Timestamp: if not isinstance(column.type, TimestampType): raise Exception("Only TimestampType columns are allowed") proto_column = [] for val in column: timestamp = Timestamp() - timestamp.FromMicroseconds( - micros=int(val.as_py().timestamp() * 1_000_000)) + timestamp.FromMicroseconds(micros=int(val.as_py().timestamp() * 1_000_000)) proto_column.append(timestamp) return proto_column def pa_column_to_proto_column( - feast_value_type, - column: pa.lib.ChunkedArray + feast_value_type, column: pa.lib.ChunkedArray ) -> List[ProtoValue]: - type_map = {ValueType.INT32: "int32_val", - ValueType.INT64: "int64_val", - ValueType.FLOAT: "float_val", - ValueType.DOUBLE: "double_val", - ValueType.STRING: "string_val", - ValueType.BYTES: "bytes_val", - ValueType.BOOL: "bool_val", - ValueType.BOOL_LIST: {"bool_list_val": BoolList}, - ValueType.BYTES_LIST: {"bytes_list_val": BytesList}, - ValueType.STRING_LIST: {"string_list_val": StringList}, - ValueType.FLOAT_LIST: {"float_list_val": FloatList}, - ValueType.DOUBLE_LIST: {"double_list_val": DoubleList}, - ValueType.INT32_LIST: {"int32_list_val": Int32List}, - ValueType.INT64_LIST: {"int64_list_val": Int64List}, } + type_map = { + ValueType.INT32: "int32_val", + ValueType.INT64: "int64_val", + ValueType.FLOAT: "float_val", + ValueType.DOUBLE: "double_val", + ValueType.STRING: "string_val", + ValueType.BYTES: "bytes_val", + ValueType.BOOL: "bool_val", + ValueType.BOOL_LIST: {"bool_list_val": BoolList}, + 
ValueType.BYTES_LIST: {"bytes_list_val": BytesList}, + ValueType.STRING_LIST: {"string_list_val": StringList}, + ValueType.FLOAT_LIST: {"float_list_val": FloatList}, + ValueType.DOUBLE_LIST: {"double_list_val": DoubleList}, + ValueType.INT32_LIST: {"int32_list_val": Int32List}, + ValueType.INT64_LIST: {"int64_list_val": Int64List}, + } value = type_map[feast_value_type] # Process list types if type(value) == dict: list_param_name = list(value.keys())[0] - return [ProtoValue( - **{list_param_name: value[list_param_name](val=x.as_py())}) - for x in column] + return [ + ProtoValue(**{list_param_name: value[list_param_name](val=x.as_py())}) + for x in column + ] else: return [ProtoValue(**{value: x.as_py()}) for x in column] diff --git a/sdk/python/feast/types/__init__.py b/sdk/python/feast/types/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 9ac7225e80..075ce500ac 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -44,7 +44,7 @@ "pyarrow>=0.15.1", "numpy", "google", - "confluent_kafka" + "confluent_kafka", ] # README file from Feast repo root directory diff --git a/sdk/python/tests/fake_kafka.py b/sdk/python/tests/fake_kafka.py deleted file mode 100644 index c511ed1d27..0000000000 --- a/sdk/python/tests/fake_kafka.py +++ /dev/null @@ -1,21 +0,0 @@ -import queue -from typing import Dict - - -class FakeKafka: - def __init__(self): - self._messages = dict() # type: Dict[str, queue.Queue] - - def send(self, topic, message): - if topic not in self._messages: - self._messages[topic] = queue.Queue() - self._messages[topic].queue.append(message) - - def get(self, topic: str): - message = None - if self._messages[topic]: - message = self._messages[topic].get(block=False) - return message - - def flush(self, timeout): - return True diff --git a/sdk/python/tests/feast_core_server.py b/sdk/python/tests/feast_core_server.py index 61688f6504..7cc837a4f2 100644 --- 
a/sdk/python/tests/feast_core_server.py +++ b/sdk/python/tests/feast_core_server.py @@ -42,11 +42,13 @@ def ListFeatureSets(self, request: ListFeatureSetsRequest, context): for fs in list(self._feature_sets.values()) if ( not request.filter.feature_set_name - or fs.name == request.filter.feature_set_name + or request.filter.feature_set_name == "*" + or fs.spec.name == request.filter.feature_set_name ) and ( not request.filter.feature_set_version - or str(fs.version) == request.filter.feature_set_version + or str(fs.spec.version) == request.filter.feature_set_version + or request.filter.feature_set_version == "*" ) ] @@ -54,7 +56,6 @@ def ListFeatureSets(self, request: ListFeatureSetsRequest, context): def ApplyFeatureSet(self, request: ApplyFeatureSetRequest, context): feature_set = request.feature_set - if feature_set.spec.version is None: feature_set.spec.version = 1 else: @@ -70,7 +71,9 @@ def ApplyFeatureSet(self, request: ApplyFeatureSetRequest, context): status=FeatureSetStatus.STATUS_READY, created_timestamp=Timestamp(seconds=10), ) - applied_feature_set = FeatureSetProto(spec=feature_set.spec, meta=feature_set_meta) + applied_feature_set = FeatureSetProto( + spec=feature_set.spec, meta=feature_set_meta + ) self._feature_sets[feature_set.spec.name] = applied_feature_set _logger.info( diff --git a/sdk/python/tests/feast_serving_server.py b/sdk/python/tests/feast_serving_server.py index 6c202b7d8c..eb46bde121 100644 --- a/sdk/python/tests/feast_serving_server.py +++ b/sdk/python/tests/feast_serving_server.py @@ -3,25 +3,16 @@ import logging import grpc -import threading import feast.serving.ServingService_pb2_grpc as Serving from feast.serving.ServingService_pb2 import ( - GetBatchFeaturesResponse, GetOnlineFeaturesRequest, GetOnlineFeaturesResponse, GetFeastServingInfoResponse, ) -import fake_kafka from typing import Dict -import sqlite3 from feast.core.CoreService_pb2_grpc import CoreServiceStub -from feast.core.CoreService_pb2 import ( - 
ListFeatureSetsResponse, - ListStoresRequest, - ListStoresResponse, -) +from feast.core.CoreService_pb2 import ListFeatureSetsResponse from feast.core import FeatureSet_pb2 as FeatureSetProto -import stores from feast.types import ( FeatureRow_pb2 as FeatureRowProto, Field_pb2 as FieldProto, @@ -33,19 +24,13 @@ class ServingServicer(Serving.ServingServiceServicer): - def __init__(self, kafka: fake_kafka = None, core_url: str = None): - if kafka and core_url: + def __init__(self, core_url: str = None): + if core_url: self.__core_channel = None self.__connect_core(core_url) self._feature_sets = ( dict() ) # type: Dict[str, FeatureSetProto.FeatureSetSpec] - self._kafka = kafka - self._store = stores.SQLiteDatabase() - - thread = threading.Thread(target=self.__consume, args=()) - thread.daemon = True - thread.start() def __connect_core(self, core_url: str): if not core_url: @@ -74,24 +59,6 @@ def __get_feature_sets_from_core(self): for feature_set in list(feature_sets.feature_sets): self._feature_sets[feature_set.name] = feature_set - def __consume(self): - """ - Consume message in the background from Fake Kafka - """ - while True: - self.__get_feature_sets_from_core() - self.__register_feature_sets_with_store() - for feature_set in list(self._feature_sets.values()): - message = self._kafka.get(feature_set.source.kafka_source_config.topic) - if message is None: - break - self._store.upsert_feature_row(feature_set, message) - time.sleep(1) - - def __register_feature_sets_with_store(self): - for feature_set in list(self._feature_sets.values()): - self._store.register_feature_set(feature_set) - def GetFeastServingVersion(self, request, context): return GetFeastServingInfoResponse(version="0.3.2") diff --git a/sdk/python/tests/stores.py b/sdk/python/tests/stores.py deleted file mode 100644 index e511cb1db1..0000000000 --- a/sdk/python/tests/stores.py +++ /dev/null @@ -1,98 +0,0 @@ -from feast.types import FeatureRow_pb2 as FeatureRowProto -from feast.core import 
FeatureSet_pb2 as FeatureSetProto -import sqlite3 -from typing import Dict, List -from feast.entity import Entity -from feast.value_type import ValueType -from feast.feature_set import FeatureSet, Feature - -from feast.types import ( - FeatureRow_pb2 as FeatureRowProto, - Field_pb2 as FieldProto, - Value_pb2 as ValueProto, -) -from google.protobuf.timestamp_pb2 import Timestamp - - -class Database: - pass - - -class SQLiteDatabase(Database): - def __init__(self): - self._conn = sqlite3.connect(":memory:") - self._c = self._conn.cursor() - - def register_feature_set(self, feature_set: FeatureSetProto.FeatureSetSpec): - query = build_sqlite_create_table_query(feature_set) - print(query) - self._c.execute(query) - self._c.execute("SELECT name FROM sqlite_master WHERE type='table';") - - available_table = self._c.fetchall() - print(available_table) - - def upsert_feature_row( - self, - feature_set: FeatureSetProto.FeatureSetSpec, - feature_row: FeatureRowProto.FeatureRow, - ): - values = (feature_row.event_timestamp,) - for entity in list(feature_set.entities): - values = values + (get_feature_row_value_by_name(feature_row, entity.name),) - values = values + (feature_row.SerializeToString(),) - self._c.execute(build_sqlite_insert_feature_row_query(feature_set), values) - - -def build_sqlite_create_table_query(feature_set: FeatureSetProto.FeatureSetSpec): - query = ( - """ - CREATE TABLE IF NOT EXISTS {} ( - {} - PRIMARY KEY ({}) - ); - """ - ).format( - get_table_name(feature_set), - " ".join([column + " text NOT NULL," for column in get_columns(feature_set)]), - ", ".join( - get_columns(feature_set)[1:] - ), # exclude event_timestamp column for online stores - ) - # Hyphens become three underscores - query = query.replace("-", "___") - return query - - -def build_sqlite_insert_feature_row_query(feature_set: FeatureSetProto.FeatureSetSpec): - return """ - INSERT OR REPLACE INTO {} ({}) - VALUES(?,?,?,?,?,?) 
- """.format( - get_table_name(feature_set), ",".join(get_columns(feature_set)) - ) - - -def get_columns(feature_set: FeatureSetProto.FeatureSetSpec) -> List[str]: - return ( - ["event_timestamp"] - + [field.name for field in list(feature_set.entities)] - + ["value"] - ) - - -def get_feature_row_value_by_name(feature_row, name): - values = [field.value for field in list(feature_row.fields) if field.name == name] - if len(values) != 1: - raise Exception( - "Invalid number of features with name {} in feature row {}".format( - name, feature_row.name - ) - ) - return values[0] - - -def get_table_name(feature_set: FeatureSetProto.FeatureSetSpec) -> str: - if not feature_set.name and not feature_set.version: - raise ValueError("Feature set name or version is missing") - return (feature_set.name + "_" + str(feature_set.version)).replace("-", "___") diff --git a/sdk/python/tests/test_client.py b/sdk/python/tests/test_client.py index c5a98485fe..123cbe47fd 100644 --- a/sdk/python/tests/test_client.py +++ b/sdk/python/tests/test_client.py @@ -27,19 +27,17 @@ from feast.feature_set import Feature from feast.source import KafkaSource from feast.core.FeatureSet_pb2 import ( - FeatureSetSpec, - FeatureSpec, - EntitySpec, - FeatureSetMeta, - FeatureSetStatus, + FeatureSetSpec as FeatureSetSpecProto, + FeatureSpec as FeatureSpecProto, + EntitySpec as EntitySpecProto, + FeatureSetMeta as FeatureSetMetaProto, + FeatureSetStatus as FeatureSetStatusProto, + FeatureSet as FeatureSetProto, ) -from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto from feast.core.Source_pb2 import SourceType, KafkaSourceConfig, Source from feast.core.CoreService_pb2 import ( GetFeastCoreVersionResponse, - ListFeatureSetsResponse, GetFeatureSetResponse, - GetFeatureSetRequest, ) from feast.serving.ServingService_pb2 import ( GetFeastServingInfoResponse, @@ -134,7 +132,7 @@ def test_get_online_features(self, mock_client, mocker): fields = dict() for feature_num in range(1, 10): - 
fields["feature_set_1:1:feature_" + str(feature_num)] = ValueProto.Value( + fields[f"my_project/feature_{str(feature_num)}:1"] = ValueProto.Value( int64_val=feature_num ) field_values = GetOnlineFeaturesResponse.FieldValues(fields=fields) @@ -157,23 +155,22 @@ def test_get_online_features(self, mock_client, mocker): response = mock_client.get_online_features( entity_rows=entity_rows, - feature_ids=[ - "feature_set_1:1:feature_1", - "feature_set_1:1:feature_2", - "feature_set_1:1:feature_3", - "feature_set_1:1:feature_4", - "feature_set_1:1:feature_5", - "feature_set_1:1:feature_6", - "feature_set_1:1:feature_7", - "feature_set_1:1:feature_8", - "feature_set_1:1:feature_9", + feature_refs=[ + "my_project/feature_1:1", + "my_project/feature_2:1", + "my_project/feature_3:1", + "my_project/feature_4:1", + "my_project/feature_5:1", + "my_project/feature_6:1", + "my_project/feature_7:1", + "my_project/feature_8:1", + "my_project/feature_9:1", ], ) # type: GetOnlineFeaturesResponse assert ( - response.field_values[0].fields["feature_set_1:1:feature_1"].int64_val == 1 - and response.field_values[0].fields["feature_set_1:1:feature_9"].int64_val - == 9 + response.field_values[0].fields["my_project/feature_1:1"].int64_val == 1 + and response.field_values[0].fields["my_project/feature_9:1"].int64_val == 9 ) def test_get_feature_set(self, mock_client, mocker): @@ -186,22 +183,22 @@ def test_get_feature_set(self, mock_client, mocker): "GetFeatureSet", return_value=GetFeatureSetResponse( feature_set=FeatureSetProto( - spec=FeatureSetSpec( + spec=FeatureSetSpecProto( name="my_feature_set", version=2, max_age=Duration(seconds=3600), features=[ - FeatureSpec( + FeatureSpecProto( name="my_feature_1", value_type=ValueProto.ValueType.FLOAT, ), - FeatureSpec( + FeatureSpecProto( name="my_feature_2", value_type=ValueProto.ValueType.FLOAT, ), ], entities=[ - EntitySpec( + EntitySpecProto( name="my_entity_1", value_type=ValueProto.ValueType.INT64, ) @@ -212,11 +209,12 @@ def 
test_get_feature_set(self, mock_client, mocker): bootstrap_servers="localhost:9092", topic="topic" ), ), - ) + ), + meta=FeatureSetMetaProto(), ) ), ) - + mock_client.set_project("my_project") feature_set = mock_client.get_feature_set("my_feature_set", version=2) assert ( @@ -242,30 +240,31 @@ def test_get_batch_features(self, mock_client, mocker): "GetFeatureSet", return_value=GetFeatureSetResponse( feature_set=FeatureSetProto( - spec=FeatureSetSpec( + spec=FeatureSetSpecProto( name="customer_fs", version=1, + project="my_project", entities=[ - EntitySpec( + EntitySpecProto( name="customer", value_type=ValueProto.ValueType.INT64 ), - EntitySpec( + EntitySpecProto( name="transaction", value_type=ValueProto.ValueType.INT64, ), ], features=[ - FeatureSpec( + FeatureSpecProto( name="customer_feature_1", value_type=ValueProto.ValueType.FLOAT, ), - FeatureSpec( + FeatureSpecProto( name="customer_feature_2", value_type=ValueProto.ValueType.STRING, ), ], ), - meta=FeatureSetMeta(status=FeatureSetStatus.STATUS_READY), + meta=FeatureSetMetaProto(status=FeatureSetStatusProto.STATUS_READY), ) ), ) @@ -275,8 +274,8 @@ def test_get_batch_features(self, mock_client, mocker): "datetime": [datetime.utcnow() for _ in range(3)], "customer": [1001, 1002, 1003], "transaction": [1001, 1002, 1003], - "customer_fs:1:customer_feature_1": [1001, 1002, 1003], - "customer_fs:1:customer_feature_2": [1001, 1002, 1003], + "my_project/customer_feature_1:1": [1001, 1002, 1003], + "my_project/customer_feature_2:1": [1001, 1002, 1003], } ) @@ -320,6 +319,7 @@ def test_get_batch_features(self, mock_client, mocker): ), ) + mock_client.set_project("project1") response = mock_client.get_batch_features( entity_rows=pd.DataFrame( { @@ -330,9 +330,9 @@ def test_get_batch_features(self, mock_client, mocker): "transaction": [1001, 1002, 1003], } ), - feature_ids=[ - "customer_fs:1:customer_feature_1", - "customer_fs:1:customer_feature_2", + feature_refs=[ + "my_project/customer_feature_1:1", + 
"my_project/customer_feature_2:1", ], ) # type: Job @@ -341,15 +341,17 @@ def test_get_batch_features(self, mock_client, mocker): actual_dataframe = response.to_dataframe() assert actual_dataframe[ - ["customer_fs:1:customer_feature_1", "customer_fs:1:customer_feature_2"] + ["my_project/customer_feature_1:1", "my_project/customer_feature_2:1"] ].equals( expected_dataframe[ - ["customer_fs:1:customer_feature_1", "customer_fs:1:customer_feature_2"] + ["my_project/customer_feature_1:1", "my_project/customer_feature_2:1"] ] ) def test_apply_feature_set_success(self, client): + client.set_project("project1") + # Create Feature Sets fs1 = FeatureSet("my-feature-set-1") fs1.add(Feature(name="fs1-my-feature-1", dtype=ValueType.INT64)) @@ -378,7 +380,7 @@ def test_apply_feature_set_success(self, client): @pytest.mark.parametrize("dataframe", [dataframes.GOOD]) def test_feature_set_ingest_success(self, dataframe, client, mocker): - + client.set_project("project1") driver_fs = FeatureSet( "driver-feature-set", source=KafkaSource(brokers="kafka:9092", topic="test") ) @@ -390,7 +392,7 @@ def test_feature_set_ingest_success(self, dataframe, client, mocker): # Register with Feast core client.apply(driver_fs) driver_fs = driver_fs.to_proto() - driver_fs.meta.status = FeatureSetStatus.STATUS_READY + driver_fs.meta.status = FeatureSetStatusProto.STATUS_READY mocker.patch.object( client._core_service_stub, @@ -408,6 +410,7 @@ def test_feature_set_ingest_fail_if_pending( self, dataframe, exception, client, mocker ): with pytest.raises(exception): + client.set_project("project1") driver_fs = FeatureSet( "driver-feature-set", source=KafkaSource(brokers="kafka:9092", topic="test"), @@ -420,7 +423,7 @@ def test_feature_set_ingest_fail_if_pending( # Register with Feast core client.apply(driver_fs) driver_fs = driver_fs.to_proto() - driver_fs.meta.status = FeatureSetStatus.STATUS_PENDING + driver_fs.meta.status = FeatureSetStatusProto.STATUS_PENDING mocker.patch.object( 
client._core_service_stub, @@ -459,6 +462,8 @@ def test_feature_set_ingest_failure(self, client, dataframe, exception): @pytest.mark.parametrize("dataframe", [dataframes.ALL_TYPES]) def test_feature_set_types_success(self, client, dataframe, mocker): + client.set_project("project1") + all_types_fs = FeatureSet( name="all_types", entities=[Entity(name="user_id", dtype=ValueType.INT64)], diff --git a/sdk/python/tests/test_stores.py b/sdk/python/tests/test_stores.py deleted file mode 100644 index 330f272dac..0000000000 --- a/sdk/python/tests/test_stores.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2019 The Feast Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import pytest -import stores -from feast.feature_set import FeatureSet -from feast.feature import Feature -from feast.entity import Entity -from feast.value_type import ValueType -from feast.types import ( - FeatureRow_pb2 as FeatureRowProto, - Field_pb2 as FieldProto, - Value_pb2 as ValueProto, -) -from google.protobuf.timestamp_pb2 import Timestamp - - -class TestStores: - @pytest.fixture(scope="module") - def sqlite_store(self): - return stores.SQLiteDatabase() - - def test_register_feature_set(self, sqlite_store): - fs = FeatureSet("my-feature-set") - fs.add(Feature(name="my-feature-1", dtype=ValueType.INT64)) - fs.add(Feature(name="my-feature-2", dtype=ValueType.INT64)) - fs.add(Entity(name="my-entity-1", dtype=ValueType.INT64)) - fs._version = 1 - feature_set_spec_proto = fs.to_proto().spec - - sqlite_store.register_feature_set(feature_set_spec_proto) - feature_row = FeatureRowProto.FeatureRow( - feature_set="feature_set_1", - event_timestamp=Timestamp(), - fields=[ - FieldProto.Field( - name="feature_1", value=ValueProto.Value(float_val=1.2) - ), - FieldProto.Field( - name="feature_2", value=ValueProto.Value(float_val=1.2) - ), - FieldProto.Field( - name="feature_3", value=ValueProto.Value(float_val=1.2) - ), - ], - ) - # sqlite_store.upsert_feature_row(feature_set_proto, feature_row) - assert True diff --git a/serving/sample_redis_config.yml b/serving/sample_redis_config.yml index d6008365e0..b3461649a1 100644 --- a/serving/sample_redis_config.yml +++ b/serving/sample_redis_config.yml @@ -5,4 +5,5 @@ redis_config: port: 6379 subscriptions: - name: "*" - version: ">0" + project: "*" + version: "*" diff --git a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java index 6e02c3f383..2afbdaf90d 100644 --- a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java @@ -19,10 
+19,10 @@ import feast.core.StoreProto.Store; import feast.core.StoreProto.Store.RedisConfig; import feast.core.StoreProto.Store.StoreType; -import feast.serving.service.CachedSpecService; import feast.serving.service.JobService; import feast.serving.service.NoopJobService; import feast.serving.service.RedisBackedJobService; +import feast.serving.specs.CachedSpecService; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import redis.clients.jedis.Jedis; diff --git a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java index 08b9655e3e..9380ded4c4 100644 --- a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java @@ -29,11 +29,11 @@ import feast.serving.FeastProperties; import feast.serving.FeastProperties.JobProperties; import feast.serving.service.BigQueryServingService; -import feast.serving.service.CachedSpecService; import feast.serving.service.JobService; import feast.serving.service.NoopJobService; import feast.serving.service.RedisServingService; import feast.serving.service.ServingService; +import feast.serving.specs.CachedSpecService; import io.opentracing.Tracer; import java.util.Map; import org.slf4j.Logger; diff --git a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java index a18e844dcb..3c91c2765a 100644 --- a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java +++ b/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java @@ -17,8 +17,8 @@ package feast.serving.configuration; import feast.serving.FeastProperties; -import feast.serving.service.CachedSpecService; -import feast.serving.service.CoreSpecService; +import feast.serving.specs.CachedSpecService; 
+import feast.serving.specs.CoreSpecService; import java.nio.file.Path; import java.nio.file.Paths; import java.util.concurrent.Executors; diff --git a/serving/src/main/java/feast/serving/controller/HealthServiceController.java b/serving/src/main/java/feast/serving/controller/HealthServiceController.java index b3b5dc7de4..5372854465 100644 --- a/serving/src/main/java/feast/serving/controller/HealthServiceController.java +++ b/serving/src/main/java/feast/serving/controller/HealthServiceController.java @@ -18,8 +18,8 @@ import feast.core.StoreProto.Store; import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; -import feast.serving.service.CachedSpecService; import feast.serving.service.ServingService; +import feast.serving.specs.CachedSpecService; import io.grpc.health.v1.HealthGrpc.HealthImplBase; import io.grpc.health.v1.HealthProto.HealthCheckRequest; import io.grpc.health.v1.HealthProto.HealthCheckResponse; diff --git a/serving/src/main/java/feast/serving/service/BigQueryServingService.java b/serving/src/main/java/feast/serving/service/BigQueryServingService.java index d1658bde54..7253dd46a3 100644 --- a/serving/src/main/java/feast/serving/service/BigQueryServingService.java +++ b/serving/src/main/java/feast/serving/service/BigQueryServingService.java @@ -18,7 +18,6 @@ import static feast.serving.store.bigquery.QueryTemplater.createEntityTableUUIDQuery; import static feast.serving.store.bigquery.QueryTemplater.generateFullTableName; -import static feast.serving.util.Metrics.requestCount; import static feast.serving.util.Metrics.requestLatency; import com.google.cloud.bigquery.BigQuery; @@ -33,7 +32,6 @@ import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.TableId; import com.google.cloud.storage.Storage; -import feast.core.FeatureSetProto.FeatureSetSpec; import feast.serving.ServingAPIProto; import feast.serving.ServingAPIProto.DataFormat; import feast.serving.ServingAPIProto.DatasetSource; @@ -48,6 +46,8 @@ import 
feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; import feast.serving.ServingAPIProto.JobStatus; import feast.serving.ServingAPIProto.JobType; +import feast.serving.specs.CachedSpecService; +import feast.serving.specs.FeatureSetRequest; import feast.serving.store.bigquery.BatchRetrievalQueryRunnable; import feast.serving.store.bigquery.QueryTemplater; import feast.serving.store.bigquery.model.FeatureSetInfo; @@ -107,21 +107,8 @@ public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest getF @Override public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeaturesRequest) { long startTime = System.currentTimeMillis(); - List featureSetSpecs = - getFeaturesRequest.getFeatureSetsList().stream() - .map( - featureSet -> { - requestCount.labels(featureSet.getName()).inc(); - return specService.getFeatureSet(featureSet.getName(), featureSet.getVersion()); - }) - .collect(Collectors.toList()); - - if (getFeaturesRequest.getFeatureSetsList().size() != featureSetSpecs.size()) { - throw Status.INVALID_ARGUMENT - .withDescription( - "Some of the feature sets requested do not exist in Feast. 
Please check your request payload.") - .asRuntimeException(); - } + List featureSetRequests = + specService.getFeatureSets(getFeaturesRequest.getFeaturesList()); Table entityTable; String entityTableName; @@ -143,8 +130,7 @@ public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeat .filter(name -> !name.equals("event_timestamp")) .collect(Collectors.toList()); - List featureSetInfos = - QueryTemplater.getFeatureSetInfos(featureSetSpecs, getFeaturesRequest.getFeatureSetsList()); + List featureSetInfos = QueryTemplater.getFeatureSetInfos(featureSetRequests); String feastJobId = UUID.randomUUID().toString(); ServingAPIProto.Job feastJob = @@ -170,7 +156,9 @@ public GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeat .build()) .start(); - requestLatency.labels("getBatchFeatures").observe(System.currentTimeMillis() - startTime); + requestLatency + .labels("getBatchFeatures") + .observe((System.currentTimeMillis() - startTime) / 1000); return GetBatchFeaturesResponse.newBuilder().setJob(feastJob).build(); } diff --git a/serving/src/main/java/feast/serving/service/CachedSpecService.java b/serving/src/main/java/feast/serving/service/CachedSpecService.java deleted file mode 100644 index edf2da37a0..0000000000 --- a/serving/src/main/java/feast/serving/service/CachedSpecService.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving.service; - -import static feast.serving.util.mappers.YamlToProtoMapper.yamlToStoreProto; - -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.CacheLoader.InvalidCacheLoadException; -import com.google.common.cache.LoadingCache; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.UpdateStoreRequest; -import feast.core.CoreServiceProto.UpdateStoreResponse; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.Subscription; -import feast.serving.exception.SpecRetrievalException; -import io.grpc.StatusRuntimeException; -import io.prometheus.client.Gauge; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import org.slf4j.Logger; - -/** In-memory cache of specs. 
*/ -public class CachedSpecService { - - private static final int MAX_SPEC_COUNT = 1000; - private static final Logger log = org.slf4j.LoggerFactory.getLogger(CachedSpecService.class); - - private final CoreSpecService coreService; - private final Path configPath; - - private final CacheLoader featureSetSpecCacheLoader; - private final LoadingCache featureSetSpecCache; - private Store store; - - private static Gauge featureSetsCount = - Gauge.build() - .name("feature_set_count") - .subsystem("feast_serving") - .help("number of feature sets served by this instance") - .register(); - private static Gauge cacheLastUpdated = - Gauge.build() - .name("cache_last_updated") - .subsystem("feast_serving") - .help("epoch time of the last time the cache was updated") - .register(); - - public CachedSpecService(CoreSpecService coreService, Path configPath) { - this.configPath = configPath; - this.coreService = coreService; - this.store = updateStore(readConfig(configPath)); - - Map featureSetSpecs = getFeatureSetSpecMap(); - featureSetSpecCacheLoader = CacheLoader.from((String key) -> featureSetSpecs.get(key)); - featureSetSpecCache = - CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(featureSetSpecCacheLoader); - } - - /** - * Get the current store configuration. - * - * @return StoreProto.Store store configuration for this serving instance - */ - public Store getStore() { - return this.store; - } - - /** - * Get a single FeatureSetSpec matching the given name and version. 
- * - * @param name of the featureSet - * @param version to retrieve - * @return FeatureSetSpec of the matching FeatureSet - */ - public FeatureSetSpec getFeatureSet(String name, int version) { - String id = String.format("%s:%d", name, version); - try { - return featureSetSpecCache.get(id); - } catch (InvalidCacheLoadException e) { - // if not found, try to retrieve from core - ListFeatureSetsRequest request = - ListFeatureSetsRequest.newBuilder() - .setFilter( - Filter.newBuilder() - .setFeatureSetName(name) - .setFeatureSetVersion(String.valueOf(version))) - .build(); - ListFeatureSetsResponse featureSets = coreService.listFeatureSets(request); - if (featureSets.getFeatureSetsList().size() == 0) { - throw new SpecRetrievalException( - String.format( - "Unable to retrieve featureSet with id %s from core, featureSet does not exist", - id)); - } - return featureSets.getFeatureSets(0).getSpec(); - } catch (ExecutionException e) { - throw new SpecRetrievalException( - String.format("Unable to retrieve featureSet with id %s", id), e); - } - } - - /** - * Reload the store configuration from the given config path, then retrieve the necessary specs - * from core to preload the cache. 
- */ - public void populateCache() { - this.store = updateStore(readConfig(configPath)); - Map featureSetSpecMap = getFeatureSetSpecMap(); - featureSetSpecCache.putAll(featureSetSpecMap); - - featureSetsCount.set(featureSetSpecCache.size()); - cacheLastUpdated.set(System.currentTimeMillis()); - } - - public void scheduledPopulateCache() { - try { - populateCache(); - } catch (Exception e) { - log.warn("Error updating store configuration and specs: {}", e.getMessage()); - } - } - - private Map getFeatureSetSpecMap() { - HashMap featureSetSpecs = new HashMap<>(); - - for (Subscription subscription : this.store.getSubscriptionsList()) { - try { - ListFeatureSetsResponse featureSetsResponse = - coreService.listFeatureSets( - ListFeatureSetsRequest.newBuilder() - .setFilter( - ListFeatureSetsRequest.Filter.newBuilder() - .setFeatureSetName(subscription.getName()) - .setFeatureSetVersion(subscription.getVersion())) - .build()); - - for (FeatureSet featureSet : featureSetsResponse.getFeatureSetsList()) { - FeatureSetSpec featureSetSpec = featureSet.getSpec(); - featureSetSpecs.put( - String.format("%s:%s", featureSetSpec.getName(), featureSetSpec.getVersion()), - featureSetSpec); - } - } catch (StatusRuntimeException e) { - throw new RuntimeException( - String.format("Unable to retrieve specs matching subscription %s", subscription), e); - } - } - return featureSetSpecs; - } - - private Store readConfig(Path path) { - try { - List fileContents = Files.readAllLines(path); - String yaml = fileContents.stream().reduce("", (l1, l2) -> l1 + "\n" + l2); - log.info("loaded store config at {}: \n{}", path.toString(), yaml); - return yamlToStoreProto(yaml); - } catch (IOException e) { - throw new RuntimeException( - String.format("Unable to read store config at %s", path.toAbsolutePath()), e); - } - } - - private Store updateStore(Store store) { - UpdateStoreRequest request = UpdateStoreRequest.newBuilder().setStore(store).build(); - try { - UpdateStoreResponse updateStoreResponse 
= coreService.updateStore(request); - if (!updateStoreResponse.getStore().equals(store)) { - throw new RuntimeException("Core store config not matching current store config"); - } - return updateStoreResponse.getStore(); - } catch (Exception e) { - throw new RuntimeException("Unable to update store configuration", e); - } - } -} diff --git a/serving/src/main/java/feast/serving/service/RedisServingService.java b/serving/src/main/java/feast/serving/service/RedisServingService.java index 7c0d65dc42..48fc485214 100644 --- a/serving/src/main/java/feast/serving/service/RedisServingService.java +++ b/serving/src/main/java/feast/serving/service/RedisServingService.java @@ -20,6 +20,8 @@ import static feast.serving.util.Metrics.requestCount; import static feast.serving.util.Metrics.requestLatency; import static feast.serving.util.Metrics.staleKeyCount; +import static feast.serving.util.RefUtil.generateFeatureSetStringRef; +import static feast.serving.util.RefUtil.generateFeatureStringRef; import com.google.common.collect.Maps; import com.google.protobuf.AbstractMessageLite; @@ -28,7 +30,7 @@ import feast.core.FeatureSetProto.EntitySpec; import feast.core.FeatureSetProto.FeatureSetSpec; import feast.serving.ServingAPIProto.FeastServingType; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.FeatureReference; import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; @@ -39,6 +41,9 @@ import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.serving.specs.CachedSpecService; +import feast.serving.specs.FeatureSetRequest; +import feast.serving.util.RefUtil; import feast.storage.RedisProto.RedisKey; import 
feast.types.FeatureRowProto.FeatureRow; import feast.types.FieldProto.Field; @@ -86,26 +91,18 @@ public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest requ List entityRows = request.getEntityRowsList(); Map> featureValuesMap = entityRows.stream() - .collect(Collectors.toMap(er -> er, er -> Maps.newHashMap(er.getFieldsMap()))); - - List featureSetRequests = request.getFeatureSetsList(); + .collect(Collectors.toMap(row -> row, row -> Maps.newHashMap(row.getFieldsMap()))); + List featureSetRequests = + specService.getFeatureSets(request.getFeaturesList()); for (FeatureSetRequest featureSetRequest : featureSetRequests) { - FeatureSetSpec featureSetSpec = - specService.getFeatureSet(featureSetRequest.getName(), featureSetRequest.getVersion()); - List featureSetEntityNames = - featureSetSpec.getEntitiesList().stream() + featureSetRequest.getSpec().getEntitiesList().stream() .map(EntitySpec::getName) .collect(Collectors.toList()); - Duration defaultMaxAge = featureSetSpec.getMaxAge(); - if (featureSetRequest.getMaxAge().equals(Duration.getDefaultInstance())) { - featureSetRequest = featureSetRequest.toBuilder().setMaxAge(defaultMaxAge).build(); - } - List redisKeys = - getRedisKeys(featureSetEntityNames, entityRows, featureSetRequest); + getRedisKeys(featureSetEntityNames, entityRows, featureSetRequest.getSpec()); try { sendAndProcessMultiGet(redisKeys, entityRows, featureValuesMap, featureSetRequest); @@ -118,9 +115,11 @@ public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest requ } List fieldValues = featureValuesMap.values().stream() - .map(m -> FieldValues.newBuilder().putAllFields(m).build()) + .map(valueMap -> FieldValues.newBuilder().putAllFields(valueMap).build()) .collect(Collectors.toList()); - requestLatency.labels("getOnlineFeatures").observe(System.currentTimeMillis() - startTime); + requestLatency + .labels("getOnlineFeatures") + .observe((System.currentTimeMillis() - startTime) / 1000); return 
getOnlineFeaturesResponseBuilder.addAllFieldValues(fieldValues).build(); } } @@ -140,19 +139,18 @@ public GetJobResponse getJob(GetJobRequest getJobRequest) { * * @param featureSetEntityNames entity names that actually belong to the featureSet * @param entityRows entity values to retrieve for - * @param featureSetRequest details of the requested featureSet + * @param featureSetSpec featureSetSpec of the features to retrieve * @return list of RedisKeys */ private List getRedisKeys( List featureSetEntityNames, List entityRows, - FeatureSetRequest featureSetRequest) { + FeatureSetSpec featureSetSpec) { try (Scope scope = tracer.buildSpan("Redis-makeRedisKeys").startActive(true)) { - String featureSetId = - String.format("%s:%s", featureSetRequest.getName(), featureSetRequest.getVersion()); + String featureSetRef = generateFeatureSetStringRef(featureSetSpec); List redisKeys = entityRows.stream() - .map(row -> makeRedisKey(featureSetId, featureSetEntityNames, row)) + .map(row -> makeRedisKey(featureSetRef, featureSetEntityNames, row)) .collect(Collectors.toList()); return redisKeys; } @@ -170,6 +168,7 @@ private RedisKey makeRedisKey( String featureSet, List featureSetEntityNames, EntityRow entityRow) { RedisKey.Builder builder = RedisKey.newBuilder().setFeatureSet(featureSet); Map fieldsMap = entityRow.getFieldsMap(); + featureSetEntityNames.sort(String::compareTo); for (int i = 0; i < featureSetEntityNames.size(); i++) { String entityName = featureSetEntityNames.get(i); @@ -198,14 +197,14 @@ private void sendAndProcessMultiGet( List jedisResps = sendMultiGet(redisKeys); long startTime = System.currentTimeMillis(); try (Scope scope = tracer.buildSpan("Redis-processResponse").startActive(true)) { - String featureSetId = - String.format("%s:%d", featureSetRequest.getName(), featureSetRequest.getVersion()); + FeatureSetSpec spec = featureSetRequest.getSpec(); Map nullValues = - featureSetRequest.getFeatureNamesList().stream() + 
featureSetRequest.getFeatureReferences().stream() .collect( Collectors.toMap( - name -> featureSetId + ":" + name, name -> Value.newBuilder().build())); + RefUtil::generateFeatureStringRef, + featureReference -> Value.newBuilder().build())); for (int i = 0; i < jedisResps.size(); i++) { EntityRow entityRow = entityRows.get(i); @@ -213,7 +212,16 @@ private void sendAndProcessMultiGet( byte[] jedisResponse = jedisResps.get(i); if (jedisResponse == null) { - missingKeyCount.labels(featureSetRequest.getName()).inc(); + featureSetRequest + .getFeatureReferences() + .parallelStream() + .forEach( + request -> + missingKeyCount + .labels( + spec.getProject(), + String.format("%s:%d", request.getName(), request.getVersion())) + .inc()); featureValues.putAll(nullValues); continue; } @@ -222,24 +230,55 @@ private void sendAndProcessMultiGet( boolean stale = isStale(featureSetRequest, entityRow, featureRow); if (stale) { - staleKeyCount.labels(featureSetRequest.getName()).inc(); + featureSetRequest + .getFeatureReferences() + .parallelStream() + .forEach( + request -> + staleKeyCount + .labels( + spec.getProject(), + String.format("%s:%d", request.getName(), request.getVersion())) + .inc()); featureValues.putAll(nullValues); continue; } - requestCount.labels(featureSetRequest.getName()).inc(); + featureSetRequest + .getFeatureReferences() + .parallelStream() + .forEach( + request -> + requestCount + .labels( + spec.getProject(), + String.format("%s:%d", request.getName(), request.getVersion())) + .inc()); + + Map featureNames = + featureSetRequest.getFeatureReferences().stream() + .collect( + Collectors.toMap( + FeatureReference::getName, featureReference -> featureReference)); featureRow.getFieldsList().stream() - .filter(f -> featureSetRequest.getFeatureNamesList().contains(f.getName())) - .forEach(f -> featureValues.put(featureSetId + ":" + f.getName(), f.getValue())); + .filter(field -> featureNames.keySet().contains(field.getName())) + .forEach( + field -> { + 
FeatureReference ref = featureNames.get(field.getName()); + String id = generateFeatureStringRef(ref); + featureValues.put(id, field.getValue()); + }); } } finally { - requestLatency.labels("processResponse").observe(System.currentTimeMillis() - startTime); + requestLatency + .labels("processResponse") + .observe((System.currentTimeMillis() - startTime) / 1000); } } private boolean isStale( FeatureSetRequest featureSetRequest, EntityRow entityRow, FeatureRow featureRow) { - if (featureSetRequest.getMaxAge().equals(Duration.getDefaultInstance())) { + if (featureSetRequest.getSpec().getMaxAge().equals(Duration.getDefaultInstance())) { return false; } long givenTimestamp = entityRow.getEntityTimestamp().getSeconds(); @@ -247,7 +286,7 @@ private boolean isStale( givenTimestamp = System.currentTimeMillis() / 1000; } long timeDifference = givenTimestamp - featureRow.getEventTimestamp().getSeconds(); - return timeDifference > featureSetRequest.getMaxAge().getSeconds(); + return timeDifference > featureSetRequest.getSpec().getMaxAge().getSeconds(); } /** @@ -272,7 +311,9 @@ private List sendMultiGet(List keys) { .withCause(e) .asRuntimeException(); } finally { - requestLatency.labels("sendMultiGet").observe(System.currentTimeMillis() - startTime); + requestLatency + .labels("sendMultiGet") + .observe((System.currentTimeMillis() - startTime) / 1000); } } } diff --git a/serving/src/main/java/feast/serving/specs/CachedSpecService.java b/serving/src/main/java/feast/serving/specs/CachedSpecService.java new file mode 100644 index 0000000000..040a870ffe --- /dev/null +++ b/serving/src/main/java/feast/serving/specs/CachedSpecService.java @@ -0,0 +1,262 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.specs; + +import static feast.serving.util.RefUtil.generateFeatureSetStringRef; +import static feast.serving.util.RefUtil.generateFeatureStringRef; +import static feast.serving.util.mappers.YamlToProtoMapper.yamlToStoreProto; +import static java.util.Comparator.comparingInt; +import static java.util.stream.Collectors.groupingBy; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import feast.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.core.CoreServiceProto.UpdateStoreRequest; +import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.core.FeatureSetProto.FeatureSet; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; +import feast.core.StoreProto.Store; +import feast.core.StoreProto.Store.Subscription; +import feast.serving.ServingAPIProto.FeatureReference; +import feast.serving.exception.SpecRetrievalException; +import io.grpc.StatusRuntimeException; +import io.prometheus.client.Gauge; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang3.tuple.Triple; 
+import org.slf4j.Logger; + +/** In-memory cache of specs. */ +public class CachedSpecService { + + private static final int MAX_SPEC_COUNT = 1000; + private static final Logger log = org.slf4j.LoggerFactory.getLogger(CachedSpecService.class); + + private final CoreSpecService coreService; + private final Path configPath; + + private final Map featureToFeatureSetMapping; + + private final CacheLoader featureSetCacheLoader; + private final LoadingCache featureSetCache; + private Store store; + + private static Gauge featureSetsCount = + Gauge.build() + .name("feature_set_count") + .subsystem("feast_serving") + .help("number of feature sets served by this instance") + .register(); + private static Gauge cacheLastUpdated = + Gauge.build() + .name("cache_last_updated") + .subsystem("feast_serving") + .help("epoch time of the last time the cache was updated") + .register(); + + public CachedSpecService(CoreSpecService coreService, Path configPath) { + this.configPath = configPath; + this.coreService = coreService; + this.store = updateStore(readConfig(configPath)); + + Map featureSets = getFeatureSetMap(); + featureToFeatureSetMapping = + new ConcurrentHashMap<>(getFeatureToFeatureSetMapping(featureSets)); + featureSetCacheLoader = CacheLoader.from(featureSets::get); + featureSetCache = + CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(featureSetCacheLoader); + } + + /** + * Get the current store configuration. + * + * @return StoreProto.Store store configuration for this serving instance + */ + public Store getStore() { + return this.store; + } + + /** + * Get FeatureSetSpecs for the given features. 
+ * + * @return FeatureSetRequest containing the specs, and their respective feature references + */ + public List getFeatureSets(List featureReferences) { + List featureSetRequests = new ArrayList<>(); + featureReferences.stream() + .map( + featureReference -> { + String featureSet = + featureToFeatureSetMapping.getOrDefault( + generateFeatureStringRef(featureReference), ""); + if (featureSet.isEmpty()) { + throw new SpecRetrievalException( + String.format("Unable to retrieve feature %s", featureReference)); + } + return Pair.of(featureSet, featureReference); + }) + .collect(groupingBy(Pair::getLeft)) + .forEach( + (fsName, featureRefs) -> { + try { + FeatureSetSpec featureSetSpec = featureSetCache.get(fsName); + List requestedFeatures = + featureRefs.stream().map(Pair::getRight).collect(Collectors.toList()); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .setSpec(featureSetSpec) + .addAllFeatureReferences(requestedFeatures) + .build(); + featureSetRequests.add(featureSetRequest); + } catch (ExecutionException e) { + throw new SpecRetrievalException( + String.format("Unable to retrieve featureSet with id %s", fsName), e); + } + }); + return featureSetRequests; + } + + /** + * Reload the store configuration from the given config path, then retrieve the necessary specs + * from core to preload the cache. 
+ */ + public void populateCache() { + this.store = updateStore(readConfig(configPath)); + Map featureSetMap = getFeatureSetMap(); + featureSetCache.putAll(featureSetMap); + featureToFeatureSetMapping.putAll(getFeatureToFeatureSetMapping(featureSetMap)); + + featureSetsCount.set(featureSetCache.size()); + cacheLastUpdated.set(System.currentTimeMillis()); + } + + public void scheduledPopulateCache() { + try { + populateCache(); + } catch (Exception e) { + log.warn("Error updating store configuration and specs: {}", e.getMessage()); + } + } + + private Map getFeatureSetMap() { + HashMap featureSets = new HashMap<>(); + + for (Subscription subscription : this.store.getSubscriptionsList()) { + try { + ListFeatureSetsResponse featureSetsResponse = + coreService.listFeatureSets( + ListFeatureSetsRequest.newBuilder() + .setFilter( + ListFeatureSetsRequest.Filter.newBuilder() + .setProject(subscription.getProject()) + .setFeatureSetName(subscription.getName()) + .setFeatureSetVersion(subscription.getVersion())) + .build()); + + for (FeatureSet featureSet : featureSetsResponse.getFeatureSetsList()) { + FeatureSetSpec spec = featureSet.getSpec(); + featureSets.put(generateFeatureSetStringRef(spec), spec); + } + } catch (StatusRuntimeException e) { + throw new RuntimeException( + String.format("Unable to retrieve specs matching subscription %s", subscription), e); + } + } + return featureSets; + } + + private Map getFeatureToFeatureSetMapping( + Map featureSets) { + HashMap mapping = new HashMap<>(); + + featureSets.values().stream() + .collect( + groupingBy( + featureSet -> + Triple.of( + featureSet.getProject(), featureSet.getName(), featureSet.getVersion()))) + .forEach( + (group, groupedFeatureSets) -> { + groupedFeatureSets = + groupedFeatureSets.stream() + .sorted(comparingInt(FeatureSetSpec::getVersion)) + .collect(Collectors.toList()); + for (int i = 0; i < groupedFeatureSets.size(); i++) { + FeatureSetSpec featureSetSpec = groupedFeatureSets.get(i); + for 
(FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { + FeatureReference featureRef = + FeatureReference.newBuilder() + .setProject(featureSetSpec.getProject()) + .setName(featureSpec.getName()) + .setVersion(featureSetSpec.getVersion()) + .build(); + mapping.put( + generateFeatureStringRef(featureRef), + generateFeatureSetStringRef(featureSetSpec)); + if (i == groupedFeatureSets.size() - 1) { + featureRef = + FeatureReference.newBuilder() + .setProject(featureSetSpec.getProject()) + .setName(featureSpec.getName()) + .build(); + mapping.put( + generateFeatureStringRef(featureRef), + generateFeatureSetStringRef(featureSetSpec)); + } + } + } + }); + return mapping; + } + + private Store readConfig(Path path) { + try { + List fileContents = Files.readAllLines(path); + String yaml = fileContents.stream().reduce("", (l1, l2) -> l1 + "\n" + l2); + log.info("loaded store config at {}: \n{}", path.toString(), yaml); + return yamlToStoreProto(yaml); + } catch (IOException e) { + throw new RuntimeException( + String.format("Unable to read store config at %s", path.toAbsolutePath()), e); + } + } + + private Store updateStore(Store store) { + UpdateStoreRequest request = UpdateStoreRequest.newBuilder().setStore(store).build(); + try { + UpdateStoreResponse updateStoreResponse = coreService.updateStore(request); + if (!updateStoreResponse.getStore().equals(store)) { + throw new RuntimeException("Core store config not matching current store config"); + } + return updateStoreResponse.getStore(); + } catch (Exception e) { + throw new RuntimeException("Unable to update store configuration", e); + } + } +} diff --git a/serving/src/main/java/feast/serving/service/CoreSpecService.java b/serving/src/main/java/feast/serving/specs/CoreSpecService.java similarity index 85% rename from serving/src/main/java/feast/serving/service/CoreSpecService.java rename to serving/src/main/java/feast/serving/specs/CoreSpecService.java index 438492d387..2f5cef342e 100644 --- 
a/serving/src/main/java/feast/serving/service/CoreSpecService.java +++ b/serving/src/main/java/feast/serving/specs/CoreSpecService.java @@ -14,9 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package feast.serving.service; +package feast.serving.specs; import feast.core.CoreServiceGrpc; +import feast.core.CoreServiceProto.GetFeatureSetRequest; +import feast.core.CoreServiceProto.GetFeatureSetResponse; import feast.core.CoreServiceProto.ListFeatureSetsRequest; import feast.core.CoreServiceProto.ListFeatureSetsResponse; import feast.core.CoreServiceProto.UpdateStoreRequest; @@ -37,6 +39,10 @@ public CoreSpecService(String feastCoreHost, int feastCorePort) { blockingStub = CoreServiceGrpc.newBlockingStub(channel); } + public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest getFeatureSetRequest) { + return blockingStub.getFeatureSet(getFeatureSetRequest); + } + public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest ListFeatureSetsRequest) { return blockingStub.listFeatureSets(ListFeatureSetsRequest); } diff --git a/serving/src/main/java/feast/serving/specs/FeatureSetRequest.java b/serving/src/main/java/feast/serving/specs/FeatureSetRequest.java new file mode 100644 index 0000000000..904630659d --- /dev/null +++ b/serving/src/main/java/feast/serving/specs/FeatureSetRequest.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.specs; + +import com.google.auto.value.AutoValue; +import com.google.common.collect.ImmutableSet; +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.serving.ServingAPIProto.FeatureReference; +import java.util.List; + +@AutoValue +public abstract class FeatureSetRequest { + public abstract FeatureSetSpec getSpec(); + + public abstract ImmutableSet getFeatureReferences(); + + public static Builder newBuilder() { + return new AutoValue_FeatureSetRequest.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setSpec(FeatureSetSpec spec); + + abstract ImmutableSet.Builder featureReferencesBuilder(); + + public Builder addAllFeatureReferences(List featureReferenceList) { + featureReferencesBuilder().addAll(featureReferenceList); + return this; + } + + public Builder addFeatureReference(FeatureReference featureReference) { + featureReferencesBuilder().add(featureReference); + return this; + } + + public abstract FeatureSetRequest build(); + } +} diff --git a/serving/src/main/java/feast/serving/store/bigquery/QueryTemplater.java b/serving/src/main/java/feast/serving/store/bigquery/QueryTemplater.java index d0e6db6748..e3f1138db8 100644 --- a/serving/src/main/java/feast/serving/store/bigquery/QueryTemplater.java +++ b/serving/src/main/java/feast/serving/store/bigquery/QueryTemplater.java @@ -22,7 +22,8 @@ import com.mitchellbosecke.pebble.template.PebbleTemplate; import feast.core.FeatureSetProto.EntitySpec; import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.FeatureReference; +import feast.serving.specs.FeatureSetRequest; import feast.serving.store.bigquery.model.FeatureSetInfo; import java.io.IOException; import java.io.StringWriter; @@ -66,36 +67,30 @@ public static String 
createEntityTableUUIDQuery(String leftTableName) { * Generate the information necessary for the sql templating for point in time correctness join to * the entity dataset for each feature set requested. * - * @param featureSetSpecs List of feature set specs requested - * @param featureSetRequests List of feature set requests from the batch retrieval request + * @param featureSetRequests List of feature sets requested * @return List of FeatureSetInfos */ - public static List getFeatureSetInfos( - List featureSetSpecs, List featureSetRequests) + public static List getFeatureSetInfos(List featureSetRequests) throws IllegalArgumentException { - if (featureSetRequests.size() != featureSetSpecs.size()) { - throw new IllegalArgumentException( - "Number of feature sets not matching number of feature set requests"); - } - List featureSetInfos = new ArrayList<>(); - - for (int i = 0; i < featureSetRequests.size(); i++) { - FeatureSetSpec spec = featureSetSpecs.get(i); - FeatureSetRequest request = featureSetRequests.get(i); - Duration maxAge = getMaxAge(request, spec); + for (FeatureSetRequest featureSetRequest : featureSetRequests) { + FeatureSetSpec spec = featureSetRequest.getSpec(); + Duration maxAge = spec.getMaxAge(); List fsEntities = spec.getEntitiesList().stream().map(EntitySpec::getName).collect(Collectors.toList()); - String id = String.format("%s:%s", spec.getName(), spec.getVersion()); + List features = + featureSetRequest.getFeatureReferences().stream() + .map(FeatureReference::getName) + .collect(Collectors.toList()); featureSetInfos.add( new FeatureSetInfo( - id, + spec.getProject(), spec.getName(), spec.getVersion(), maxAge.getSeconds(), fsEntities, - request.getFeatureNamesList(), + features, "")); } return featureSetInfos; @@ -159,13 +154,6 @@ public static String createJoinQuery( return writer.toString(); } - private static Duration getMaxAge(FeatureSetRequest featureSet, FeatureSetSpec featureSetSpec) { - if (featureSet.getMaxAge() == 
Duration.getDefaultInstance()) { - return featureSetSpec.getMaxAge(); - } - return featureSet.getMaxAge(); - } - public static String generateFullTableName(TableId tableId) { return String.format( "%s.%s.%s", tableId.getProject(), tableId.getDataset(), tableId.getTable()); diff --git a/serving/src/main/java/feast/serving/store/bigquery/model/FeatureSetInfo.java b/serving/src/main/java/feast/serving/store/bigquery/model/FeatureSetInfo.java index ddda1bf6a7..77c80ead0e 100644 --- a/serving/src/main/java/feast/serving/store/bigquery/model/FeatureSetInfo.java +++ b/serving/src/main/java/feast/serving/store/bigquery/model/FeatureSetInfo.java @@ -20,7 +20,7 @@ public class FeatureSetInfo { - private final String id; + private final String project; private final String name; private final int version; private final long maxAge; @@ -29,14 +29,14 @@ public class FeatureSetInfo { private final String table; public FeatureSetInfo( - String id, + String project, String name, int version, long maxAge, List entities, List features, String table) { - this.id = id; + this.project = project; this.name = name; this.version = version; this.maxAge = maxAge; @@ -47,7 +47,7 @@ public FeatureSetInfo( public FeatureSetInfo(FeatureSetInfo featureSetInfo, String table) { - this.id = featureSetInfo.getId(); + this.project = featureSetInfo.getProject(); this.name = featureSetInfo.getName(); this.version = featureSetInfo.getVersion(); this.maxAge = featureSetInfo.getMaxAge(); @@ -56,8 +56,8 @@ public FeatureSetInfo(FeatureSetInfo featureSetInfo, String table) { this.table = table; } - public String getId() { - return id; + public String getProject() { + return project; } public String getName() { diff --git a/serving/src/main/java/feast/serving/util/Metrics.java b/serving/src/main/java/feast/serving/util/Metrics.java index ffd6d1a0d6..99f6353e74 100644 --- a/serving/src/main/java/feast/serving/util/Metrics.java +++ b/serving/src/main/java/feast/serving/util/Metrics.java @@ -23,10 +23,10 @@ 
public class Metrics { public static final Histogram requestLatency = Histogram.build() - .buckets(2, 4, 6, 8, 10, 15, 20, 25, 30, 35, 50) + .buckets(0.001, 0.002, 0.004, 0.006, 0.008, 0.01, 0.015, 0.02, 0.025, 0.03, 0.035, 0.05) .name("request_latency_ms") .subsystem("feast_serving") - .help("Request latency in milliseconds.") + .help("Request latency in seconds.") .labelNames("method") .register(); @@ -35,7 +35,7 @@ public class Metrics { .name("request_feature_count") .subsystem("feast_serving") .help("number of feature rows requested") - .labelNames("feature_set_name") + .labelNames("project", "feature_name") .register(); public static final Counter missingKeyCount = @@ -43,7 +43,7 @@ public class Metrics { .name("missing_feature_count") .subsystem("feast_serving") .help("number requested feature rows that were not found") - .labelNames("feature_set_name") + .labelNames("project", "feature_name") .register(); public static final Counter staleKeyCount = @@ -51,6 +51,6 @@ public class Metrics { .name("stale_feature_count") .subsystem("feast_serving") .help("number requested feature rows that were stale") - .labelNames("feature_set_name") + .labelNames("project", "feature_name") .register(); } diff --git a/serving/src/main/java/feast/serving/util/RefUtil.java b/serving/src/main/java/feast/serving/util/RefUtil.java new file mode 100644 index 0000000000..74de3e6562 --- /dev/null +++ b/serving/src/main/java/feast/serving/util/RefUtil.java @@ -0,0 +1,38 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.util; + +import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.serving.ServingAPIProto.FeatureReference; + +public class RefUtil { + public static String generateFeatureStringRef(FeatureReference featureReference) { + String ref = String.format("%s/%s", featureReference.getProject(), featureReference.getName()); + if (featureReference.getVersion() > 0) { + return ref + String.format(":%d", featureReference.getVersion()); + } + return ref; + } + + public static String generateFeatureSetStringRef(FeatureSetSpec featureSetSpec) { + String ref = String.format("%s/%s", featureSetSpec.getProject(), featureSetSpec.getName()); + if (featureSetSpec.getVersion() > 0) { + return ref + String.format(":%d", featureSetSpec.getVersion()); + } + return ref; + } +} diff --git a/serving/src/main/java/feast/serving/util/RequestHelper.java b/serving/src/main/java/feast/serving/util/RequestHelper.java index 4127b6afef..e6e8e8629a 100644 --- a/serving/src/main/java/feast/serving/util/RequestHelper.java +++ b/serving/src/main/java/feast/serving/util/RequestHelper.java @@ -16,9 +16,12 @@ */ package feast.serving.util; +import feast.serving.ServingAPIProto.FeatureReference; import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; import io.grpc.Status; +import java.util.Set; +import java.util.stream.Collectors; public class RequestHelper { @@ -43,5 +46,15 @@ public static void validateBatchRequest(GetBatchFeaturesRequest getFeaturesReque .withDescription("Dataset 
source must be provided: only file source supported") .asRuntimeException(); } + + Set uniqueFeatureNames = + getFeaturesRequest.getFeaturesList().stream() + .map(FeatureReference::getName) + .collect(Collectors.toSet()); + if (uniqueFeatureNames.size() != getFeaturesRequest.getFeaturesList().size()) { + throw Status.INVALID_ARGUMENT + .withDescription("Feature names must be unique within the request") + .asRuntimeException(); + } } } diff --git a/serving/src/main/resources/templates/join_featuresets.sql b/serving/src/main/resources/templates/join_featuresets.sql index f913a63dbd..e57b0c1031 100644 --- a/serving/src/main/resources/templates/join_featuresets.sql +++ b/serving/src/main/resources/templates/join_featuresets.sql @@ -5,7 +5,7 @@ LEFT JOIN ( SELECT uuid, {% for featureName in featureSet.features %} - {{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }}{% if loop.last %}{% else %}, {% endif %} + {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM `{{ featureSet.table }}` ) USING (uuid) @@ -15,7 +15,7 @@ LEFT JOIN ( {{ entities | join(', ') }} {% for featureSet in featureSets %} {% for featureName in featureSet.features %} - ,{{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }} + ,{{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }} as {{ featureName }} {% endfor %} {% endfor %} FROM joined \ No newline at end of file diff --git a/serving/src/main/resources/templates/single_featureset_pit_join.sql b/serving/src/main/resources/templates/single_featureset_pit_join.sql index 38fb67db42..c83735660f 100644 --- a/serving/src/main/resources/templates/single_featureset_pit_join.sql +++ b/serving/src/main/resources/templates/single_featureset_pit_join.sql @@ -1,7 +1,7 @@ WITH union_features AS (SELECT uuid, event_timestamp, - NULL as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + NULL as {{ featureSet.project }}_{{ 
featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, NULL as created_timestamp, {{ featureSet.entities | join(', ')}}, true AS is_entity_table @@ -10,18 +10,18 @@ UNION ALL SELECT NULL as uuid, event_timestamp, - event_timestamp as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}, false AS is_entity_table -FROM `{{projectId}}.{{datasetId}}.{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second) +FROM `{{projectId}}.{{datasetId}}.{{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second) ) SELECT uuid, event_timestamp, {{ featureSet.entities | join(', ')}}, {% for featureName in featureSet.features %} - IF(event_timestamp >= {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp AND Timestamp_sub(event_timestamp, interval {{ featureSet.maxAge }} second) < {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, {{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }}, NULL) as {{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }}{% if loop.last %}{% else %}, {% endif %} + IF(event_timestamp >= {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp AND Timestamp_sub(event_timestamp, interval {{ featureSet.maxAge }} second) < {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}, NULL) as {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else 
%}, {% endif %} {% endfor %} FROM ( SELECT @@ -29,19 +29,19 @@ SELECT event_timestamp, {{ featureSet.entities | join(', ')}}, FIRST_VALUE(created_timestamp IGNORE NULLS) over w AS created_timestamp, - FIRST_VALUE({{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp IGNORE NULLS) over w AS {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + FIRST_VALUE({{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp IGNORE NULLS) over w AS {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, is_entity_table FROM union_features WINDOW w AS (PARTITION BY {{ featureSet.entities | join(', ') }} ORDER BY event_timestamp DESC, is_entity_table DESC, created_timestamp DESC ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) ) LEFT JOIN ( SELECT - event_timestamp as {{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}, {% for featureName in featureSet.features %} - {{ featureName }} as {{ featureSet.name }}_v{{ featureSet.version }}_{{ featureName }}{% if loop.last %}{% else %}, {% endif %} + {{ featureName }} as {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} {% endfor %} -FROM `{{projectId}}.{{datasetId}}.{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second) -) USING ({{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}) +FROM `{{projectId}}.{{datasetId}}.{{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ 
minTimestamp }}', interval {{ featureSet.maxAge }} second) +) USING ({{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}) WHERE is_entity_table \ No newline at end of file diff --git a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java index 6dfc54ec2b..f2c51bc7dd 100644 --- a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java +++ b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java @@ -18,10 +18,9 @@ import static org.mockito.MockitoAnnotations.initMocks; -import com.google.common.collect.Lists; import com.google.protobuf.Timestamp; import feast.serving.FeastProperties; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import feast.serving.ServingAPIProto.FeatureReference; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; @@ -52,11 +51,17 @@ public void setUp() { validRequest = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .setProject("project") + .build()) + .addFeatures( + FeatureReference.newBuilder() + .setName("feature2") + .setVersion(1) + .setProject("project") .build()) .addEntityRows( EntityRow.newBuilder() diff --git a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java index 5bd2038f2b..abeb44bd73 100644 --- a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java +++ 
b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java @@ -17,7 +17,8 @@ package feast.serving.service; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.*; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.junit.Assert.assertThat; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; @@ -28,14 +29,20 @@ import feast.core.CoreServiceProto.UpdateStoreResponse; import feast.core.FeatureSetProto; import feast.core.FeatureSetProto.FeatureSetSpec; +import feast.core.FeatureSetProto.FeatureSpec; import feast.core.StoreProto.Store; import feast.core.StoreProto.Store.RedisConfig; import feast.core.StoreProto.Store.StoreType; import feast.core.StoreProto.Store.Subscription; +import feast.serving.ServingAPIProto.FeatureReference; +import feast.serving.specs.CachedSpecService; +import feast.serving.specs.CoreSpecService; +import feast.serving.specs.FeatureSetRequest; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -70,10 +77,12 @@ public void setUp() throws IOException { + " host: localhost\n" + " port: 6379\n" + "subscriptions:\n" - + "- name: fs1\n" - + " version: \">0\"\n" - + "- name: fs2\n" - + " version: \">0\""; + + "- project: project\n" + + " name: fs1\n" + + " version: \"*\"\n" + + "- project: project\n" + + " name: fs2\n" + + " version: \"*\""; BufferedWriter writer = new BufferedWriter(new FileWriter(configFile)); writer.write(yamlString); writer.close(); @@ -83,17 +92,49 @@ public void setUp() throws IOException { .setName("SERVING") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .addSubscriptions(Subscription.newBuilder().setName("fs1").setVersion(">0").build()) - 
.addSubscriptions(Subscription.newBuilder().setName("fs2").setVersion(">0").build()) + .addSubscriptions( + Subscription.newBuilder() + .setProject("project") + .setName("fs1") + .setVersion("*") + .build()) + .addSubscriptions( + Subscription.newBuilder() + .setProject("project") + .setName("fs2") + .setVersion("*") + .build()) .build(); when(coreService.updateStore(UpdateStoreRequest.newBuilder().setStore(store).build())) .thenReturn(UpdateStoreResponse.newBuilder().setStore(store).build()); featureSetSpecs = new LinkedHashMap<>(); - featureSetSpecs.put("fs1:1", FeatureSetSpec.newBuilder().setName("fs1").setVersion(1).build()); - featureSetSpecs.put("fs1:2", FeatureSetSpec.newBuilder().setName("fs1").setVersion(2).build()); - featureSetSpecs.put("fs2:1", FeatureSetSpec.newBuilder().setName("fs2").setVersion(1).build()); + featureSetSpecs.put( + "fs1:1", + FeatureSetSpec.newBuilder() + .setProject("project") + .setName("fs1") + .setVersion(1) + .addFeatures(FeatureSpec.newBuilder().setName("feature")) + .build()); + featureSetSpecs.put( + "fs1:2", + FeatureSetSpec.newBuilder() + .setProject("project") + .setName("fs1") + .setVersion(2) + .addFeatures(FeatureSpec.newBuilder().setName("feature")) + .addFeatures(FeatureSpec.newBuilder().setName("feature2")) + .build()); + featureSetSpecs.put( + "fs2:1", + FeatureSetSpec.newBuilder() + .setProject("project") + .setName("fs2") + .setVersion(1) + .addFeatures(FeatureSpec.newBuilder().setName("feature3")) + .build()); List fs1FeatureSets = Lists.newArrayList( @@ -106,8 +147,9 @@ public void setUp() throws IOException { ListFeatureSetsRequest.newBuilder() .setFilter( ListFeatureSetsRequest.Filter.newBuilder() + .setProject("project") .setFeatureSetName("fs1") - .setFeatureSetVersion(">0") + .setFeatureSetVersion("*") .build()) .build())) .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs1FeatureSets).build()); @@ -115,8 +157,9 @@ public void setUp() throws IOException { 
ListFeatureSetsRequest.newBuilder() .setFilter( ListFeatureSetsRequest.Filter.newBuilder() + .setProject("project") .setFeatureSetName("fs2") - .setFeatureSetVersion(">0") + .setFeatureSetVersion("*") .build()) .build())) .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs2FeatureSets).build()); @@ -139,8 +182,108 @@ public void shouldPopulateAndReturnStore() { @Test public void shouldPopulateAndReturnFeatureSets() { cachedSpecService.populateCache(); - assertThat(cachedSpecService.getFeatureSet("fs1", 1), equalTo(featureSetSpecs.get("fs1:1"))); - assertThat(cachedSpecService.getFeatureSet("fs1", 2), equalTo(featureSetSpecs.get("fs1:2"))); - assertThat(cachedSpecService.getFeatureSet("fs2", 1), equalTo(featureSetSpecs.get("fs2:1"))); + FeatureReference frv1 = + FeatureReference.newBuilder() + .setProject("project") + .setName("feature") + .setVersion(1) + .build(); + FeatureReference frv2 = + FeatureReference.newBuilder() + .setProject("project") + .setName("feature") + .setVersion(2) + .build(); + + assertThat( + cachedSpecService.getFeatureSets(Collections.singletonList(frv1)), + equalTo( + Lists.newArrayList( + FeatureSetRequest.newBuilder() + .addFeatureReference(frv1) + .setSpec(featureSetSpecs.get("fs1:1")) + .build()))); + assertThat( + cachedSpecService.getFeatureSets(Collections.singletonList(frv2)), + equalTo( + Lists.newArrayList( + FeatureSetRequest.newBuilder() + .addFeatureReference(frv2) + .setSpec(featureSetSpecs.get("fs1:2")) + .build()))); + } + + @Test + public void shouldPopulateAndReturnLatestFeatureSetIfVersionsNotSupplied() { + cachedSpecService.populateCache(); + FeatureReference frv1 = + FeatureReference.newBuilder().setProject("project").setName("feature").build(); + + assertThat( + cachedSpecService.getFeatureSets(Collections.singletonList(frv1)), + equalTo( + Lists.newArrayList( + FeatureSetRequest.newBuilder() + .addFeatureReference(frv1) + .setSpec(featureSetSpecs.get("fs1:2")) + .build()))); + } + + @Test + public 
void shouldPopulateAndReturnFeatureSetsGivenFeaturesFromDifferentFeatureSets() { + cachedSpecService.populateCache(); + FeatureReference frv1 = + FeatureReference.newBuilder() + .setProject("project") + .setName("feature") + .setVersion(1) + .build(); + FeatureReference fr3 = + FeatureReference.newBuilder() + .setProject("project") + .setName("feature3") + .setVersion(1) + .build(); + + assertThat( + cachedSpecService.getFeatureSets(Lists.newArrayList(frv1, fr3)), + containsInAnyOrder( + Lists.newArrayList( + FeatureSetRequest.newBuilder() + .addFeatureReference(frv1) + .setSpec(featureSetSpecs.get("fs1:1")) + .build(), + FeatureSetRequest.newBuilder() + .addFeatureReference(fr3) + .setSpec(featureSetSpecs.get("fs2:1")) + .build()) + .toArray())); + } + + @Test + public void shouldPopulateAndReturnFeatureSetGivenFeaturesFromSameFeatureSet() { + cachedSpecService.populateCache(); + FeatureReference fr1 = + FeatureReference.newBuilder() + .setProject("project") + .setName("feature") + .setVersion(2) + .build(); + FeatureReference fr2 = + FeatureReference.newBuilder() + .setProject("project") + .setName("feature2") + .setVersion(2) + .build(); + + assertThat( + cachedSpecService.getFeatureSets(Lists.newArrayList(fr1, fr2)), + equalTo( + Lists.newArrayList( + FeatureSetRequest.newBuilder() + .addFeatureReference(fr1) + .addFeatureReference(fr2) + .setSpec(featureSetSpecs.get("fs1:2")) + .build()))); } } diff --git a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java index 890699db6d..042107e117 100644 --- a/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java +++ b/serving/src/test/java/feast/serving/service/RedisServingServiceTest.java @@ -27,17 +27,20 @@ import com.google.protobuf.Timestamp; import feast.core.FeatureSetProto.EntitySpec; import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureSetRequest; +import 
feast.serving.ServingAPIProto.FeatureReference; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.serving.specs.CachedSpecService; +import feast.serving.specs.FeatureSetRequest; import feast.storage.RedisProto.RedisKey; import feast.types.FeatureRowProto.FeatureRow; import feast.types.FieldProto.Field; import feast.types.ValueProto.Value; import io.opentracing.Tracer; import io.opentracing.Tracer.SpanBuilder; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -70,14 +73,14 @@ public void setUp() { redisKeyList = Lists.newArrayList( RedisKey.newBuilder() - .setFeatureSet("featureSet:1") + .setFeatureSet("project/featureSet:1") .addAllEntities( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), Field.newBuilder().setName("entity2").setValue(strValue("a")).build())) .build(), RedisKey.newBuilder() - .setFeatureSet("featureSet:1") + .setFeatureSet("project/featureSet:1") .addAllEntities( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), @@ -93,11 +96,17 @@ public void setUp() { public void shouldReturnResponseWithValuesIfKeysPresent() { GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .setProject("project") + .build()) + .addFeatures( + FeatureReference.newBuilder() + .setName("feature2") + .setVersion(1) + .setProject("project") .build()) .addEntityRows( EntityRow.newBuilder() @@ -134,9 +143,16 @@ public void 
shouldReturnResponseWithValuesIfKeysPresent() { .setFeatureSet("featureSet:1") .build()); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .addAllFeatureReferences(request.getFeaturesList()) + .setSpec(getFeatureSetSpec()) + .build(); + List featureRowBytes = featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); - when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); + when(specService.getFeatureSets(request.getFeaturesList())) + .thenReturn(Collections.singletonList(featureSetRequest)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -147,14 +163,14 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) + .putFields("project/feature1:1", intValue(1)) + .putFields("project/feature2:1", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", intValue(2)) - .putFields("featureSet:1:feature2", intValue(2))) + .putFields("project/feature1:1", intValue(2)) + .putFields("project/feature2:1", intValue(2))) .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); assertThat( @@ -165,11 +181,17 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { public void shouldReturnResponseWithValuesWhenFeatureSetSpecHasUnspecifiedMaxAge() { GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") + .setVersion(1) + 
.setProject("project") + .build()) + .addFeatures( + FeatureReference.newBuilder() + .setName("feature2") .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .setProject("project") .build()) .addEntityRows( EntityRow.newBuilder() @@ -206,9 +228,16 @@ public void shouldReturnResponseWithValuesWhenFeatureSetSpecHasUnspecifiedMaxAge .setFeatureSet("featureSet:1") .build()); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .addAllFeatureReferences(request.getFeaturesList()) + .setSpec(getFeatureSetSpecWithNoMaxAge()) + .build(); + List featureRowBytes = featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); - when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpecWithNoMaxAge()); + when(specService.getFeatureSets(request.getFeaturesList())) + .thenReturn(Collections.singletonList(featureSetRequest)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -219,14 +248,14 @@ public void shouldReturnResponseWithValuesWhenFeatureSetSpecHasUnspecifiedMaxAge FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) + .putFields("project/feature1:1", intValue(1)) + .putFields("project/feature2:1", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", intValue(2)) - .putFields("featureSet:1:feature2", intValue(2))) + .putFields("project/feature1:1", intValue(2)) + .putFields("project/feature2:1", intValue(2))) .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); assertThat( @@ -234,16 +263,17 @@ public void 
shouldReturnResponseWithValuesWhenFeatureSetSpecHasUnspecifiedMaxAge } @Test - public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { - // some keys not present, should have empty values + public void shouldReturnKeysWithoutVersionifNotProvided() { GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .setProject("project") .build()) + .addFeatures( + FeatureReference.newBuilder().setName("feature2").setProject("project").build()) .addEntityRows( EntityRow.newBuilder() .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -269,18 +299,26 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { .setFeatureSet("featureSet:1") .build(), FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder()) + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) .addAllFields( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").build(), - Field.newBuilder().setName("feature2").build())) + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) .setFeatureSet("featureSet:1") .build()); - List featureRowBytes = Lists.newArrayList(featureRows.get(0).toByteArray(), null); - when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .addAllFeatureReferences(request.getFeaturesList()) + .setSpec(getFeatureSetSpec()) + .build(); + + List featureRowBytes = + featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); + 
when(specService.getFeatureSets(request.getFeaturesList())) + .thenReturn(Collections.singletonList(featureSetRequest)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -291,14 +329,14 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) + .putFields("project/feature1:1", intValue(1)) + .putFields("project/feature2", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", Value.newBuilder().build()) - .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .putFields("project/feature1:1", intValue(2)) + .putFields("project/feature2", intValue(2))) .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); assertThat( @@ -306,16 +344,21 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { } @Test - public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { - // keys present, but too stale comp. 
to maxAge set in request + public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { + // some keys not present, should have empty values GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") + .setVersion(1) + .setProject("project") + .build()) + .addFeatures( + FeatureReference.newBuilder() + .setName("feature2") .setVersion(1) - .setMaxAge(Duration.newBuilder().setSeconds(10)) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .setProject("project") .build()) .addEntityRows( EntityRow.newBuilder() @@ -342,20 +385,25 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { .setFeatureSet("featureSet:1") .build(), FeatureRow.newBuilder() - .setEventTimestamp( - Timestamp.newBuilder().setSeconds(50)) // this value should be nulled + .setEventTimestamp(Timestamp.newBuilder()) .addAllFields( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + Field.newBuilder().setName("feature1").build(), + Field.newBuilder().setName("feature2").build())) .setFeatureSet("featureSet:1") .build()); - List featureRowBytes = - featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); - when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .addAllFeatureReferences(request.getFeaturesList()) + .setSpec(getFeatureSetSpec()) + .build(); + + List featureRowBytes = Lists.newArrayList(featureRows.get(0).toByteArray(), null); + when(specService.getFeatureSets(request.getFeaturesList())) + 
.thenReturn(Collections.singletonList(featureSetRequest)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -366,14 +414,14 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) + .putFields("project/feature1:1", intValue(1)) + .putFields("project/feature2:1", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", Value.newBuilder().build()) - .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .putFields("project/feature1:1", Value.newBuilder().build()) + .putFields("project/feature2:1", Value.newBuilder().build())) .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); assertThat( @@ -381,15 +429,21 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { } @Test - public void shouldReturnResponseWithUnsetValuesIfDefaultMaxAgeIsExceeded() { - // keys present, but too stale comp. to maxAge set in featureSetSpec + public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { + // keys present, but too stale comp. 
to maxAge GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") + .setVersion(1) + .setProject("project") + .build()) + .addFeatures( + FeatureReference.newBuilder() + .setName("feature2") .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1", "feature2")) + .setProject("project") .build()) .addEntityRows( EntityRow.newBuilder() @@ -417,7 +471,7 @@ public void shouldReturnResponseWithUnsetValuesIfDefaultMaxAgeIsExceeded() { .build(), FeatureRow.newBuilder() .setEventTimestamp( - Timestamp.newBuilder().setSeconds(0)) // this value should be nulled + Timestamp.newBuilder().setSeconds(50)) // this value should be nulled .addAllFields( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), @@ -427,9 +481,18 @@ public void shouldReturnResponseWithUnsetValuesIfDefaultMaxAgeIsExceeded() { .setFeatureSet("featureSet:1") .build()); + FeatureSetSpec spec = + getFeatureSetSpec().toBuilder().setMaxAge(Duration.newBuilder().setSeconds(1)).build(); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .addAllFeatureReferences(request.getFeaturesList()) + .setSpec(spec) + .build(); + List featureRowBytes = featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); - when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); + when(specService.getFeatureSets(request.getFeaturesList())) + .thenReturn(Collections.singletonList(featureSetRequest)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -440,14 +503,14 @@ public void shouldReturnResponseWithUnsetValuesIfDefaultMaxAgeIsExceeded() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) 
.putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1)) - .putFields("featureSet:1:feature2", intValue(1))) + .putFields("project/feature1:1", intValue(1)) + .putFields("project/feature2:1", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", Value.newBuilder().build()) - .putFields("featureSet:1:feature2", Value.newBuilder().build())) + .putFields("project/feature1:1", Value.newBuilder().build()) + .putFields("project/feature2:1", Value.newBuilder().build())) .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); assertThat( @@ -459,11 +522,11 @@ public void shouldFilterOutUndesiredRows() { // requested rows less than the rows available in the featureset GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatureSets( - FeatureSetRequest.newBuilder() - .setName("featureSet") + .addFeatures( + FeatureReference.newBuilder() + .setName("feature1") .setVersion(1) - .addAllFeatureNames(Lists.newArrayList("feature1")) + .setProject("project") .build()) .addEntityRows( EntityRow.newBuilder() @@ -500,9 +563,16 @@ public void shouldFilterOutUndesiredRows() { .setFeatureSet("featureSet:1") .build()); + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .addAllFeatureReferences(request.getFeaturesList()) + .setSpec(getFeatureSetSpec()) + .build(); + List featureRowBytes = featureRows.stream().map(AbstractMessageLite::toByteArray).collect(Collectors.toList()); - when(specService.getFeatureSet("featureSet", 1)).thenReturn(getFeatureSetSpec()); + when(specService.getFeatureSets(request.getFeaturesList())) + .thenReturn(Collections.singletonList(featureSetRequest)); when(jedisPool.getResource()).thenReturn(jedis); when(jedis.mget(redisKeyList)).thenReturn(featureRowBytes); 
when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); @@ -513,12 +583,12 @@ public void shouldFilterOutUndesiredRows() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("featureSet:1:feature1", intValue(1))) + .putFields("project/feature1:1", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("featureSet:1:feature1", intValue(2))) + .putFields("project/feature1:1", intValue(2))) .build(); GetOnlineFeaturesResponse actual = redisServingService.getOnlineFeatures(request); assertThat( @@ -541,6 +611,9 @@ private Value strValue(String val) { private FeatureSetSpec getFeatureSetSpec() { return FeatureSetSpec.newBuilder() + .setProject("project") + .setName("featureSet") + .setVersion(1) .addEntities(EntitySpec.newBuilder().setName("entity1")) .addEntities(EntitySpec.newBuilder().setName("entity2")) .setMaxAge(Duration.newBuilder().setSeconds(30)) // default @@ -549,6 +622,9 @@ private FeatureSetSpec getFeatureSetSpec() { private FeatureSetSpec getFeatureSetSpecWithNoMaxAge() { return FeatureSetSpec.newBuilder() + .setProject("project") + .setName("featureSet") + .setVersion(1) .addEntities(EntitySpec.newBuilder().setName("entity1")) .addEntities(EntitySpec.newBuilder().setName("entity2")) .setMaxAge(Duration.newBuilder().setSeconds(0).setNanos(0).build()) diff --git a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java index 9437aa0333..6f95f5307b 100644 --- a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java +++ b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java @@ -37,15 +37,17 @@ public void shouldConvertYamlToProto() throws IOException { + " host: localhost\n" + " port: 6379\n" + "subscriptions:\n" - + "- name: \"*\"\n" - + " 
version: \">0\"\n"; + + "- project: \"*\"\n" + + " name: \"*\"\n" + + " version: \"*\"\n"; Store store = YamlToProtoMapper.yamlToStoreProto(yaml); Store expected = Store.newBuilder() .setName("test") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .addSubscriptions(Subscription.newBuilder().setName("*").setVersion(">0")) + .addSubscriptions( + Subscription.newBuilder().setProject("*").setName("*").setVersion("*")) .build(); assertThat(store, equalTo(expected)); } diff --git a/tests/e2e/all_types_parquet/all_types_parquet.yaml b/tests/e2e/all_types_parquet/all_types_parquet.yaml index cf5ea70235..2043b6b473 100644 --- a/tests/e2e/all_types_parquet/all_types_parquet.yaml +++ b/tests/e2e/all_types_parquet/all_types_parquet.yaml @@ -5,28 +5,28 @@ spec: - name: customer_id valueType: INT64 features: - - name: int32_feature + - name: int32_feature_parquet valueType: INT64 - - name: int64_feature + - name: int64_feature_parquet valueType: INT64 - - name: float_feature + - name: float_feature_parquet valueType: DOUBLE - - name: double_feature + - name: double_feature_parquet valueType: DOUBLE - - name: string_feature + - name: string_feature_parquet valueType: STRING - - name: bytes_feature + - name: bytes_feature_parquet valueType: BYTES - - name: int32_list_feature + - name: int32_list_feature_parquet valueType: INT64_LIST - - name: int64_list_feature + - name: int64_list_feature_parquet valueType: INT64_LIST - - name: float_list_feature + - name: float_list_feature_parquet valueType: DOUBLE_LIST - - name: double_list_feature + - name: double_list_feature_parquet valueType: DOUBLE_LIST - - name: string_list_feature + - name: string_list_feature_parquet valueType: STRING_LIST - - name: bytes_list_feature + - name: bytes_list_feature_parquet valueType: BYTES_LIST maxAge: 0s diff --git a/tests/e2e/basic-ingest-redis-serving.py b/tests/e2e/basic-ingest-redis-serving.py index f674363f36..1aeccfa5a3 100644 --- 
a/tests/e2e/basic-ingest-redis-serving.py +++ b/tests/e2e/basic-ingest-redis-serving.py @@ -20,9 +20,10 @@ import tempfile import os from feast.feature import Feature +import uuid FLOAT_TOLERANCE = 0.00001 - +PROJECT_NAME = 'basic_' + uuid.uuid4().hex.upper()[0:6] @pytest.fixture(scope='module') def core_url(pytestconfig): @@ -44,6 +45,8 @@ def allow_dirty(pytestconfig): def client(core_url, serving_url, allow_dirty): # Get client for core and serving client = Client(core_url=core_url, serving_url=serving_url) + client.create_project(PROJECT_NAME) + client.set_project(PROJECT_NAME) # Ensure Feast core is active, but empty if not allow_dirty: @@ -76,13 +79,11 @@ def test_basic_register_feature_set_success(client): # Load feature set from file cust_trans_fs_expected = FeatureSet.from_yaml("basic/cust_trans_fs.yaml") + client.set_project(PROJECT_NAME) + # Register feature set client.apply(cust_trans_fs_expected) - # Feast Core needs some time to fully commit the FeatureSet applied - # when there is no existing job yet for the Featureset - time.sleep(15) - cust_trans_fs_actual = client.get_feature_set(name="customer_transactions") assert cust_trans_fs_actual == cust_trans_fs_expected @@ -99,10 +100,13 @@ def test_basic_register_feature_set_success(client): @pytest.mark.timeout(300) @pytest.mark.run(order=11) def test_basic_ingest_success(client, basic_dataframe): + client.set_project(PROJECT_NAME) + cust_trans_fs = client.get_feature_set(name="customer_transactions") # Ingest customer transaction data client.ingest(cust_trans_fs, basic_dataframe) + time.sleep(5) @pytest.mark.timeout(45) @@ -112,6 +116,8 @@ def test_basic_retrieve_online_success(client, basic_dataframe): while True: time.sleep(1) + client.set_project(PROJECT_NAME) + response = client.get_online_features( entity_rows=[ GetOnlineFeaturesRequest.EntityRow( @@ -122,9 +128,9 @@ def test_basic_retrieve_online_success(client, basic_dataframe): } ) ], - feature_ids=[ - 
"customer_transactions:1:daily_transactions", - "customer_transactions:1:total_transactions", + feature_refs=[ + "daily_transactions", + "total_transactions", ], ) # type: GetOnlineFeaturesResponse @@ -133,7 +139,7 @@ def test_basic_retrieve_online_success(client, basic_dataframe): returned_daily_transactions = float( response.field_values[0] - .fields["customer_transactions:1:daily_transactions"] + .fields[PROJECT_NAME + "/daily_transactions"] .float_val ) sent_daily_transactions = float( @@ -216,6 +222,7 @@ def test_all_types_register_feature_set_success(client): Feature(name="bytes_feature", dtype=ValueType.BYTES), Feature(name="bool_feature", dtype=ValueType.BOOL), Feature(name="double_feature", dtype=ValueType.DOUBLE), + Feature(name="double_list_feature", dtype=ValueType.DOUBLE_LIST), Feature(name="float_list_feature", dtype=ValueType.FLOAT_LIST), Feature(name="int64_list_feature", dtype=ValueType.INT64_LIST), Feature(name="int32_list_feature", dtype=ValueType.INT32_LIST), @@ -256,7 +263,7 @@ def test_all_types_ingest_success(client, all_types_dataframe): client.ingest(all_types_fs, all_types_dataframe) -@pytest.mark.timeout(15) +@pytest.mark.timeout(45) @pytest.mark.run(order=22) def test_all_types_retrieve_online_success(client, all_types_dataframe): # Poll serving for feature values until the correct values are returned @@ -270,29 +277,30 @@ def test_all_types_retrieve_online_success(client, all_types_dataframe): int64_val=all_types_dataframe.iloc[0]["user_id"])} ) ], - feature_ids=[ - "all_types:1:float_feature", - "all_types:1:int64_feature", - "all_types:1:int32_feature", - "all_types:1:string_feature", - "all_types:1:bytes_feature", - "all_types:1:bool_feature", - "all_types:1:double_feature", - "all_types:1:float_list_feature", - "all_types:1:int64_list_feature", - "all_types:1:int32_list_feature", - "all_types:1:string_list_feature", - "all_types:1:bytes_list_feature", - "all_types:1:double_list_feature", + feature_refs=[ + "float_feature", + 
"int64_feature", + "int32_feature", + "string_feature", + "bytes_feature", + "bool_feature", + "double_feature", + "float_list_feature", + "int64_list_feature", + "int32_list_feature", + "string_list_feature", + "bytes_list_feature", + "double_list_feature", ], ) # type: GetOnlineFeaturesResponse if response is None: continue + returned_float_list = ( response.field_values[0] - .fields["all_types:1:float_list_feature"] + .fields[PROJECT_NAME+"/float_list_feature"] .float_list_val.val ) @@ -315,8 +323,8 @@ def large_volume_dataframe(): range(ROW_COUNT) ], "customer_id": [offset + inc for inc in range(ROW_COUNT)], - "daily_transactions": [np.random.rand() for _ in range(ROW_COUNT)], - "total_transactions": [256 for _ in range(ROW_COUNT)], + "daily_transactions_large": [np.random.rand() for _ in range(ROW_COUNT)], + "total_transactions_large": [256 for _ in range(ROW_COUNT)], } ) return customer_data @@ -376,9 +384,9 @@ def test_large_volume_retrieve_online_success(client, large_volume_dataframe): } ) ], - feature_ids=[ - "customer_transactions_large:1:daily_transactions", - "customer_transactions_large:1:total_transactions", + feature_refs=[ + "daily_transactions_large", + "total_transactions_large", ], ) # type: GetOnlineFeaturesResponse @@ -387,11 +395,11 @@ def test_large_volume_retrieve_online_success(client, large_volume_dataframe): returned_daily_transactions = float( response.field_values[0] - .fields["customer_transactions_large:1:daily_transactions"] + .fields[PROJECT_NAME + "/daily_transactions_large"] .float_val ) sent_daily_transactions = float( - large_volume_dataframe.iloc[0]["daily_transactions"]) + large_volume_dataframe.iloc[0]["daily_transactions_large"]) if math.isclose( sent_daily_transactions, @@ -410,42 +418,42 @@ def all_types_parquet_file(): "datetime": [datetime.utcnow() for _ in range(COUNT)], "customer_id": [np.int32(random.randint(0, 10000)) for _ in range(COUNT)], - "int32_feature": [np.int32(random.randint(0, 10000)) for _ in + 
"int32_feature_parquet": [np.int32(random.randint(0, 10000)) for _ in range(COUNT)], - "int64_feature": [np.int64(random.randint(0, 10000)) for _ in + "int64_feature_parquet": [np.int64(random.randint(0, 10000)) for _ in range(COUNT)], - "float_feature": [np.float(random.random()) for _ in range(COUNT)], - "double_feature": [np.float64(random.random()) for _ in + "float_feature_parquet": [np.float(random.random()) for _ in range(COUNT)], + "double_feature_parquet": [np.float64(random.random()) for _ in range(COUNT)], - "string_feature": ["one" + str(random.random()) for _ in + "string_feature_parquet": ["one" + str(random.random()) for _ in range(COUNT)], - "bytes_feature": [b"one" for _ in range(COUNT)], - "int32_list_feature": [ + "bytes_feature_parquet": [b"one" for _ in range(COUNT)], + "int32_list_feature_parquet": [ np.array([1, 2, 3, random.randint(0, 10000)], dtype=np.int32) for _ in range(COUNT) ], - "int64_list_feature": [ + "int64_list_feature_parquet": [ np.array([1, random.randint(0, 10000), 3, 4], dtype=np.int64) for _ in range(COUNT) ], - "float_list_feature": [ + "float_list_feature_parquet": [ np.array([1.1, 1.2, 1.3, random.random()], dtype=np.float32) for _ in range(COUNT) ], - "double_list_feature": [ + "double_list_feature_parquet": [ np.array([1.1, 1.2, 1.3, random.random()], dtype=np.float64) for _ in range(COUNT) ], - "string_list_feature": [ + "string_list_feature_parquet": [ np.array(["one", "two" + str(random.random()), "three"]) for _ in range(COUNT) ], - "bytes_list_feature": [ + "bytes_list_feature_parquet": [ np.array([b"one", b"two", b"three"]) for _ in range(COUNT) ], } diff --git a/tests/e2e/bq-batch-retrieval.py b/tests/e2e/bq-batch-retrieval.py index 3458eb4740..9ed3490678 100644 --- a/tests/e2e/bq-batch-retrieval.py +++ b/tests/e2e/bq-batch-retrieval.py @@ -4,6 +4,7 @@ from datetime import timedelta from urllib.parse import urlparse +import uuid import numpy as np import pandas as pd import pytest @@ -17,6 +18,7 @@ from 
google.protobuf.duration_pb2 import Duration from pandavro import to_avro +PROJECT_NAME = 'batch_' + uuid.uuid4().hex.upper()[0:6] @pytest.fixture(scope="module") def core_url(pytestconfig): @@ -42,6 +44,8 @@ def gcs_path(pytestconfig): def client(core_url, serving_url, allow_dirty): # Get client for core and serving client = Client(core_url=core_url, serving_url=serving_url) + client.create_project(PROJECT_NAME) + client.set_project(PROJECT_NAME) # Ensure Feast core is active, but empty if not allow_dirty: @@ -51,16 +55,68 @@ def client(core_url, serving_url, allow_dirty): return client +@pytest.mark.first +def test_apply_all_featuresets(client): + client.set_project(PROJECT_NAME) -def test_get_batch_features_with_file(client): file_fs1 = FeatureSet( - "file_feature_set", - features=[Feature("feature_value", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], + "file_feature_set", + features=[Feature("feature_value1", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) + client.apply(file_fs1) + + gcs_fs1 = FeatureSet( + "gcs_feature_set", + features=[Feature("feature_value2", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) + client.apply(gcs_fs1) + + proc_time_fs = FeatureSet( + "processing_time", + features=[Feature("feature_value3", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) + client.apply(proc_time_fs) + + add_cols_fs = FeatureSet( + "additional_columns", + features=[Feature("feature_value4", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) + client.apply(add_cols_fs) + + historical_fs = FeatureSet( + "historical", + features=[Feature("feature_value5", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) + client.apply(historical_fs) + + fs1 = 
FeatureSet( + "feature_set_1", + features=[Feature("feature_value6", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) + + fs2 = FeatureSet( + "feature_set_2", + features=[Feature("other_feature_value7", ValueType.INT64)], + entities=[Entity("other_entity_id", ValueType.INT64)], max_age=Duration(seconds=100), ) + client.apply(fs1) + client.apply(fs2) - client.apply(file_fs1) + +def test_get_batch_features_with_file(client): file_fs1 = client.get_feature_set(name="file_feature_set", version=1) N_ROWS = 10 @@ -69,7 +125,7 @@ def test_get_batch_features_with_file(client): { "datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], - "feature_value": [f"{i}" for i in range(N_ROWS)], + "feature_value1": [f"{i}" for i in range(N_ROWS)], } ) client.ingest(file_fs1, features_1_df) @@ -77,27 +133,20 @@ def test_get_batch_features_with_file(client): # Rename column (datetime -> event_timestamp) features_1_df = features_1_df.rename(columns={"datetime": "event_timestamp"}) - to_avro(df=features_1_df, file_path_or_buffer="file_feature_set.avro") + to_avro(df=features_1_df[["event_timestamp", "entity_id"]], file_path_or_buffer="file_feature_set.avro") + time.sleep(15) feature_retrieval_job = client.get_batch_features( - entity_rows="file://file_feature_set.avro", feature_ids=["file_feature_set:1:feature_value"] + entity_rows="file://file_feature_set.avro", feature_refs=[f"{PROJECT_NAME}/feature_value1:1"] ) output = feature_retrieval_job.to_dataframe() print(output.head()) - assert output["entity_id"].to_list() == [int(i) for i in output["file_feature_set_v1_feature_value"].to_list()] + assert output["entity_id"].to_list() == [int(i) for i in output["feature_value1"].to_list()] def test_get_batch_features_with_gs_path(client, gcs_path): - gcs_fs1 = FeatureSet( - "gcs_feature_set", - features=[Feature("feature_value", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - 
max_age=Duration(seconds=100), - ) - - client.apply(gcs_fs1) gcs_fs1 = client.get_feature_set(name="gcs_feature_set", version=1) N_ROWS = 10 @@ -106,7 +155,7 @@ def test_get_batch_features_with_gs_path(client, gcs_path): { "datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], - "feature_value": [f"{i}" for i in range(N_ROWS)], + "feature_value2": [f"{i}" for i in range(N_ROWS)], } ) client.ingest(gcs_fs1, features_1_df) @@ -116,7 +165,7 @@ def test_get_batch_features_with_gs_path(client, gcs_path): # Output file to local file_name = "gcs_feature_set.avro" - to_avro(df=features_1_df, file_path_or_buffer=file_name) + to_avro(df=features_1_df[["event_timestamp", "entity_id"]], file_path_or_buffer=file_name) uri = urlparse(gcs_path) bucket = uri.hostname @@ -129,26 +178,19 @@ def test_get_batch_features_with_gs_path(client, gcs_path): blob = bucket.blob(remote_path) blob.upload_from_filename(file_name) + time.sleep(15) feature_retrieval_job = client.get_batch_features( entity_rows=f"{gcs_path}{ts}/*", - feature_ids=["gcs_feature_set:1:feature_value"] + feature_refs=[f"{PROJECT_NAME}/feature_value2:1"] ) output = feature_retrieval_job.to_dataframe() print(output.head()) - assert output["entity_id"].to_list() == [int(i) for i in output["gcs_feature_set_v1_feature_value"].to_list()] + assert output["entity_id"].to_list() == [int(i) for i in output["feature_value2"].to_list()] def test_order_by_creation_time(client): - proc_time_fs = FeatureSet( - "processing_time", - features=[Feature("feature_value", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) - client.apply(proc_time_fs) - time.sleep(10) proc_time_fs = client.get_feature_set(name="processing_time", version=1) time_offset = datetime.utcnow().replace(tzinfo=pytz.utc) @@ -157,43 +199,35 @@ def test_order_by_creation_time(client): { "datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], - "feature_value": ["WRONG"] * 
N_ROWS, + "feature_value3": ["WRONG"] * N_ROWS, } ) correct_df = pd.DataFrame( { "datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], - "feature_value": ["CORRECT"] * N_ROWS, + "feature_value3": ["CORRECT"] * N_ROWS, } ) client.ingest(proc_time_fs, incorrect_df) - time.sleep(10) + time.sleep(15) client.ingest(proc_time_fs, correct_df) feature_retrieval_job = client.get_batch_features( - entity_rows=incorrect_df[["datetime", "entity_id"]], feature_ids=["processing_time:1:feature_value"] + entity_rows=incorrect_df[["datetime", "entity_id"]], feature_refs=[f"{PROJECT_NAME}/feature_value3:1"] ) output = feature_retrieval_job.to_dataframe() print(output.head()) - assert output["processing_time_v1_feature_value"].to_list() == ["CORRECT"] * N_ROWS + assert output["feature_value3"].to_list() == ["CORRECT"] * N_ROWS def test_additional_columns_in_entity_table(client): - add_cols_fs = FeatureSet( - "additional_columns", - features=[Feature("feature_value", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) - client.apply(add_cols_fs) - time.sleep(10) add_cols_fs = client.get_feature_set(name="additional_columns", version=1) N_ROWS = 10 time_offset = datetime.utcnow().replace(tzinfo=pytz.utc) features_df = pd.DataFrame( - {"datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], "feature_value": ["abc"] * N_ROWS} + {"datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], "feature_value4": ["abc"] * N_ROWS} ) client.ingest(add_cols_fs, features_df) @@ -205,26 +239,20 @@ def test_additional_columns_in_entity_table(client): "additional_float_col": [random.random() for i in range(N_ROWS)], } ) + + time.sleep(15) feature_retrieval_job = client.get_batch_features( - entity_rows=entity_df, feature_ids=["additional_columns:1:feature_value"] + entity_rows=entity_df, feature_refs=[f"{PROJECT_NAME}/feature_value4:1"] ) output = feature_retrieval_job.to_dataframe() 
print(output.head()) assert np.allclose(output["additional_float_col"], entity_df["additional_float_col"]) assert output["additional_string_col"].to_list() == entity_df["additional_string_col"].to_list() - assert output["additional_columns_v1_feature_value"].to_list() == features_df["feature_value"].to_list() + assert output["feature_value4"].to_list() == features_df["feature_value4"].to_list() def test_point_in_time_correctness_join(client): - historical_fs = FeatureSet( - "historical", - features=[Feature("feature_value", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) - client.apply(historical_fs) - time.sleep(10) historical_fs = client.get_feature_set(name="historical", version=1) time_offset = datetime.utcnow().replace(tzinfo=pytz.utc) @@ -238,7 +266,7 @@ def test_point_in_time_correctness_join(client): ] * N_EXAMPLES, "entity_id": [i for i in range(N_EXAMPLES) for _ in range(3)], - "feature_value": ["WRONG", "WRONG", "CORRECT"] * N_EXAMPLES, + "feature_value5": ["WRONG", "WRONG", "CORRECT"] * N_EXAMPLES, } ) entity_df = pd.DataFrame( @@ -247,32 +275,16 @@ def test_point_in_time_correctness_join(client): client.ingest(historical_fs, historical_df) - feature_retrieval_job = client.get_batch_features(entity_rows=entity_df, feature_ids=["historical:1:feature_value"]) + time.sleep(15) + feature_retrieval_job = client.get_batch_features(entity_rows=entity_df, feature_refs=[f"{PROJECT_NAME}/feature_value5"]) output = feature_retrieval_job.to_dataframe() print(output.head()) - assert output["historical_v1_feature_value"].to_list() == ["CORRECT"] * N_EXAMPLES + assert output["feature_value5"].to_list() == ["CORRECT"] * N_EXAMPLES def test_multiple_featureset_joins(client): - fs1 = FeatureSet( - "feature_set_1", - features=[Feature("feature_value", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) - - fs2 = FeatureSet( - "feature_set_2", - 
features=[Feature("other_feature_value", ValueType.INT64)], - entities=[Entity("other_entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) - - client.apply(fs1) fs1 = client.get_feature_set(name="feature_set_1", version=1) - - client.apply(fs2) fs2 = client.get_feature_set(name="feature_set_2", version=1) N_ROWS = 10 @@ -281,7 +293,7 @@ def test_multiple_featureset_joins(client): { "datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], - "feature_value": [f"{i}" for i in range(N_ROWS)], + "feature_value6": [f"{i}" for i in range(N_ROWS)], } ) client.ingest(fs1, features_1_df) @@ -290,7 +302,7 @@ def test_multiple_featureset_joins(client): { "datetime": [time_offset] * N_ROWS, "other_entity_id": [i for i in range(N_ROWS)], - "other_feature_value": [i for i in range(N_ROWS)], + "other_feature_value7": [i for i in range(N_ROWS)], } ) client.ingest(fs2, features_2_df) @@ -302,11 +314,13 @@ def test_multiple_featureset_joins(client): "other_entity_id": [N_ROWS - 1 - i for i in range(N_ROWS)], } ) + + time.sleep(15) feature_retrieval_job = client.get_batch_features( - entity_rows=entity_df, feature_ids=["feature_set_1:1:feature_value", "feature_set_2:1:other_feature_value"] + entity_rows=entity_df, feature_refs=[f"{PROJECT_NAME}/feature_value6:1", f"{PROJECT_NAME}/other_feature_value7:1"] ) output = feature_retrieval_job.to_dataframe() print(output.head()) - assert output["entity_id"].to_list() == [int(i) for i in output["feature_set_1_v1_feature_value"].to_list()] - assert output["other_entity_id"].to_list() == output["feature_set_2_v1_other_feature_value"].to_list() + assert output["entity_id"].to_list() == [int(i) for i in output["feature_value6"].to_list()] + assert output["other_entity_id"].to_list() == output["other_feature_value7"].to_list() diff --git a/tests/e2e/large_volume/cust_trans_large_fs.yaml b/tests/e2e/large_volume/cust_trans_large_fs.yaml index 54bf4cac28..7f36151392 100644 --- 
a/tests/e2e/large_volume/cust_trans_large_fs.yaml +++ b/tests/e2e/large_volume/cust_trans_large_fs.yaml @@ -5,8 +5,8 @@ spec: - name: customer_id valueType: INT64 features: - - name: daily_transactions + - name: daily_transactions_large valueType: FLOAT - - name: total_transactions + - name: total_transactions_large valueType: FLOAT maxAge: 3600s