diff --git a/src/main/java/io/cryostat/net/web/http/api/v1/RecordingsPostHandler.java b/src/main/java/io/cryostat/net/web/http/api/v1/RecordingsPostHandler.java index 985534b8e7..42f9aedcce 100644 --- a/src/main/java/io/cryostat/net/web/http/api/v1/RecordingsPostHandler.java +++ b/src/main/java/io/cryostat/net/web/http/api/v1/RecordingsPostHandler.java @@ -201,7 +201,7 @@ public void handleAuthenticated(RoutingContext ctx) throws Exception { ? 0 : Integer.parseInt(m.group(4).substring(1)); - final String subdirectoryName = RecordingArchiveHelper.UNLABELLED; + final String subdirectoryName = RecordingArchiveHelper.UPLOADED_RECORDINGS_SUBDIRECTORY; final String basename = String.format("%s_%s_%s", targetName, recordingName, timestamp); final String uploadedFileName = upload.uploadedFileName(); validateRecording( diff --git a/src/main/java/io/cryostat/net/web/http/api/v2/graph/AllArchivedRecordingsFetcher.java b/src/main/java/io/cryostat/net/web/http/api/v2/graph/AllArchivedRecordingsFetcher.java new file mode 100644 index 0000000000..9a8d4e9df9 --- /dev/null +++ b/src/main/java/io/cryostat/net/web/http/api/v2/graph/AllArchivedRecordingsFetcher.java @@ -0,0 +1,93 @@ +/* + * Copyright The Cryostat Authors + * + * The Universal Permissive License (UPL), Version 1.0 + * + * Subject to the condition set forth below, permission is hereby granted to any + * person obtaining a copy of this software, associated documentation and/or data + * (collectively the "Software"), free of charge and under any and all copyright + * rights in the Software, and any and all patent rights owned or freely + * licensable by each licensor hereunder covering either (i) the unmodified + * Software as contributed to or provided by such licensor, or (ii) the Larger + * Works (as defined below), to deal in both + * + * (a) the Software, and + * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if + * one is included with the Software (each a "Larger Work" to which the 
Software + * is contributed by such licensors), + * + * without restriction, including without limitation the rights to copy, create + * derivative works of, display, perform, and distribute the Software and make, + * use, sell, offer for sale, import, export, have made, and have sold the + * Software and the Larger Work(s), and to sublicense the foregoing rights on + * either these or other terms. + * + * This license is subject to the following condition: + * The above copyright notice and either this complete permission notice or at + * a minimum a reference to the UPL must be included in all copies or + * substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ +package io.cryostat.net.web.http.api.v2.graph; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import javax.inject.Inject; + +import io.cryostat.net.web.http.api.v2.graph.labels.LabelSelectorMatcher; +import io.cryostat.recordings.RecordingArchiveHelper; +import io.cryostat.rules.ArchivedRecordingInfo; + +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; + +class AllArchivedRecordingsFetcher implements DataFetcher<List<ArchivedRecordingInfo>> { + + private final RecordingArchiveHelper archiveHelper; + + @Inject + AllArchivedRecordingsFetcher(RecordingArchiveHelper archiveHelper) { + this.archiveHelper = archiveHelper; + } + + public List<ArchivedRecordingInfo> get(DataFetchingEnvironment environment) throws Exception { + FilterInput filter = FilterInput.from(environment); + List<ArchivedRecordingInfo> result = new ArrayList<>(); + if (filter.contains(FilterInput.Key.SOURCE_TARGET)) { + String targetId = filter.get(FilterInput.Key.SOURCE_TARGET); + result = archiveHelper.getRecordings(targetId).get(); + } else { + result = archiveHelper.getRecordings().get(); + } + if (filter.contains(FilterInput.Key.NAME)) { + String recordingName = filter.get(FilterInput.Key.NAME); + result = + result.stream() + .filter(r -> Objects.equals(r.getName(), recordingName)) + .collect(Collectors.toList()); + } + if (filter.contains(FilterInput.Key.LABELS)) { + List<String> labels = filter.get(FilterInput.Key.LABELS); + for (String label : labels) { + result = + result.stream() + .filter( + r -> + LabelSelectorMatcher.parse(label) + .test(r.getMetadata().getLabels())) + .collect(Collectors.toList()); + } + } + return result; + } +} diff --git a/src/main/java/io/cryostat/net/web/http/api/v2/graph/ArchivedRecordingsFetcher.java b/src/main/java/io/cryostat/net/web/http/api/v2/graph/ArchivedRecordingsFetcher.java index d82a532981..eee55b154e 100644 --- a/src/main/java/io/cryostat/net/web/http/api/v2/graph/ArchivedRecordingsFetcher.java +++ 
b/src/main/java/io/cryostat/net/web/http/api/v2/graph/ArchivedRecordingsFetcher.java @@ -44,34 +44,41 @@ import javax.inject.Inject; +import io.cryostat.net.web.http.api.v2.graph.ArchivedRecordingsFetcher.Archived; import io.cryostat.net.web.http.api.v2.graph.RecordingsFetcher.Recordings; import io.cryostat.net.web.http.api.v2.graph.labels.LabelSelectorMatcher; import io.cryostat.rules.ArchivedRecordingInfo; +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -class ArchivedRecordingsFetcher implements DataFetcher<List<ArchivedRecordingInfo>> { +@SuppressFBWarnings( + value = "URF_UNREAD_FIELD", + justification = + "The Archived and AggregateInfo fields are serialized and returned to the client by" + + " the GraphQL engine") +class ArchivedRecordingsFetcher implements DataFetcher<Archived> { @Inject ArchivedRecordingsFetcher() {} - public List<ArchivedRecordingInfo> get(DataFetchingEnvironment environment) throws Exception { + public Archived get(DataFetchingEnvironment environment) throws Exception { Recordings source = environment.getSource(); FilterInput filter = FilterInput.from(environment); - List<ArchivedRecordingInfo> result = new ArrayList<>(source.archived); + List<ArchivedRecordingInfo> recordings = new ArrayList<>(source.archived); if (filter.contains(FilterInput.Key.NAME)) { String recordingName = filter.get(FilterInput.Key.NAME); - result = - result.stream() + recordings = + recordings.stream() .filter(r -> Objects.equals(r.getName(), recordingName)) .collect(Collectors.toList()); } if (filter.contains(FilterInput.Key.LABELS)) { List<String> labels = filter.get(FilterInput.Key.LABELS); for (String label : labels) { - result = - result.stream() + recordings = + recordings.stream() .filter( r -> LabelSelectorMatcher.parse(label) @@ -79,6 +86,22 @@ public List<ArchivedRecordingInfo> get(DataFetchingEnvironment environment) thro .collect(Collectors.toList()); } } - return result; + + Archived archived = new Archived(); + AggregateInfo aggregate = new AggregateInfo(); + archived.data = recordings; + 
aggregate.count = Long.valueOf(archived.data.size()); + archived.aggregate = aggregate; + + return archived; + } + + static class Archived { + List<ArchivedRecordingInfo> data; + AggregateInfo aggregate; + } + + static class AggregateInfo { + Long count; } } diff --git a/src/main/java/io/cryostat/net/web/http/api/v2/graph/FilterInput.java b/src/main/java/io/cryostat/net/web/http/api/v2/graph/FilterInput.java index 773f41e6c7..945635f090 100644 --- a/src/main/java/io/cryostat/net/web/http/api/v2/graph/FilterInput.java +++ b/src/main/java/io/cryostat/net/web/http/api/v2/graph/FilterInput.java @@ -68,6 +68,7 @@ enum Key { NAME("name"), LABELS("labels"), ANNOTATIONS("annotations"), + SOURCE_TARGET("sourceTarget"), NODE_TYPE("nodeType"), STATE("state"), CONTINUOUS("continuous"), diff --git a/src/main/java/io/cryostat/net/web/http/api/v2/graph/GraphModule.java b/src/main/java/io/cryostat/net/web/http/api/v2/graph/GraphModule.java index 7cd5976ccb..2d16542713 100644 --- a/src/main/java/io/cryostat/net/web/http/api/v2/graph/GraphModule.java +++ b/src/main/java/io/cryostat/net/web/http/api/v2/graph/GraphModule.java @@ -100,6 +100,7 @@ static GraphQL provideGraphQL( RecordingsFetcher recordingsFetcher, ActiveRecordingsFetcher activeRecordingsFetcher, ArchivedRecordingsFetcher archivedRecordingsFetcher, + AllArchivedRecordingsFetcher allArchivedRecordingsFetcher, StartRecordingOnTargetMutator startRecordingOnTargetMutator, SnapshotOnTargetMutator snapshotOnTargetMutator, StopRecordingMutator stopRecordingMutator, @@ -129,6 +130,10 @@ static GraphQL provideGraphQL( .type( TypeRuntimeWiring.newTypeWiring("Query") .dataFetcher("targetNodes", targetNodesFetcher)) + .type( + TypeRuntimeWiring.newTypeWiring("Query") + .dataFetcher( + "archivedRecordings", allArchivedRecordingsFetcher)) .type( TypeRuntimeWiring.newTypeWiring("EnvironmentNode") .dataFetcher("children", nodeChildrenFetcher)) diff --git a/src/main/java/io/cryostat/net/web/http/api/v2/graph/RecordingsFetcher.java 
b/src/main/java/io/cryostat/net/web/http/api/v2/graph/RecordingsFetcher.java index c52919b9c6..95605b8c40 100644 --- a/src/main/java/io/cryostat/net/web/http/api/v2/graph/RecordingsFetcher.java +++ b/src/main/java/io/cryostat/net/web/http/api/v2/graph/RecordingsFetcher.java @@ -102,49 +102,59 @@ public Recordings get(DataFetchingEnvironment environment) throws Exception { String targetId = target.getServiceUri().toString(); Recordings recordings = new Recordings(); - ConnectionDescriptor cd = - new ConnectionDescriptor(targetId, credentialsManager.getCredentials(target)); - // FIXME populating these two struct members are each async tasks. we should do them in - // parallel - recordings.archived = archiveHelper.getRecordings(targetId).get(); - recordings.active = - tcm.executeConnectedTask( - cd, - conn -> { - return conn.getService().getAvailableRecordings().stream() - .map( - r -> { - try { - String downloadUrl = - webServer - .get() - .getDownloadURL( - conn, r.getName()); - String reportUrl = - webServer - .get() - .getReportURL( - conn, r.getName()); - Metadata metadata = - metadataManager.getMetadata( - targetId, r.getName()); - return new GraphRecordingDescriptor( - target, - r, - downloadUrl, - reportUrl, - metadata); - } catch (QuantityConversionException - | URISyntaxException - | IOException e) { - logger.error(e); - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - }, - false); + List<String> requestedFields = + environment.getSelectionSet().getFields().stream() + .map(field -> field.getName()) + .collect(Collectors.toList()); + + if (requestedFields.contains("active")) { + ConnectionDescriptor cd = + new ConnectionDescriptor(targetId, credentialsManager.getCredentials(target)); + // FIXME populating these two struct members are each async tasks. 
we should do them in + // parallel + recordings.active = + tcm.executeConnectedTask( + cd, + conn -> { + return conn.getService().getAvailableRecordings().stream() + .map( + r -> { + try { + String downloadUrl = + webServer + .get() + .getDownloadURL( + conn, r.getName()); + String reportUrl = + webServer + .get() + .getReportURL( + conn, r.getName()); + Metadata metadata = + metadataManager.getMetadata( + targetId, r.getName()); + return new GraphRecordingDescriptor( + target, + r, + downloadUrl, + reportUrl, + metadata); + } catch (QuantityConversionException + | URISyntaxException + | IOException e) { + logger.error(e); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + }, + false); + } + + if (requestedFields.contains("archived")) { + recordings.archived = archiveHelper.getRecordings(targetId).get(); + } return recordings; } diff --git a/src/main/java/io/cryostat/recordings/RecordingArchiveHelper.java b/src/main/java/io/cryostat/recordings/RecordingArchiveHelper.java index 0cf7bf606f..6d285dc196 100644 --- a/src/main/java/io/cryostat/recordings/RecordingArchiveHelper.java +++ b/src/main/java/io/cryostat/recordings/RecordingArchiveHelper.java @@ -101,8 +101,8 @@ public class RecordingArchiveHelper { private static final String SAVE_NOTIFICATION_CATEGORY = "ActiveRecordingSaved"; private static final String DELETE_NOTIFICATION_CATEGORY = "ArchivedRecordingDeleted"; - public static final String UNLABELLED = "unlabelled"; public static final String ARCHIVES = "archives"; + public static final String UPLOADED_RECORDINGS_SUBDIRECTORY = "uploads"; RecordingArchiveHelper( FileSystem fs, @@ -220,7 +220,7 @@ public Future deleteRecording(String recordingName) { ARCHIVES, recordingName)); String subdirectoryName = parentPath.getFileName().toString(); String targetId = - (subdirectoryName.equals(UNLABELLED)) + (subdirectoryName.equals(UPLOADED_RECORDINGS_SUBDIRECTORY)) ? 
"" : new String(base32.decode(subdirectoryName), StandardCharsets.UTF_8); notificationFactory @@ -273,8 +273,11 @@ public Path getCachedReportPath(String recordingName) { public Future<List<ArchivedRecordingInfo>> getRecordings(String targetId) { CompletableFuture<List<ArchivedRecordingInfo>> future = new CompletableFuture<>(); - String encodedServiceUri = base32.encodeAsString(targetId.getBytes(StandardCharsets.UTF_8)); - Path specificRecordingsPath = archivedRecordingsPath.resolve(encodedServiceUri); + String subdirectory = + targetId.equals(UPLOADED_RECORDINGS_SUBDIRECTORY) + ? targetId + : base32.encodeAsString(targetId.getBytes(StandardCharsets.UTF_8)); + Path specificRecordingsPath = archivedRecordingsPath.resolve(subdirectory); try { if (!fs.exists(archivedRecordingsPath)) { @@ -308,7 +311,7 @@ public Future<List<ArchivedRecordingInfo>> getRecordings(String targetId) { file -> { try { return new ArchivedRecordingInfo( - encodedServiceUri, + subdirectory, file, webServer.getArchivedDownloadURL(file), webServer.getArchivedReportURL(file), diff --git a/src/main/resources/queries.graphqls b/src/main/resources/queries.graphqls index 9f82016256..f32ae14c79 100644 --- a/src/main/resources/queries.graphqls +++ b/src/main/resources/queries.graphqls @@ -1,5 +1,6 @@ type Query { rootNode: EnvironmentNode! environmentNodes(filter: EnvironmentNodeFilterInput): [EnvironmentNode!]! - targetNodes(filter: TargetNodesFilterInput): [TargetNode!]! # TODO add filters for names, labels, annotations + targetNodes(filter: TargetNodesFilterInput): [TargetNode!]! + archivedRecordings(filter: ArchivedRecordingFilterInput): [ArchivedRecording!]! 
} diff --git a/src/main/resources/types.graphqls b/src/main/resources/types.graphqls index e0c71c0c31..8fa10b6af7 100644 --- a/src/main/resources/types.graphqls +++ b/src/main/resources/types.graphqls @@ -38,6 +38,7 @@ input ActiveRecordingFilterInput { input ArchivedRecordingFilterInput { name: String labels: [String] + sourceTarget: String } type ServiceRef { @@ -82,7 +83,7 @@ interface Node { type Recordings { active(filter: ActiveRecordingFilterInput): [ActiveRecording!]! - archived(filter: ArchivedRecordingFilterInput): [ArchivedRecording!]! + archived(filter: ArchivedRecordingFilterInput): Archived! } type ActiveRecording implements Recording { @@ -114,6 +115,15 @@ type ArchivedRecording implements Recording { doDelete: ArchivedRecording! } +type Archived { + data: [ArchivedRecording!]! + aggregate: AggregateInfo! +} + +type AggregateInfo { + count: Long! +} + interface Recording { name: String! reportUrl: Url! diff --git a/src/test/java/io/cryostat/recordings/RecordingArchiveHelperTest.java b/src/test/java/io/cryostat/recordings/RecordingArchiveHelperTest.java index c8628474c1..3680436c98 100644 --- a/src/test/java/io/cryostat/recordings/RecordingArchiveHelperTest.java +++ b/src/test/java/io/cryostat/recordings/RecordingArchiveHelperTest.java @@ -939,4 +939,72 @@ void getRecordingsShouldHandleIOException() throws Exception { } }); } + + @Test + void getRecordingsShouldDifferentiateBetweenUploadsAndTarget() throws Exception { + Mockito.when(fs.exists(Mockito.any())).thenReturn(true); + Mockito.when(fs.isReadable(Mockito.any())).thenReturn(true); + Mockito.when(fs.isDirectory(Mockito.any())).thenReturn(true); + + Mockito.when(base32.encodeAsString(Mockito.any())).thenReturn("encodedServiceUri"); + + String targetIdUploads = "uploads"; + String targetIdTarget = "someServiceUri"; + Path specificRecordingsPath = Path.of("/some/path/"); + Mockito.when(archivedRecordingsPath.resolve(Mockito.anyString())) + .thenReturn(specificRecordingsPath); + 
Mockito.when(fs.listDirectoryChildren(specificRecordingsPath)) + .thenReturn(List.of("foo_recording")); + + Mockito.when(webServer.getArchivedReportURL(Mockito.anyString())) + .thenAnswer( + new Answer<String>() { + @Override + public String answer(InvocationOnMock invocation) throws Throwable { + String name = invocation.getArgument(0); + return "/some/path/archive/" + name; + } + }); + Mockito.when(webServer.getArchivedDownloadURL(Mockito.anyString())) + .thenAnswer( + new Answer<String>() { + @Override + public String answer(InvocationOnMock invocation) throws Throwable { + String name = invocation.getArgument(0); + return "/some/path/download/" + name; + } + }); + + Mockito.when(recordingMetadataManager.getMetadata(Mockito.anyString(), Mockito.anyString())) + .thenReturn(new Metadata()); + + // Test get recordings from uploads + List<ArchivedRecordingInfo> result = + recordingArchiveHelper.getRecordings(targetIdUploads).get(); + + Mockito.verify(archivedRecordingsPath).resolve(targetIdUploads); + + List<ArchivedRecordingInfo> expected = + List.of( + new ArchivedRecordingInfo( + targetIdUploads, + "foo_recording", + "/some/path/download/foo_recording", + "/some/path/archive/foo_recording")); + MatcherAssert.assertThat(result, Matchers.equalTo(expected)); + + // Test get recordings from target + result = recordingArchiveHelper.getRecordings(targetIdTarget).get(); + + Mockito.verify(base32).encodeAsString(targetIdTarget.getBytes(StandardCharsets.UTF_8)); + Mockito.verify(archivedRecordingsPath).resolve("encodedServiceUri"); + + expected = + List.of( + new ArchivedRecordingInfo( + "encodedServiceUri", + "foo_recording", + "/some/path/download/foo_recording", + "/some/path/archive/foo_recording")); + } } diff --git a/src/test/java/itest/GraphQLIT.java b/src/test/java/itest/GraphQLIT.java index da381383f0..4fea816fb5 100644 --- a/src/test/java/itest/GraphQLIT.java +++ b/src/test/java/itest/GraphQLIT.java @@ -349,8 +349,8 @@ void testDeleteMutation() throws Exception { query.put( "query", "query { targetNodes(filter: { 
annotations: \"PORT == 9093\" }) { recordings {" - + " active { name doDelete { name } } archived { name doDelete { name } } } }" - + " }"); + + " active { name doDelete { name } } archived { data { name doDelete { name }" + + " } aggregate { count } } } } }"); webClient .post("/api/v2.2/graphql") .sendJson( @@ -370,10 +370,11 @@ void testDeleteMutation() throws Exception { TargetNode node = actual.data.targetNodes.get(0); MatcherAssert.assertThat(node.recordings.active, Matchers.hasSize(1)); - MatcherAssert.assertThat(node.recordings.archived, Matchers.hasSize(1)); + MatcherAssert.assertThat(node.recordings.archived.data, Matchers.hasSize(1)); + MatcherAssert.assertThat(node.recordings.archived.aggregate.count, Matchers.equalTo(1L)); ActiveRecording activeRecording = node.recordings.active.get(0); - ArchivedRecording archivedRecording = node.recordings.archived.get(0); + ArchivedRecording archivedRecording = node.recordings.archived.data.get(0); MatcherAssert.assertThat(activeRecording.name, Matchers.equalTo("graphql-itest")); MatcherAssert.assertThat(activeRecording.doDelete.name, Matchers.equalTo("graphql-itest")); @@ -488,9 +489,68 @@ public boolean equals(Object obj) { } } + static class AggregateInfo { + Long count; + + @Override + public String toString() { + return "AggregateInfo [count=" + count + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(count); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + AggregateInfo other = (AggregateInfo) obj; + return Objects.equals(count, other.count); + } + } + + static class Archived { + List<ArchivedRecording> data; + AggregateInfo aggregate; + + @Override + public String toString() { + return "Archived [data=" + data + ", aggregate=" + aggregate + "]"; + } + + @Override + public int hashCode() { + return Objects.hash(data, aggregate); + } + + @Override + public boolean 
equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Archived other = (Archived) obj; + return Objects.equals(data, other.data) && Objects.equals(aggregate, other.aggregate); + } + } + static class Recordings { List<ActiveRecording> active; - List<ArchivedRecording> archived; + Archived archived; @Override public String toString() {