Skip to content

Commit

Permalink
feat(graphql): Add query for all archived recordings (#924)
Browse files Browse the repository at this point in the history
* feat(graphql): Add query for all archived recordings

* fixup! feat(graphql): Add query for all archived recordings

* Run spotless

* Add unit test

* Refactoring

* Fix imports

* Add an aggregate count field for the archived recordings GraphQL query

* fixup! Add an aggregate count field for the archived recordings GraphQL query

* Run mvn spotless:apply

* Add back changes overwritten during rebase. Suppress URF_UNREAD_FIELD warnings for new Archived and AggregateInfo static classes
  • Loading branch information
Hareet Dhillon authored Jul 29, 2022
1 parent eccb6e1 commit 3961fec
Show file tree
Hide file tree
Showing 11 changed files with 338 additions and 64 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ public void handleAuthenticated(RoutingContext ctx) throws Exception {
? 0
: Integer.parseInt(m.group(4).substring(1));

final String subdirectoryName = RecordingArchiveHelper.UNLABELLED;
final String subdirectoryName = RecordingArchiveHelper.UPLOADED_RECORDINGS_SUBDIRECTORY;
final String basename = String.format("%s_%s_%s", targetName, recordingName, timestamp);
final String uploadedFileName = upload.uploadedFileName();
validateRecording(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
/*
* Copyright The Cryostat Authors
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or data
* (collectively the "Software"), free of charge and under any and all copyright
* rights in the Software, and any and all patent rights owned or freely
* licensable by each licensor hereunder covering either (i) the unmodified
* Software as contributed to or provided by such licensor, or (ii) the Larger
* Works (as defined below), to deal in both
*
* (a) the Software, and
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software (each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
* The above copyright notice and either this complete permission notice or at
* a minimum a reference to the UPL must be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.cryostat.net.web.http.api.v2.graph;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import javax.inject.Inject;

import io.cryostat.net.web.http.api.v2.graph.labels.LabelSelectorMatcher;
import io.cryostat.recordings.RecordingArchiveHelper;
import io.cryostat.rules.ArchivedRecordingInfo;

import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;

/**
 * Resolves the top-level "archivedRecordings" GraphQL query: returns every archived recording
 * known to the {@link RecordingArchiveHelper}, optionally narrowed by the client-supplied
 * filter input (source target, exact recording name, and/or label selectors).
 */
class AllArchivedRecordingsFetcher implements DataFetcher<List<ArchivedRecordingInfo>> {

    private final RecordingArchiveHelper archiveHelper;

    @Inject
    AllArchivedRecordingsFetcher(RecordingArchiveHelper archiveHelper) {
        this.archiveHelper = archiveHelper;
    }

    /**
     * Fetches archived recordings matching the query's filter input.
     *
     * @param environment the GraphQL execution environment carrying the filter arguments
     * @return the (possibly empty) list of matching archived recordings
     * @throws Exception if retrieving the archived recordings fails
     */
    public List<ArchivedRecordingInfo> get(DataFetchingEnvironment environment) throws Exception {
        FilterInput filter = FilterInput.from(environment);

        // Scope the listing to a single target's archives when "sourceTarget" is supplied;
        // otherwise list recordings across all targets.
        List<ArchivedRecordingInfo> result;
        if (filter.contains(FilterInput.Key.SOURCE_TARGET)) {
            String targetId = filter.get(FilterInput.Key.SOURCE_TARGET);
            result = archiveHelper.getRecordings(targetId).get();
        } else {
            result = archiveHelper.getRecordings().get();
        }

        // Exact-match on recording name, if requested.
        if (filter.contains(FilterInput.Key.NAME)) {
            String recordingName = filter.get(FilterInput.Key.NAME);
            result =
                    result.stream()
                            .filter(r -> Objects.equals(r.getName(), recordingName))
                            .collect(Collectors.toList());
        }

        // Every supplied label selector must match (selectors are ANDed together).
        if (filter.contains(FilterInput.Key.LABELS)) {
            List<String> labels = filter.get(FilterInput.Key.LABELS);
            for (String label : labels) {
                // Parse each selector once per label, not once per recording.
                var matcher = LabelSelectorMatcher.parse(label);
                result =
                        result.stream()
                                .filter(r -> matcher.test(r.getMetadata().getLabels()))
                                .collect(Collectors.toList());
            }
        }
        return result;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,41 +44,64 @@

import javax.inject.Inject;

import io.cryostat.net.web.http.api.v2.graph.ArchivedRecordingsFetcher.Archived;
import io.cryostat.net.web.http.api.v2.graph.RecordingsFetcher.Recordings;
import io.cryostat.net.web.http.api.v2.graph.labels.LabelSelectorMatcher;
import io.cryostat.rules.ArchivedRecordingInfo;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;

class ArchivedRecordingsFetcher implements DataFetcher<List<ArchivedRecordingInfo>> {
@SuppressFBWarnings(
value = "URF_UNREAD_FIELD",
justification =
"The Archived and AggregateInfo fields are serialized and returned to the client by"
+ " the GraphQL engine")
class ArchivedRecordingsFetcher implements DataFetcher<Archived> {

@Inject
ArchivedRecordingsFetcher() {}

public List<ArchivedRecordingInfo> get(DataFetchingEnvironment environment) throws Exception {
public Archived get(DataFetchingEnvironment environment) throws Exception {
Recordings source = environment.getSource();
FilterInput filter = FilterInput.from(environment);
List<ArchivedRecordingInfo> result = new ArrayList<>(source.archived);
List<ArchivedRecordingInfo> recordings = new ArrayList<>(source.archived);
if (filter.contains(FilterInput.Key.NAME)) {
String recordingName = filter.get(FilterInput.Key.NAME);
result =
result.stream()
recordings =
recordings.stream()
.filter(r -> Objects.equals(r.getName(), recordingName))
.collect(Collectors.toList());
}
if (filter.contains(FilterInput.Key.LABELS)) {
List<String> labels = filter.get(FilterInput.Key.LABELS);
for (String label : labels) {
result =
result.stream()
recordings =
recordings.stream()
.filter(
r ->
LabelSelectorMatcher.parse(label)
.test(r.getMetadata().getLabels()))
.collect(Collectors.toList());
}
}
return result;

Archived archived = new Archived();
AggregateInfo aggregate = new AggregateInfo();
archived.data = recordings;
aggregate.count = Long.valueOf(archived.data.size());
archived.aggregate = aggregate;

return archived;
}

static class Archived {
List<ArchivedRecordingInfo> data;
AggregateInfo aggregate;
}

static class AggregateInfo {
Long count;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ enum Key {
NAME("name"),
LABELS("labels"),
ANNOTATIONS("annotations"),
SOURCE_TARGET("sourceTarget"),
NODE_TYPE("nodeType"),
STATE("state"),
CONTINUOUS("continuous"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ static GraphQL provideGraphQL(
RecordingsFetcher recordingsFetcher,
ActiveRecordingsFetcher activeRecordingsFetcher,
ArchivedRecordingsFetcher archivedRecordingsFetcher,
AllArchivedRecordingsFetcher allArchivedRecordingsFetcher,
StartRecordingOnTargetMutator startRecordingOnTargetMutator,
SnapshotOnTargetMutator snapshotOnTargetMutator,
StopRecordingMutator stopRecordingMutator,
Expand Down Expand Up @@ -129,6 +130,10 @@ static GraphQL provideGraphQL(
.type(
TypeRuntimeWiring.newTypeWiring("Query")
.dataFetcher("targetNodes", targetNodesFetcher))
.type(
TypeRuntimeWiring.newTypeWiring("Query")
.dataFetcher(
"archivedRecordings", allArchivedRecordingsFetcher))
.type(
TypeRuntimeWiring.newTypeWiring("EnvironmentNode")
.dataFetcher("children", nodeChildrenFetcher))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,49 +102,59 @@ public Recordings get(DataFetchingEnvironment environment) throws Exception {
String targetId = target.getServiceUri().toString();
Recordings recordings = new Recordings();

ConnectionDescriptor cd =
new ConnectionDescriptor(targetId, credentialsManager.getCredentials(target));
// FIXME populating these two struct members are each async tasks. we should do them in
// parallel
recordings.archived = archiveHelper.getRecordings(targetId).get();
recordings.active =
tcm.executeConnectedTask(
cd,
conn -> {
return conn.getService().getAvailableRecordings().stream()
.map(
r -> {
try {
String downloadUrl =
webServer
.get()
.getDownloadURL(
conn, r.getName());
String reportUrl =
webServer
.get()
.getReportURL(
conn, r.getName());
Metadata metadata =
metadataManager.getMetadata(
targetId, r.getName());
return new GraphRecordingDescriptor(
target,
r,
downloadUrl,
reportUrl,
metadata);
} catch (QuantityConversionException
| URISyntaxException
| IOException e) {
logger.error(e);
return null;
}
})
.filter(Objects::nonNull)
.collect(Collectors.toList());
},
false);
List<String> requestedFields =
environment.getSelectionSet().getFields().stream()
.map(field -> field.getName())
.collect(Collectors.toList());

if (requestedFields.contains("active")) {
ConnectionDescriptor cd =
new ConnectionDescriptor(targetId, credentialsManager.getCredentials(target));
// FIXME populating these two struct members are each async tasks. we should do them in
// parallel
recordings.active =
tcm.executeConnectedTask(
cd,
conn -> {
return conn.getService().getAvailableRecordings().stream()
.map(
r -> {
try {
String downloadUrl =
webServer
.get()
.getDownloadURL(
conn, r.getName());
String reportUrl =
webServer
.get()
.getReportURL(
conn, r.getName());
Metadata metadata =
metadataManager.getMetadata(
targetId, r.getName());
return new GraphRecordingDescriptor(
target,
r,
downloadUrl,
reportUrl,
metadata);
} catch (QuantityConversionException
| URISyntaxException
| IOException e) {
logger.error(e);
return null;
}
})
.filter(Objects::nonNull)
.collect(Collectors.toList());
},
false);
}

if (requestedFields.contains("archived")) {
recordings.archived = archiveHelper.getRecordings(targetId).get();
}

return recordings;
}
Expand Down
13 changes: 8 additions & 5 deletions src/main/java/io/cryostat/recordings/RecordingArchiveHelper.java
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,8 @@ public class RecordingArchiveHelper {
private static final String SAVE_NOTIFICATION_CATEGORY = "ActiveRecordingSaved";
private static final String DELETE_NOTIFICATION_CATEGORY = "ArchivedRecordingDeleted";

public static final String UNLABELLED = "unlabelled";
public static final String ARCHIVES = "archives";
public static final String UPLOADED_RECORDINGS_SUBDIRECTORY = "uploads";

RecordingArchiveHelper(
FileSystem fs,
Expand Down Expand Up @@ -220,7 +220,7 @@ public Future<ArchivedRecordingInfo> deleteRecording(String recordingName) {
ARCHIVES, recordingName));
String subdirectoryName = parentPath.getFileName().toString();
String targetId =
(subdirectoryName.equals(UNLABELLED))
(subdirectoryName.equals(UPLOADED_RECORDINGS_SUBDIRECTORY))
? ""
: new String(base32.decode(subdirectoryName), StandardCharsets.UTF_8);
notificationFactory
Expand Down Expand Up @@ -273,8 +273,11 @@ public Path getCachedReportPath(String recordingName) {
public Future<List<ArchivedRecordingInfo>> getRecordings(String targetId) {
CompletableFuture<List<ArchivedRecordingInfo>> future = new CompletableFuture<>();

String encodedServiceUri = base32.encodeAsString(targetId.getBytes(StandardCharsets.UTF_8));
Path specificRecordingsPath = archivedRecordingsPath.resolve(encodedServiceUri);
String subdirectory =
targetId.equals(UPLOADED_RECORDINGS_SUBDIRECTORY)
? targetId
: base32.encodeAsString(targetId.getBytes(StandardCharsets.UTF_8));
Path specificRecordingsPath = archivedRecordingsPath.resolve(subdirectory);

try {
if (!fs.exists(archivedRecordingsPath)) {
Expand Down Expand Up @@ -308,7 +311,7 @@ public Future<List<ArchivedRecordingInfo>> getRecordings(String targetId) {
file -> {
try {
return new ArchivedRecordingInfo(
encodedServiceUri,
subdirectory,
file,
webServer.getArchivedDownloadURL(file),
webServer.getArchivedReportURL(file),
Expand Down
3 changes: 2 additions & 1 deletion src/main/resources/queries.graphqls
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
type Query {
rootNode: EnvironmentNode!
environmentNodes(filter: EnvironmentNodeFilterInput): [EnvironmentNode!]!
targetNodes(filter: TargetNodesFilterInput): [TargetNode!]! # TODO add filters for names, labels, annotations
targetNodes(filter: TargetNodesFilterInput): [TargetNode!]!
archivedRecordings(filter: ArchivedRecordingFilterInput): [ArchivedRecording!]!
}
12 changes: 11 additions & 1 deletion src/main/resources/types.graphqls
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ input ActiveRecordingFilterInput {
input ArchivedRecordingFilterInput {
name: String
labels: [String]
sourceTarget: String
}

type ServiceRef {
Expand Down Expand Up @@ -82,7 +83,7 @@ interface Node {

type Recordings {
active(filter: ActiveRecordingFilterInput): [ActiveRecording!]!
archived(filter: ArchivedRecordingFilterInput): [ArchivedRecording!]!
archived(filter: ArchivedRecordingFilterInput): Archived!
}

type ActiveRecording implements Recording {
Expand Down Expand Up @@ -114,6 +115,15 @@ type ArchivedRecording implements Recording {
doDelete: ArchivedRecording!
}

type Archived {
data: [ArchivedRecording!]!
aggregate: AggregateInfo!
}

type AggregateInfo {
count: Long!
}

interface Recording {
name: String!
reportUrl: Url!
Expand Down
Loading

0 comments on commit 3961fec

Please sign in to comment.