diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1cd0e18db9..898a65cde0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,12 +4,7 @@
 
 ### Features
 
-- Add `enableDartSymbolication` option to Sentry.init() for **Flutter iOS, macOS and Android** ([#2256](https://github.com/getsentry/sentry-dart/pull/2256))
-  - This flag enables symbolication of Dart stack traces when native debug images are not available.
-  - Useful when using Sentry.init() instead of SentryFlutter.init() in Flutter projects for example due to size limitations.
-  - `true` by default but automatically set to `false` when using SentryFlutter.init() because the SentryFlutter fetches debug images from the native SDK integrations.
-- Support allowUrls and denyUrls for Flutter Web ([#2227](https://github.com/getsentry/sentry-dart/pull/2227))
-- Session replay Alpha for Android and iOS ([#2208](https://github.com/getsentry/sentry-dart/pull/2208), [#2269](https://github.com/getsentry/sentry-dart/pull/2269), [#2236](https://github.com/getsentry/sentry-dart/pull/2236)).
+- Session replay Alpha for Android and iOS ([#2208](https://github.com/getsentry/sentry-dart/pull/2208), [#2269](https://github.com/getsentry/sentry-dart/pull/2269), [#2236](https://github.com/getsentry/sentry-dart/pull/2236), [#2275](https://github.com/getsentry/sentry-dart/pull/2275)).
 
   To try out replay, you can set following options (access is limited to early access orgs on Sentry. If you're interested, [sign up for the waitlist](https://sentry.io/lp/mobile-replay-beta/)):
 
@@ -39,6 +34,10 @@
   ```
 
 - Collect touch breadcrumbs for all buttons, not just those with `key` specified. ([#2242](https://github.com/getsentry/sentry-dart/pull/2242))
+- Add `enableDartSymbolication` option to Sentry.init() for **Flutter iOS, macOS and Android** ([#2256](https://github.com/getsentry/sentry-dart/pull/2256))
+  - This flag enables symbolication of Dart stack traces when native debug images are not available.
+  - Useful when using Sentry.init() instead of SentryFlutter.init() in Flutter projects for example due to size limitations.
+  - `true` by default but automatically set to `false` when using SentryFlutter.init() because the SentryFlutter fetches debug images from the native SDK integrations.
 
 ### Dependencies
 
diff --git a/flutter/lib/src/native/java/sentry_native_java.dart b/flutter/lib/src/native/java/sentry_native_java.dart
index 5ccd3a1c67..df5e11b814 100644
--- a/flutter/lib/src/native/java/sentry_native_java.dart
+++ b/flutter/lib/src/native/java/sentry_native_java.dart
@@ -1,5 +1,6 @@
 import 'dart:ui';
 
+import 'package:flutter/services.dart';
 import 'package:meta/meta.dart';
 
 import '../../../sentry_flutter.dart';
@@ -13,6 +14,8 @@ import '../sentry_native_channel.dart';
 @internal
 class SentryNativeJava extends SentryNativeChannel {
   ScheduledScreenshotRecorder? _replayRecorder;
+  String? _replayCacheDir;
+  _IdleFrameFiller? _idleFrameFiller;
   SentryNativeJava(super.options, super.channel);
 
   @override
@@ -47,8 +50,7 @@ class SentryNativeJava extends SentryNativeChannel {
 
           break;
         case 'ReplayRecorder.stop':
-          await _replayRecorder?.stop();
-          _replayRecorder = null;
+          await _stopRecorder();
 
           hub.configureScope((s) {
             // ignore: invalid_use_of_internal_member
@@ -58,9 +60,11 @@
           break;
         case 'ReplayRecorder.pause':
          await _replayRecorder?.stop();
+          await _idleFrameFiller?.pause();
          break;
        case 'ReplayRecorder.resume':
          _replayRecorder?.start();
+          await _idleFrameFiller?.resume();
          break;
        default:
          throw UnimplementedError('Method ${call.method} not implemented');
      }
@@ -73,13 +77,22 @@
 
   @override
   Future<void> close() async {
+    await _stopRecorder();
+    return super.close();
+  }
+
+  Future<void> _stopRecorder() async {
     await _replayRecorder?.stop();
+    await _idleFrameFiller?.stop();
     _replayRecorder = null;
-    return super.close();
+    _idleFrameFiller = null;
   }
 
   void _startRecorder(
       String cacheDir, ScheduledScreenshotRecorderConfig config) {
+    _idleFrameFiller = _IdleFrameFiller(
+        Duration(milliseconds: 1000 ~/ config.frameRate), _addReplayScreenshot);
+
     // Note: time measurements using a Stopwatch in a debug build:
     // save as rawRgba (1230876 bytes): 0.257 ms -- discarded
     // save as PNG (25401 bytes): 43.110 ms -- used for the final image
@@ -90,39 +103,107 @@
     ScreenshotRecorderCallback callback = (image) async {
       var imageData = await image.toByteData(format: ImageByteFormat.png);
       if (imageData != null) {
-        final timestamp = DateTime.now().millisecondsSinceEpoch;
-        final filePath = "$cacheDir/$timestamp.png";
-
-        options.logger(
-            SentryLevel.debug,
-            'Replay: Saving screenshot to $filePath ('
-            '${image.width}x${image.height} pixels, '
-            '${imageData.lengthInBytes} bytes)');
-        try {
-          await options.fileSystem
-              .file(filePath)
-              .writeAsBytes(imageData.buffer.asUint8List(), flush: true);
-
-          await channel.invokeMethod(
-            'addReplayScreenshot',
-            {'path': filePath, 'timestamp': timestamp},
-          );
-        } catch (error, stackTrace) {
-          options.logger(
-            SentryLevel.error,
-            'Native call `addReplayScreenshot` failed',
-            exception: error,
-            stackTrace: stackTrace,
-          );
-          // ignore: invalid_use_of_internal_member
-          if (options.automatedTestMode) {
-            rethrow;
-          }
-        }
+        final screenshot = _Screenshot(image.width, image.height, imageData);
+        await _addReplayScreenshot(screenshot);
+        _idleFrameFiller?.actualFrameReceived(screenshot);
       }
     };
 
+    _replayCacheDir = cacheDir;
     _replayRecorder = ScheduledScreenshotRecorder(config, callback, options)
       ..start();
   }
+
+  Future<void> _addReplayScreenshot(_Screenshot screenshot) async {
+    final timestamp = DateTime.now().millisecondsSinceEpoch;
+    final filePath = "$_replayCacheDir/$timestamp.png";
+
+    options.logger(
+        SentryLevel.debug,
+        'Replay: Saving screenshot to $filePath ('
+        '${screenshot.width}x${screenshot.height} pixels, '
+        '${screenshot.data.lengthInBytes} bytes)');
+    try {
+      await options.fileSystem
+          .file(filePath)
+          .writeAsBytes(screenshot.data.buffer.asUint8List(), flush: true);
+
+      await channel.invokeMethod(
+        'addReplayScreenshot',
+        {'path': filePath, 'timestamp': timestamp},
+      );
+    } catch (error, stackTrace) {
+      options.logger(
+        SentryLevel.error,
+        'Native call `addReplayScreenshot` failed',
+        exception: error,
+        stackTrace: stackTrace,
+      );
+      // ignore: invalid_use_of_internal_member
+      if (options.automatedTestMode) {
+        rethrow;
+      }
+    }
+  }
+}
+
+class _Screenshot {
+  final int width;
+  final int height;
+  final ByteData data;
+
+  _Screenshot(this.width, this.height, this.data);
+}
+
+// Workaround for https://github.com/getsentry/sentry-java/issues/3677
+// In short: when there are no postFrameCallbacks issued by Flutter (because
+// there are no animations or user interactions), the replay recorder will
+// need to get screenshots at a fixed frame rate. This class is responsible for
+// filling the gaps between actual frames with the most recent frame.
+class _IdleFrameFiller {
+  final Duration _interval;
+  final Future<void> Function(_Screenshot screenshot) _callback;
+  bool _running = true;
+  Future<void>? _scheduled;
+  _Screenshot? _mostRecent;
+
+  _IdleFrameFiller(this._interval, this._callback);
+
+  void actualFrameReceived(_Screenshot screenshot) {
+    // We store the most recent frame but only repost it when the most recent
+    // one is the same instance (unchanged).
+    _mostRecent = screenshot;
+    // Also, the initial reposted frame will be delayed to allow actual frames
+    // to cancel the reposting.
+    repostLater(_interval * 1.5, screenshot);
+  }
+
+  Future<void> stop() async {
+    // Clearing [_mostRecent] stops the delayed callback from posting the image.
+    _mostRecent = null;
+    _running = false;
+    await _scheduled;
+    _scheduled = null;
+  }
+
+  Future<void> pause() async {
+    _running = false;
+  }
+
+  Future<void> resume() async {
+    _running = true;
+  }
+
+  void repostLater(Duration delay, _Screenshot screenshot) {
+    _scheduled = Future.delayed(delay, () async {
+      // Only repost if the screenshot hasn't changed.
+      if (screenshot == _mostRecent) {
+        if (_running) {
+          await _callback(screenshot);
+        }
+        // On subsequent frames, we stick to the actual frame rate.
+        repostLater(_interval, screenshot);
+      }
+    });
+  }
 }
diff --git a/flutter/test/replay/replay_native_test.dart b/flutter/test/replay/replay_native_test.dart
index 319bb5f88f..81c6f456c3 100644
--- a/flutter/test/replay/replay_native_test.dart
+++ b/flutter/test/replay/replay_native_test.dart
@@ -46,7 +46,7 @@ void main() {
         'directory': 'dir',
         'width': 800,
         'height': 600,
-        'frameRate': 1000,
+        'frameRate': 10,
       };
     }
 
@@ -142,16 +142,15 @@
      var callbackFinished = Completer();
 
      nextFrame({bool wait = true}) async {
+        final future = callbackFinished.future;
        tester.binding.scheduleFrame();
-        await Future.delayed(const Duration(milliseconds: 100));
        await tester.pumpAndSettle(const Duration(seconds: 1));
-        await callbackFinished.future.timeout(
-            Duration(milliseconds: wait ? 1000 : 100), onTimeout: () {
+        await future.timeout(Duration(milliseconds: wait ? 1000 : 100),
+            onTimeout: () {
          if (wait) {
            fail('native callback not called');
          }
        });
-        callbackFinished = Completer();
      }
 
      imageInfo(File file) => file.readAsBytesSync().length;
@@ -162,10 +161,11 @@
      final capturedImages = {};
      when(native.handler('addReplayScreenshot', any))
          .thenAnswer((invocation) async {
-        callbackFinished.complete();
-
        final path = invocation.positionalArguments[1]["path"] as String;
        capturedImages[path] = imageInfo(fs.file(path));
+
+        callbackFinished.complete();
+        callbackFinished = Completer();
        return null;
      });
 
@@ -191,18 +191,37 @@
 
        expect(capturedImages, equals(fsImages()));
 
        await nextFrame();
-        expect(fsImages().values, [size, size]);
+        fsImages().values.forEach((s) => expect(s, size));
        expect(capturedImages, equals(fsImages()));
 
-        await native.invokeFromNative('ReplayRecorder.stop');
+        await native.invokeFromNative('ReplayRecorder.pause');
+        var count = capturedImages.length;
+
+        await nextFrame(wait: false);
+        await Future.delayed(const Duration(milliseconds: 100));
+        fsImages().values.forEach((s) => expect(s, size));
+        expect(capturedImages, equals(fsImages()));
+        expect(capturedImages.length, count);
 
        await nextFrame(wait: false);
-        expect(fsImages().values, [size, size]);
+        fsImages().values.forEach((s) => expect(s, size));
        expect(capturedImages, equals(fsImages()));
+        expect(capturedImages.length, count);
 
+        await native.invokeFromNative('ReplayRecorder.resume');
+
+        await nextFrame();
+        fsImages().values.forEach((s) => expect(s, size));
+        expect(capturedImages, equals(fsImages()));
+        expect(capturedImages.length, greaterThan(count));
+
+        await native.invokeFromNative('ReplayRecorder.stop');
+        count = capturedImages.length;
+        await Future.delayed(const Duration(milliseconds: 100));
        await nextFrame(wait: false);
-        expect(fsImages().values, [size, size]);
+        fsImages().values.forEach((s) => expect(s, size));
        expect(capturedImages, equals(fsImages()));
+        expect(capturedImages.length, count);
      } else if (mockPlatform.isIOS) {
        // configureScope() is called on iOS
        when(hub.configureScope(captureAny)).thenReturn(null);
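For illustration only, outside the patch itself: a minimal, self-contained sketch of the idle-frame refill pattern that the new `_IdleFrameFiller` class implements, assuming a fixed target interval. While Flutter emits no new frames, the most recent screenshot is re-posted at that interval so the replay video has no gaps. The names `IdleRefill` and `onFrame` are illustrative, not SDK API.

```dart
import 'dart:async';

// Illustrative sketch of the gap-filling idea; not the SDK implementation.
class IdleRefill {
  IdleRefill(this.interval, this.onFrame);

  final Duration interval;
  final Future<void> Function(String frame) onFrame;
  String? _latest;

  // Called for every real frame; schedules a delayed re-post that dies
  // silently once a newer frame replaces [_latest].
  void realFrame(String frame) {
    _latest = frame;
    _repostLater(interval * 1.5, frame);
  }

  // Clearing [_latest] cancels any pending re-post.
  void stop() => _latest = null;

  void _repostLater(Duration delay, String frame) {
    Future<void>.delayed(delay, () async {
      if (identical(frame, _latest)) {
        await onFrame(frame); // still the newest frame, so fill the gap
        _repostLater(interval, frame); // keep filling at the target rate
      }
    });
  }
}

Future<void> main() async {
  final refill = IdleRefill(const Duration(milliseconds: 100), (frame) async {
    print('re-posted $frame');
  });
  refill.realFrame('screenshot-0001.png');
  await Future<void>.delayed(const Duration(milliseconds: 400));
  refill.stop(); // nothing is posted after this
}
```

Re-posting is keyed on object identity, so a newly captured frame implicitly cancels the pending re-post without holding an explicit timer handle, which is the same design choice the patched code relies on.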