diff --git a/.dockerignore b/.dockerignore index 289446be7650b1..64aadf0e405875 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,6 +3,7 @@ /.git /.gitignore /.venv +/.webpack_cache /fixtures /node_modules /tests diff --git a/.eslintrc.js b/.eslintrc.js index acb2dfc66cf476..dfb3482ca6aa79 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -2,17 +2,804 @@ const detectDeprecations = !!process.env.SENTRY_DETECT_DEPRECATIONS; +const baseRules = { + /** + * Strict mode + */ + // https://eslint.org/docs/rules/strict + strict: ['error', 'global'], + + /** + * Variables + */ + // https://eslint.org/docs/rules/no-shadow-restricted-names + 'no-shadow-restricted-names': ['error'], + + /** + * Possible errors + */ + // https://eslint.org/docs/rules/no-cond-assign + 'no-cond-assign': ['error', 'always'], + + // https://eslint.org/docs/rules/no-alert + 'no-alert': ['error'], + + // https://eslint.org/docs/rules/no-constant-condition + 'no-constant-condition': ['warn'], + + // https://eslint.org/docs/rules/no-empty + 'no-empty': ['error'], + + // https://eslint.org/docs/rules/no-ex-assign + 'no-ex-assign': ['error'], + + // https://eslint.org/docs/rules/no-extra-boolean-cast + 'no-extra-boolean-cast': ['error'], + + // https://eslint.org/docs/rules/no-func-assign + 'no-func-assign': ['error'], + + // https://eslint.org/docs/rules/no-inner-declarations + 'no-inner-declarations': ['error'], + + // https://eslint.org/docs/rules/no-invalid-regexp + 'no-invalid-regexp': ['error'], + + // https://eslint.org/docs/rules/no-irregular-whitespace + 'no-irregular-whitespace': ['error'], + + // https://eslint.org/docs/rules/no-obj-calls + 'no-obj-calls': ['error'], + + // https://eslint.org/docs/rules/no-sparse-arrays + 'no-sparse-arrays': ['error'], + + // https://eslint.org/docs/rules/block-scoped-var + 'block-scoped-var': ['error'], + + /** + * Best practices + */ + // https://eslint.org/docs/rules/consistent-return + 'consistent-return': ['error'], + + // https://eslint.org/docs/rules/default-case + 'default-case': ['error'], + + // https://eslint.org/docs/rules/dot-notation + 'dot-notation': [ + 'error', + { + allowKeywords: true, + }, + ], + + // https://eslint.org/docs/rules/guard-for-in [REVISIT ME] + 'guard-for-in': ['off'], + + // https://eslint.org/docs/rules/no-caller + 'no-caller': ['error'], + + // https://eslint.org/docs/rules/no-eval + 'no-eval': ['error'], + + // https://eslint.org/docs/rules/no-extend-native + 'no-extend-native': ['error'], + + // https://eslint.org/docs/rules/no-extra-bind + 'no-extra-bind': ['error'], + + // https://eslint.org/docs/rules/no-fallthrough + 'no-fallthrough': ['error'], + + // https://eslint.org/docs/rules/no-floating-decimal + 'no-floating-decimal': ['error'], + + // https://eslint.org/docs/rules/no-implied-eval + 'no-implied-eval': ['error'], + + // https://eslint.org/docs/rules/no-lone-blocks + 'no-lone-blocks': ['error'], + + // https://eslint.org/docs/rules/no-loop-func + 'no-loop-func': ['error'], + + // https://eslint.org/docs/rules/no-multi-str + 'no-multi-str': ['error'], + + // https://eslint.org/docs/rules/no-native-reassign + 'no-native-reassign': ['error'], + + // https://eslint.org/docs/rules/no-new + 'no-new': ['error'], + + // https://eslint.org/docs/rules/no-new-func + 'no-new-func': ['error'], + + // https://eslint.org/docs/rules/no-new-wrappers + 'no-new-wrappers': ['error'], + + // https://eslint.org/docs/rules/no-octal + 'no-octal': ['error'], + + // https://eslint.org/docs/rules/no-octal-escape + 'no-octal-escape': ['error'], + + // 
https://eslint.org/docs/rules/no-param-reassign [REVISIT ME] + 'no-param-reassign': ['off'], + + // https://eslint.org/docs/rules/no-proto + 'no-proto': ['error'], + + // https://eslint.org/docs/rules/no-return-assign + 'no-return-assign': ['error'], + + // https://eslint.org/docs/rules/no-script-url + 'no-script-url': ['error'], + + // https://eslint.org/docs/rules/no-self-compare + 'no-self-compare': ['error'], + + // https://eslint.org/docs/rules/no-sequences + 'no-sequences': ['error'], + + // https://eslint.org/docs/rules/no-throw-literal + 'no-throw-literal': ['error'], + + // https://eslint.org/docs/rules/no-with + 'no-with': ['error'], + + // https://eslint.org/docs/rules/radix + radix: ['error'], + + // https://eslint.org/docs/rules/space-in-brackets.html + 'computed-property-spacing': ['error', 'never'], + + // https://eslint.org/docs/rules/space-in-brackets.html + 'array-bracket-spacing': ['error', 'never'], + + // https://eslint.org/docs/rules/space-in-brackets.html + 'object-curly-spacing': ['error', 'never'], + + // https://eslint.org/docs/rules/object-shorthand + 'object-shorthand': ['error', 'properties'], + + // https://eslint.org/docs/rules/space-infix-ops.html + 'space-infix-ops': ['error'], + + // https://eslint.org/docs/rules/vars-on-top + 'vars-on-top': ['off'], + + // https://eslint.org/docs/rules/wrap-iife + 'wrap-iife': ['error', 'any'], + + // https://eslint.org/docs/rules/array-callback-return + 'array-callback-return': ['error'], + + // https://eslint.org/docs/rules/yoda + yoda: ['error'], + + // https://eslint.org/docs/rules/no-else-return + 'no-else-return': ['error', {allowElseIf: false}], + + // https://eslint.org/docs/rules/require-await + 'require-await': ['error'], + + // https://eslint.org/docs/rules/multiline-comment-style + 'multiline-comment-style': ['error', 'separate-lines'], + + // https://eslint.org/docs/rules/spaced-comment + 'spaced-comment': [ + 'error', + 'always', + { + line: {markers: ['/'], exceptions: ['-', '+']}, + block: {exceptions: ['*'], balanced: true}, + }, + ], +}; + +const reactReactRules = { + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/display-name.md + 'react/display-name': ['off'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-multi-comp.md + 'react/no-multi-comp': [ + 'off', + { + ignoreStateless: true, + }, + ], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-fragments.md + 'react/jsx-fragments': ['error', 'element'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-handler-names.md + // Ensures that any component or prop methods used to handle events are correctly prefixed. + 'react/jsx-handler-names': [ + 'off', + { + eventHandlerPrefix: 'handle', + eventHandlerPropPrefix: 'on', + }, + ], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-key.md + 'react/jsx-key': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-no-undef.md + 'react/jsx-no-undef': ['error'], + + // Disabled as we use the newer JSX transform babel plugin. 
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-react.md + 'react/jsx-uses-react': ['off'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-uses-vars.md + 'react/jsx-uses-vars': ['error'], + + /** + * Deprecation related rules + */ + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-deprecated.md + 'react/no-deprecated': ['error'], + + // Prevent usage of the return value of React.render + // deprecation: https://facebook.github.io/react/docs/react-dom.html#render + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-render-return-value.md + 'react/no-render-return-value': ['error'], + + // Children should always be actual children, not passed in as a prop. + // When using JSX, the children should be nested between the opening and closing tags. When not using JSX, the children should be passed as additional arguments to React.createElement. + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-children-prop.md + 'react/no-children-prop': ['error'], + + // This rule helps prevent problems caused by using children and the dangerouslySetInnerHTML prop at the same time. + // React will throw a warning if this rule is ignored. + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-danger-with-children.md + 'react/no-danger-with-children': ['error'], + + // Prevent direct mutation of this.state + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-direct-mutation-state.md + 'react/no-direct-mutation-state': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-mount-set-state.md + 'react/no-did-mount-set-state': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-did-update-set-state.md" + 'react/no-did-update-set-state': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-redundant-should-component-update.md + 'react/no-redundant-should-component-update': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-typos.md + 'react/no-typos': ['error'], + + // Prevent invalid characters from appearing in markup + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unescaped-entities.md + 'react/no-unescaped-entities': ['off'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unknown-property.md + 'react/no-unknown-property': ['error', {ignore: ['css']}], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-unused-prop-types.md + // Disabled since this currently fails to correctly detect a lot of + // typescript prop type usage. + 'react/no-unused-prop-types': ['off'], + + // We do not need proptypes since we're using typescript + 'react/prop-types': ['off'], + + // When writing the render method in a component it is easy to forget to return the JSX content. + // This rule will warn if the return statement is missing. + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/require-render-return.md + 'react/require-render-return': ['error'], + + // Disabled as we are using the newer JSX transform babel plugin. 
+ // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/react-in-jsx-scope.md + 'react/react-in-jsx-scope': ['off'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/self-closing-comp.md + 'react/self-closing-comp': ['error'], + + // This also causes issues with typescript + // See: https://github.com/yannickcr/eslint-plugin-react/issues/2066 + // + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/sort-comp.md + 'react/sort-comp': ['warn'], + + // Disabled because of prettier + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/wrap-multilines.md + 'react/jsx-wrap-multilines': ['off'], + + // Consistent (never add ={true}) + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-boolean-value.md + 'react/jsx-boolean-value': ['error', 'never'], + + // Consistent function component declaration styles + // https://github.com/jsx-eslint/eslint-plugin-react/blob/master/docs/rules/function-component-definition.md + 'react/function-component-definition': [ + 'error', + {namedComponents: 'function-declaration'}, + ], +}; + +const reactImportRules = { + // Not recommended to be enabled with typescript-eslint + // https://typescript-eslint.io/linting/troubleshooting/performance-troubleshooting/#eslint-plugin-import + 'import/no-unresolved': ['off'], + 'import/named': ['off'], + 'import/default': ['off'], + 'import/export': ['off'], + 'import/no-named-as-default-member': ['off'], + + // Redflags + // do not allow a default import name to match a named export (airbnb: error) + // Issue with `DefaultIssuePlugin` and `app/plugins/index` + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-named-as-default.md + 'import/no-named-as-default': ['off'], + + // disallow use of jsdoc-marked-deprecated imports + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-deprecated.md + 'import/no-deprecated': ['off'], + + // Forbid mutable exports (airbnb: error) + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-mutable-exports.md + // TODO: enable? 
+ 'import/no-mutable-exports': ['off'], + + // disallow require() + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-commonjs.md + 'import/no-commonjs': ['off'], + + // disallow AMD require/define + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-amd.md + 'import/no-amd': ['error'], + + // disallow duplicate imports + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-duplicates.md + 'import/no-duplicates': ['error'], + + // disallow namespace imports + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-namespace.md + 'import/no-namespace': ['off'], + + // Ensure consistent use of file extension within the import path + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/extensions.md + // TODO this fucks up getsentry + 'import/extensions': [ + 'off', + 'always', + { + js: 'never', + jsx: 'never', + }, + ], + + // Require a newline after the last import/require in a group + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/newline-after-import.md + 'import/newline-after-import': ['error'], + + // Require modules with a single export to use a default export (airbnb: error) + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/prefer-default-export.md + 'import/prefer-default-export': ['off'], + + // Restrict which files can be imported in a given folder + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-restricted-paths.md + 'import/no-restricted-paths': ['off'], + + // Forbid modules to have too many dependencies + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/max-dependencies.md + 'import/max-dependencies': ['off', {max: 10}], + + // Forbid import of modules using absolute paths + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-absolute-path.md + 'import/no-absolute-path': ['error'], + + // Forbid require() calls with expressions (airbnb: error) + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-dynamic-require.md + 'import/no-dynamic-require': ['off'], + + // Use webpack default chunk names + 'import/dynamic-import-chunkname': ['off'], + + // prevent importing the submodules of other modules + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-internal-modules.md + 'import/no-internal-modules': [ + 'off', + { + allow: [], + }, + ], + + // Warn if a module could be mistakenly parsed as a script by a consumer + // leveraging Unambiguous JavaScript Grammar + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/unambiguous.md + // this should not be enabled until this proposal has at least been *presented* to TC39. + // At the moment, it"s not a thing. + 'import/unambiguous': ['off'], + + // Forbid Webpack loader syntax in imports + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-webpack-loader-syntax.md + 'import/no-webpack-loader-syntax': ['error'], + + // Prevent unassigned imports + // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-unassigned-import.md + // importing for side effects is perfectly acceptable, if you need side effects. 
+  'import/no-unassigned-import': ['off'],
+
+  // Prevent importing the default as if it were named
+  // https://github.com/benmosher/eslint-plugin-import/blob/master/docs/rules/no-named-default.md
+  'import/no-named-default': ['error'],
+
+  // Reports if a module's default export is unnamed
+  // https://github.com/benmosher/eslint-plugin-import/blob/d9b712ac7fd1fddc391f7b234827925c160d956f/docs/rules/no-anonymous-default-export.md
+  'import/no-anonymous-default-export': [
+    'error',
+    {
+      allowArray: false,
+      allowArrowFunction: false,
+      allowAnonymousClass: false,
+      allowAnonymousFunction: false,
+      allowCallExpression: true,
+      allowLiteral: false,
+      allowObject: false,
+    },
+  ],
+};
+
+const reactJestRules = {
+  'jest/no-disabled-tests': 'error',
+};
+
+const reactRules = {
+  ...reactReactRules,
+  ...reactImportRules,
+  ...reactJestRules,
+  /**
+   * React hooks
+   */
+  'react-hooks/exhaustive-deps': 'error',
+  // Biome not yet enforcing all parts of this rule https://github.com/biomejs/biome/issues/1984
+  'react-hooks/rules-of-hooks': 'error',
+
+  /**
+   * Custom
+   */
+  // highlights literals in JSX components w/o translation tags
+  'getsentry/jsx-needs-il8n': ['off'],
+  'testing-library/render-result-naming-convention': 'off',
+  'testing-library/no-unnecessary-act': 'off',
+
+  // Disabled as we have many tests which render as simple validations
+  'jest/expect-expect': 'off',
+
+  // Disabled as we have some commented-out tests that cannot be
+  // uncommented due to typescript errors.
+  'jest/no-commented-out-tests': 'off',
+
+  // Disabled as we do sometimes have conditional expects
+  'jest/no-conditional-expect': 'off',
+
+  // Useful for exporting some test utilities
+  'jest/no-export': 'off',
+
+  'typescript-sort-keys/interface': [
+    'error',
+    'asc',
+    {caseSensitive: true, natural: false, requiredFirst: true},
+  ],
+};
+
+const appRules = {
+  /**
+   * emotion rules for v10
+   *
+   * These probably aren't as necessary anymore, but let's remove them when we move to v11
+   */
+  '@emotion/jsx-import': 'off',
+  '@emotion/no-vanilla': 'error',
+  '@emotion/import-from-emotion': 'error',
+  '@emotion/styled-import': 'error',
+
+  // no-undef is redundant with typescript as tsc will complain
+  // A downside is that we won't get eslint errors about it, but your editors should
+  // support tsc errors so....
+  // https://eslint.org/docs/rules/no-undef
+  'no-undef': 'off',
+
+  // Let formatter handle this
+  'arrow-body-style': 'off',
+
+  /**
+   * Need to use typescript version of these rules
+   * https://eslint.org/docs/rules/no-shadow
+   */
+  'no-shadow': 'off',
+  '@typescript-eslint/no-shadow': 'error',
+
+  // This only overrides the `args` rule (which is "none"). There are too many errors and it's difficult to manually
+  // fix them all, so we'll have to incrementally update.
+  // https://eslint.org/docs/rules/no-unused-vars
+  'no-unused-vars': 'off',
+  '@typescript-eslint/no-unused-vars': [
+    'error',
+    {
+      vars: 'all',
+      args: 'all',
+      // TODO(scttcper): We could enable this to enforce catch (error)
+      // https://eslint.org/docs/latest/rules/no-unused-vars#caughterrors
+      caughtErrors: 'none',
+
+      // Ignore vars that start with an underscore
+      // e.g.
if you want to omit a property using object spread: + // + // const {name: _name, ...props} = this.props; + // + varsIgnorePattern: '^_', + argsIgnorePattern: '^_', + destructuredArrayIgnorePattern: '^_', + }, + ], + + // https://eslint.org/docs/rules/no-use-before-define + 'no-use-before-define': 'off', + // This seems to have been turned on while previously it had been off + '@typescript-eslint/no-use-before-define': ['off'], + + /** + * Restricted imports, e.g. deprecated libraries, etc + * + * See: https://eslint.org/docs/rules/no-restricted-imports + */ + 'no-restricted-imports': [ + 'error', + { + paths: [ + { + name: 'enzyme', + message: + 'Please import from `sentry-test/enzyme` instead. See: https://github.com/getsentry/frontend-handbook#undefined-theme-properties-in-tests for more information', + }, + { + name: '@testing-library/react', + message: + 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase', + }, + { + name: '@testing-library/react-hooks', + message: + 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase', + }, + { + name: '@testing-library/user-event', + message: + 'Please import from `sentry-test/reactTestingLibrary` instead so that we can ensure consistency throughout the codebase', + }, + { + name: '@sentry/browser', + message: + 'Please import from `@sentry/react` to ensure consistency throughout the codebase.', + }, + { + name: 'marked', + message: + "Please import marked from 'app/utils/marked' so that we can ensure sanitation of marked output", + }, + + { + name: 'lodash', + message: + "Please import lodash utilities individually. e.g. `import isEqual from 'lodash/isEqual';`. See https://github.com/getsentry/frontend-handbook#lodash from for information", + }, + { + name: 'lodash/get', + message: + 'Optional chaining `?.` and nullish coalescing operators `??` are available and preferred over using `lodash/get`. See https://github.com/getsentry/frontend-handbook#new-syntax for more information', + }, + { + name: 'react-bootstrap', + message: + 'Avoid usage of any react-bootstrap components as it will soon be removed', + }, + { + name: 'sentry/utils/theme', + importNames: ['lightColors', 'darkColors'], + message: + "'lightColors' and 'darkColors' exports intended for use in Storybook only. Instead, use theme prop from emotion or the useTheme hook.", + }, + { + name: 'react-router', + importNames: ['withRouter'], + message: + "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.", + }, + { + name: 'sentry/utils/withSentryRouter', + importNames: ['withSentryRouter'], + message: + "Use 'useLocation', 'useParams', 'useNavigate', 'useRoutes' from sentry/utils instead.", + }, + ], + }, + ], + + /** + * Better import sorting + */ + 'sort-imports': 'off', + 'import/order': 'off', + 'simple-import-sort/imports': [ + 'error', + { + groups: [ + // Side effect imports. + ['^\\u0000'], + + // Node.js builtins. + // biome-ignore lint/correctness/noNodejsModules: Need to get the list of things! + [`^(${require('node:module').builtinModules.join('|')})(/|$)`], + + // Packages. `react` related packages come first. + ['^react', '^@?\\w'], + + // Test should be separate from the app + ['^(sentry-test|getsentry-test)(/.*|$)'], + + // Internal packages. + ['^(sentry-locale|sentry-images)(/.*|$)'], + + ['^(getsentry-images)(/.*|$)'], + + ['^(app|sentry)(/.*|$)'], + + // Getsentry packages. 
+ ['^(admin|getsentry)(/.*|$)'], + + // Style imports. + ['^.+\\.less$'], + + // Parent imports. Put `..` last. + ['^\\.\\.(?!/?$)', '^\\.\\./?$'], + + // Other relative imports. Put same-folder imports and `.` last. + ['^\\./(?=.*/)(?!/?$)', '^\\.(?!/?$)', '^\\./?$'], + ], + }, + ], + + 'sentry/no-digits-in-tn': ['error'], + + 'sentry/no-dynamic-translations': ['error'], + + // https://github.com/xojs/eslint-config-xo-typescript/blob/9791a067d6a119a21a4db72c02f1da95e25ffbb6/index.js#L95 + '@typescript-eslint/no-restricted-types': [ + 'error', + { + types: { + // TODO(scttcper): Turn object on to make our types more strict + // object: { + // message: 'The `object` type is hard to use. Use `Record` instead. See: https://github.com/typescript-eslint/typescript-eslint/pull/848', + // fixWith: 'Record' + // }, + Buffer: { + message: + 'Use Uint8Array instead. See: https://sindresorhus.com/blog/goodbye-nodejs-buffer', + suggest: ['Uint8Array'], + }, + '[]': "Don't use the empty array type `[]`. It only allows empty arrays. Use `SomeType[]` instead.", + '[[]]': + "Don't use `[[]]`. It only allows an array with a single element which is an empty array. Use `SomeType[][]` instead.", + '[[[]]]': "Don't use `[[[]]]`. Use `SomeType[][][]` instead.", + }, + }, + ], + // TODO(scttcper): Turn no-empty-object-type on to make our types more strict + // '@typescript-eslint/no-empty-object-type': 'error', + // TODO(scttcper): Turn no-function on to make our types more strict + // '@typescript-eslint/no-unsafe-function-type': 'error', + '@typescript-eslint/no-wrapper-object-types': 'error', + + // Naming convention enforcements + '@typescript-eslint/naming-convention': [ + 'error', + { + selector: 'typeLike', + format: ['PascalCase'], + leadingUnderscore: 'allow', + }, + { + selector: 'enumMember', + format: ['UPPER_CASE'], + }, + ], + + // Don't allow lookbehind expressions in regexp as they crash safari + // We've accidentally used lookbehinds a few times and caused problems. + 'no-lookahead-lookbehind-regexp/no-lookahead-lookbehind-regexp': [ + 'error', + 'no-lookbehind', + 'no-negative-lookbehind', + ], +}; + +const strictRules = { + // https://eslint.org/docs/rules/no-console + 'no-console': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-is-mounted.md + 'react/no-is-mounted': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-find-dom-node.md + // Recommended to use callback refs instead + 'react/no-find-dom-node': ['error'], + + // https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/no-string-refs.md + // This is now considered legacy, callback refs preferred + 'react/no-string-refs': ['error'], + + 'jest/no-large-snapshots': ['error', {maxSize: 2000}], + + 'sentry/no-styled-shortcut': ['error'], +}; + +const extendsList = [ + 'plugin:jest/recommended', + 'plugin:jest-dom/recommended', + 'plugin:import/typescript', +]; +if (detectDeprecations) { + extendsList.push('plugin:deprecation/recommended'); +} + module.exports = { root: true, - extends: detectDeprecations - ? ['sentry-app/strict', 'plugin:deprecation/recommended'] - : ['sentry-app/strict'], + extends: extendsList, + + plugins: [ + 'jest-dom', + 'testing-library', + 'typescript-sort-keys', + 'react-hooks', + '@typescript-eslint', + '@emotion', + 'import', + 'react', + 'sentry', + 'simple-import-sort', + 'no-lookahead-lookbehind-regexp', + ], + + parser: '@typescript-eslint/parser', parserOptions: detectDeprecations ? 
{ + warnOnUnsupportedTypeScriptVersion: false, + ecmaVersion: 6, + sourceType: 'module', + ecmaFeatures: { + jsx: true, + modules: true, + legacyDecorators: true, + }, project: './tsconfig.json', } - : {}, + : { + warnOnUnsupportedTypeScriptVersion: false, + ecmaVersion: 6, + sourceType: 'module', + ecmaFeatures: { + jsx: true, + modules: true, + legacyDecorators: true, + }, + }, + + env: { + browser: true, + es6: true, + jest: true, + jquery: true, // hard-loaded into vendor.js + }, globals: { require: false, @@ -21,7 +808,25 @@ module.exports = { tick: true, jest: true, }, + + settings: { + react: { + version: '17.0.2', // React version, can not `detect` because of getsentry + }, + 'import/parsers': { + '@typescript-eslint/parser': ['.ts', '.tsx'], + }, + 'import/resolver': { + typescript: {}, + }, + 'import/extensions': ['.js', '.jsx'], + }, + rules: { + ...baseRules, + ...reactRules, + ...appRules, + ...strictRules, 'react-hooks/rules-of-hooks': 'error', 'react-hooks/exhaustive-deps': [ 'error', @@ -138,8 +943,12 @@ module.exports = { }, { files: ['static/**/*.spec.{ts,js}', 'tests/js/**/*.{ts,js}'], - extends: ['plugin:testing-library/react', 'sentry-app/strict'], + extends: ['plugin:testing-library/react', ...extendsList], rules: { + ...baseRules, + ...reactRules, + ...appRules, + ...strictRules, // TODO(@anonrig): Remove this from eslint-sentry-config 'space-infix-ops': 'off', 'object-shorthand': 'off', diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 53d94e25298713..9695c88749836f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -97,7 +97,7 @@ Makefile @getsentry/owners-sentr /src/sentry/analytics/events/relocation_*.py @getsentry/open-source /src/sentry/api/endpoints/organization_fork.py @getsentry/open-source /src/sentry/api/endpoints/relocation/ @getsentry/open-source -/src/sentry/api/serialiers/models/relocation/ @getsentry/open-source +/src/sentry/api/serializers/models/relocation/ @getsentry/open-source /src/sentry/models/relocation/ @getsentry/open-source /src/sentry/relocation/ @getsentry/open-source /src/sentry/tasks/relocation.py @getsentry/open-source @@ -228,8 +228,6 @@ yarn.lock @getsentry/owners-js-de /tests/snuba/api/endpoints/test_organization_events_vitals.py @getsentry/visibility /tests/snuba/api/endpoints/test_organization_tagkey_values.py @getsentry/visibility -/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py @getsentry/data - /src/sentry/spans/ @getsentry/visibility /tests/sentry/spans/ @getsentry/visibility @@ -471,10 +469,8 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge /tests/sentry/api/endpoints/test_projects_metrics_visibility.py @getsentry/telemetry-experience /src/sentry/api/endpoints/organization_onboarding* @getsentry/telemetry-experience /tests/sentry/api/endpoints/test_organization_onboarding* @getsentry/telemetry-experience -/src/sentry/api/endpoints/project_metrics_extraction_rules* @getsentry/telemetry-experience -/tests/sentry/api/endpoints/test_project_metrics_extraction_rules.py @getsentry/telemetry-experience -/src/sentry/api/serializers/models/metrics_extraction_rules.py @getsentry/telemetry-experience -/src/sentry/sentry_metrics/models/spanattributeextractionrules.py @getsentry/telemetry-experience +/src/sentry/api/endpoints/organization_sampling_project_span_counts.py @getsentry/telemetry-experience +/tests/sentry/api/endpoints/test_organization_sampling_project_span_counts.py @getsentry/telemetry-experience /src/sentry/dynamic_sampling/ 
@getsentry/telemetry-experience /tests/sentry/dynamic_sampling/ @getsentry/telemetry-experience /src/sentry/release_health/metrics_sessions_v2.py @getsentry/telemetry-experience @@ -484,7 +480,6 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge /tests/sentry/sentry_metrics/querying/ @getsentry/telemetry-experience /src/sentry/sentry_metrics/visibility/ @getsentry/telemetry-experience /tests/sentry/sentry_metrics/visibility/ @getsentry/telemetry-experience -/src/sentry/sentry_metrics/span_attribute_extraction_rules.py @getsentry/telemetry-experience /src/sentry/sentry_metrics/extraction_rules.py @getsentry/telemetry-experience /tests/sentry/sentry_metrics/test_extraction_rules.py @getsentry/telemetry-experience /src/sentry/snuba/metrics/ @getsentry/telemetry-experience @@ -505,6 +500,7 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge /static/app/views/performance/landing/dynamicSamplingMetricsAccuracy.spec.tsx @getsentry/telemetry-experience /static/app/views/performance/landing/dynamicSamplingMetricsAccuracyAlert.tsx @getsentry/telemetry-experience /static/app/views/settings/project/dynamicSampling/ @getsentry/telemetry-experience +/static/app/views/settings/dynamicSampling/ @getsentry/telemetry-experience /static/app/views/settings/projectMetrics/* @getsentry/telemetry-experience /static/app/views/onboarding* @getsentry/telemetry-experience diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 798862b1fc592f..8dd7ae295221bf 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -337,9 +337,11 @@ jobs: ! grep 'Incompatible types in "yield"' .artifacts/mypy-all ! grep 'Module "sentry.*has no attribute' .artifacts/mypy-all ! grep 'No return value expected' .artifacts/mypy-all + ! grep 'Return value expected' .artifacts/mypy-all ! grep 'Unpacking a string is disallowed' .artifacts/mypy-all ! grep 'base class .* defined the type as.*Permission' .artifacts/mypy-all ! grep 'does not explicitly export attribute' .artifacts/mypy-all + ! grep 'gets multiple values for' .artifacts/mypy-all - name: apply blocklist changes if: | diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index c3480e807a39f2..fc1a79c0a975f3 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -129,7 +129,11 @@ jobs: # This quiets up the logs quite a bit. DEBUG_PRINT_LIMIT: 0 run: | - JEST_TESTS=$(yarn -s jest --listTests --json) yarn test-ci --forceExit + if [ ${{ github.ref }} = 'refs/heads/master' ]; then + JEST_TESTS=$(yarn -s jest --listTests --json) yarn test-ci --forceExit --coverage + else + JEST_TESTS=$(yarn -s jest --listTests --json) yarn test-ci --forceExit + fi # We only upload coverage data for FE changes since it conflicts with # codecov's carry forward functionality. 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 51f2df29192c3d..406fed5714067e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,14 +17,20 @@ jobs: runs-on: ubuntu-latest name: 'Release a new version' steps: + - name: Get auth token + id: token + uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1.11.0 + with: + app-id: ${{ vars.SENTRY_RELEASE_BOT_CLIENT_ID }} + private-key: ${{ secrets.SENTRY_RELEASE_BOT_PRIVATE_KEY }} - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: - token: ${{ secrets.GH_RELEASE_PAT }} + token: ${{ steps.token.outputs.token }} fetch-depth: 0 - name: Prepare release uses: getsentry/action-prepare-release@d2cc2db3db92bc5b79a90c316f588f2b13626a2b # v1.5.6 env: - GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }} + GITHUB_TOKEN: ${{ steps.token.outputs.token }} with: version: ${{ github.event.inputs.version }} force: ${{ github.event.inputs.force }} diff --git a/.github/workflows/test_docker_compose_acceptance.yml b/.github/workflows/test_docker_compose_acceptance.yml index bc5a852ae2f2ff..aa81794eb085ba 100644 --- a/.github/workflows/test_docker_compose_acceptance.yml +++ b/.github/workflows/test_docker_compose_acceptance.yml @@ -4,7 +4,7 @@ name: test-docker-compose-acceptance on: schedule: - - cron: '0 * * * *' + - cron: '0 0 * * *' # Cancel in progress workflows on pull_requests. # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value diff --git a/.github/workflows/test_docker_compose_backend.yml b/.github/workflows/test_docker_compose_backend.yml index 0108f97c489184..179b1efee17362 100644 --- a/.github/workflows/test_docker_compose_backend.yml +++ b/.github/workflows/test_docker_compose_backend.yml @@ -2,7 +2,7 @@ name: test-docker-compose-backend on: schedule: - - cron: '0 * * * *' + - cron: '0 0 * * *' # Cancel in progress workflows on pull_requests. 
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value diff --git a/.volta.json b/.volta.json index 6d767601866ac6..872e9f4f1d6b99 100644 --- a/.volta.json +++ b/.volta.json @@ -1,6 +1,6 @@ { "volta": { - "node": "20.13.1", + "node": "22.11.0", "yarn": "1.22.22" } } diff --git a/CHANGES b/CHANGES index 1fae1885ccc856..84afe672222eeb 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,67 @@ +24.11.1 +------- + +### Various fixes & improvements + +- feat(toolbar): Make the login-success page have styles that blend with the login flow (#81230) by @ryan953 +- fix(issues): Revert to app external issue name (#81277) by @scttcper +- Revert "chore(similarity): Do not send > 30 system frames to seer (#81259)" (104352cb) by @getsentry-bot +- :wrench: chore(slos): Update Halt to Success for Bot Commands (#81271) by @iamrajjoshi +- fix(auth): Adding scoping_organization_id to replica (#81213) by @sentaur-athena +- chore(similarity): Do not send > 30 system frames to seer (#81259) by @jangjodi +- fix(issues): Animate dropdown chevrons, button sizes (#81262) by @scttcper +- feat(eap): Add missing profile.id column to EAP (#81263) by @Zylphrex +- ref(dashboards): Modify how permissions are handled for editing/deleting dashboards (#80684) by @harshithadurai +- feat(explore): Format numeric tags nicely in explore (#81255) by @Zylphrex +- fix(explore): Preserve sort when adding group by (#81258) by @Zylphrex +- ref(insights): remove insights-domain-view flag part 1 (#81241) by @DominikB2014 +- chore(alerts): Drop included and excluded projects (#81250) by @ceorourke +- ref: fix flaky digests test (#81256) by @asottile-sentry +- chore(sentryapps) Remove option for sentryapp RPC transition (#81245) by @markstory +- fix(grouping): Only collect metadata timing metric when actually getting metadata (#81252) by @lobsterkatie +- chore(performance): Remove old anomaly detection backend (#80696) by @gggritso +- Revert "chore(similarity): Add logging for over 30 system frames (#81130)" (7b7e7955) by @getsentry-bot +- ♻️ chore(slo): SLOs for CommitContextIntegration (#81225) by @iamrajjoshi +- chore(widget-builder): Remove organization props (#81248) by @narsaynorath +- chore(integrations): SourceCodeSearchEndpoint metrics (#80956) by @mifu67 +- chore(vsts): vsts installation step metrics (#80789) by @cathteng +- Remove excluded_projects & include_all_projects columns (#81204) by @ceorourke +- fix(issues): Wrap solutions in error boundary (#81244) by @scttcper + +_Plus 338 more_ + +24.11.0 +------- + +### Various fixes & improvements + +- feat(issue-details): Add support link to dropdown (#80804) by @roggenkemper +- fix(issues): Show 50+ Replays whenever the count is maxed out like that (#80809) by @ryan953 +- ref(replay): Refactor extractDomNodes stepper strategy into extractDomNodes.tsx (#80810) by @ryan953 +- ref: make condition_data / action_data param names match for create_project_rule (#80781) by @asottile-sentry +- ref: explicitly install libexpat1 (#80742) by @asottile-sentry +- ref(dashboards): Fixes and refactoring for edit access selector button (#80633) by @harshithadurai +- feat(flamegraphs): Support functions flamegraphs for continuous profi… (#80822) by @Zylphrex +- feature(dashboards): added grid icon (#80806) by @doralchan +- fix(charts): Respect stacked prop in area chart (#80824) by @Zylphrex +- fix(insights): filter out ui.action in backend, and add to mobile (#80823) by @DominikB2014 +- chore(flamegraph): Remove transactions from differential flamegraphs 
(#80807) by @Zylphrex +- feat(functions): Use flamegraph as data source for slowest functions (#80791) by @Zylphrex +- feat(dashboards): Dashboards landing page layout toggle (#80790) by @nikkikapadia +- ref(grouping): Rename `GroupingComponent` to `BaseGroupingComponent` (#80725) by @lobsterkatie +- feat(dynamic-sampling): Show project as active if not 100 percent (#80819) by @ArthurKnaus +- feat(dynamic-sampling): Copy changes and doc links (#80818) by @ArthurKnaus +- fix(releases): For semver releases, get latest release as the resolving one (#80737) by @armenzg +- feat(dynamic-sampling): Use new span count endpoint (#80732) by @ArthurKnaus +- feat(dynamic-sampling): Show stored span per sub-project (#80816) by @ArthurKnaus +- fix(dynamic-sampling): set dynamic sampling project span count mql query limit (#80812) by @shellmayr +- chore(codeowners): update telemetry experience ownership (#80813) by @shellmayr +- fix(crons): Move limit back onto QuerySet for incident detection (#80805) by @evanpurkhiser +- fix(grouping): Small types fixes (#80724) by @lobsterkatie +- feat(backup): Add export checkpointer (#80711) by @azaslavsky + +_Plus 1276 more_ + 24.10.0 ------- diff --git a/Makefile b/Makefile index 015c139827f4ff..bd3a6150a492b4 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,6 @@ all: develop PIP := python -m pip --disable-pip-version-check WEBPACK := yarn build-acceptance -POSTGRES_CONTAINER := sentry_postgres freeze-requirements: @python3 -S -m tools.freeze_requirements diff --git a/api-docs/openapi.json b/api-docs/openapi.json index d2ed03342d473f..f7cba5f6d5345f 100644 --- a/api-docs/openapi.json +++ b/api-docs/openapi.json @@ -135,13 +135,13 @@ "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/issues/": { "$ref": "paths/events/project-issues.json" }, - "/api/0/issues/{issue_id}/tags/{key}/values/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/values/": { "$ref": "paths/events/tag-values.json" }, - "/api/0/issues/{issue_id}/hashes/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/hashes/": { "$ref": "paths/events/issue-hashes.json" }, - "/api/0/issues/{issue_id}/": { + "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/": { "$ref": "paths/events/issue-details.json" }, "/api/0/organizations/{organization_id_or_slug}/releases/": { diff --git a/api-docs/package.json b/api-docs/package.json index f3a3cef97086e1..9e729d8af38a89 100644 --- a/api-docs/package.json +++ b/api-docs/package.json @@ -13,7 +13,7 @@ "js-yaml": "^3.14.0", "json-diff": "^0.7.1", "json-refs": "^3.0.15", - "openapi-examples-validator": "^5.0.0", + "openapi-examples-validator": "^6.0.0", "sane": "^5.0.1" }, "devDependencies": { @@ -21,7 +21,7 @@ "@types/json-diff": "^0.7.0" }, "volta": { - "node": "20.13.1", + "node": "22.11.0", "yarn": "1.22.22" } } diff --git a/api-docs/paths/events/issue-hashes.json b/api-docs/paths/events/issue-hashes.json index 77acd0241f8cb9..5a18525f8759ac 100644 --- a/api-docs/paths/events/issue-hashes.json +++ b/api-docs/paths/events/issue-hashes.json @@ -4,6 +4,15 @@ "description": "This endpoint lists an issue's hashes, which are the generated checksums used to aggregate individual events.", "operationId": "List an Issue's Hashes", "parameters": [ + { + "name": "organization_id_or_slug", + "in": "path", + "description": "The ID or slug of the organization the event belongs to.", + "required": true, + "schema": { + "type": "string" + } + }, { "name": "issue_id", "in": "path", @@ -13,6 +22,16 @@ 
"type": "string" } }, + { + "in": "query", + "name": "full", + "schema": { + "type": "boolean", + "default": true + }, + "description": "If this is set to true, the event payload will include the full event body, including the stacktrace. Set to 1 to enable.", + "required": false + }, { "$ref": "../../components/parameters/pagination-cursor.json#/PaginationCursor" } diff --git a/api-docs/paths/events/tag-values.json b/api-docs/paths/events/tag-values.json index 323b3d33bc8f8d..ecba8b30675bd1 100644 --- a/api-docs/paths/events/tag-values.json +++ b/api-docs/paths/events/tag-values.json @@ -4,6 +4,15 @@ "description": "Returns details for given tag key related to an issue. \n\nWhen [paginated](/api/pagination) can return at most 1000 values.", "operationId": "List a Tag's Values Related to an Issue", "parameters": [ + { + "name": "organization_id_or_slug", + "in": "path", + "description": "The ID or slug of the organization the event belongs to.", + "required": true, + "schema": { + "type": "string" + } + }, { "name": "issue_id", "in": "path", diff --git a/api-docs/yarn.lock b/api-docs/yarn.lock index 93b7bbe858a4ad..e0c3c84186dd76 100644 --- a/api-docs/yarn.lock +++ b/api-docs/yarn.lock @@ -25,10 +25,20 @@ resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg== +"@jsep-plugin/assignment@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@jsep-plugin/assignment/-/assignment-1.3.0.tgz#fcfc5417a04933f7ceee786e8ab498aa3ce2b242" + integrity sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ== + +"@jsep-plugin/regex@^1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@jsep-plugin/regex/-/regex-1.0.4.tgz#cb2fc423220fa71c609323b9ba7f7d344a755fcc" + integrity sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg== + "@types/js-yaml@^4.0.5": - version "4.0.5" - resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.5.tgz#738dd390a6ecc5442f35e7f03fa1431353f7e138" - integrity sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA== + version "4.0.9" + resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.9.tgz#cd82382c4f902fed9691a2ed79ec68c5898af4c2" + integrity sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg== "@types/json-diff@^0.7.0": version "0.7.0" @@ -36,9 +46,9 @@ integrity sha512-20IJqupGHywtIaE6fS30iygh3dVqVdzmsnrYn/VFuRaQKxLx/RGH5K9hhCfctVxgN7KzJlnD7gYFAOrNwiCgtA== "@types/json-schema@^7.0.6": - version "7.0.9" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.9.tgz#97edc9037ea0c38585320b28964dde3b39e4660d" - integrity sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ== + version "7.0.15" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== ajv-draft-04@^1.0.0: version "1.0.0" @@ -52,15 +62,15 @@ ajv-formats@^2.1.1: dependencies: ajv "^8.0.0" -ajv@^8.0.0, ajv@^8.12.0: - version "8.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" - integrity 
sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== +ajv@^8.0.0, ajv@^8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== dependencies: - fast-deep-equal "^3.1.1" + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" json-schema-traverse "^1.0.0" require-from-string "^2.0.2" - uri-js "^4.2.2" anymatch@^3.1.1: version "3.1.3" @@ -82,10 +92,15 @@ argparse@^2.0.1: resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== +asap@^2.0.0: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== balanced-match@^1.0.0: version "1.0.2" @@ -113,18 +128,21 @@ bser@2.1.1: dependencies: node-int64 "^0.4.0" -call-bind@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" - integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== +call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== dependencies: - function-bind "^1.1.1" - get-intrinsic "^1.0.2" + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" call-me-maybe@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" - integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.2.tgz#03f964f19522ba643b1b0693acb9152fe2074baa" + integrity sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ== capture-exit@^2.0.0: version "2.0.0" @@ -134,17 +152,17 @@ capture-exit@^2.0.0: rsvp "^4.8.4" cli-color@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-2.0.1.tgz#93e3491308691f1e46beb78b63d0fb2585e42ba6" - integrity sha512-eBbxZF6fqPUNnf7CLAFOersUnyYzv83tHFLSlts+OAHsNendaqv2tHCq+/MO+b3Y+9JeoUlIvobyxG/Z8GNeOg== + version "2.0.4" + resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-2.0.4.tgz#d658080290968816b322248b7306fad2346fb2c8" + integrity sha512-zlnpg0jNcibNrO7GG9IeHH7maWFeCz+Ja1wx/7tZNU5ASSSSZ+/qZciM0/LHCYxSdqv5h2sdbQ/PXYdOuetXvA== dependencies: d "^1.0.1" - es5-ext "^0.10.53" + es5-ext "^0.10.64" es6-iterator "^2.0.3" memoizee "^0.4.15" timers-ext "^0.1.7" -combined-stream@^1.0.6: +combined-stream@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity 
sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== @@ -161,61 +179,73 @@ commander@~4.1.1: resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== -component-emitter@^1.2.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -cookiejar@^2.1.0: - version "2.1.3" - resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.3.tgz#fc7a6216e408e74414b90230050842dacda75acc" - integrity sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ== +component-emitter@^1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.1.tgz#ef1d5796f7d93f135ee6fb684340b26403c97d17" + integrity sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ== -core-util-is@~1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +cookiejar@^2.1.3: + version "2.1.4" + resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.4.tgz#ee669c1fea2cf42dc31585469d193fef0d65771b" + integrity sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw== cross-spawn@^7.0.0: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== dependencies: path-key "^3.1.0" shebang-command "^2.0.0" which "^2.0.1" -d@1, d@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" - integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== +d@1, d@^1.0.1, d@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/d/-/d-1.0.2.tgz#2aefd554b81981e7dccf72d6842ae725cb17e5de" + integrity sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw== dependencies: - es5-ext "^0.10.50" - type "^1.0.1" + es5-ext "^0.10.64" + type "^2.7.2" -debug@^3.1.0: - version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== +debug@^4.3.4: + version "4.3.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" + integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== dependencies: - ms "^2.1.1" + ms "^2.1.3" + +define-data-property@^1.1.4: + version "1.1.4" + resolved 
"https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +dezalgo@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/dezalgo/-/dezalgo-1.0.4.tgz#751235260469084c132157dfa857f386d4c33d81" + integrity sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig== + dependencies: + asap "^2.0.0" + wrappy "1" difflib@~0.2.1: version "0.2.4" resolved "https://registry.yarnpkg.com/difflib/-/difflib-0.2.4.tgz#b5e30361a6db023176d562892db85940a718f47e" - integrity sha1-teMDYabbAjF21WKJLbhZQKcY9H4= + integrity sha512-9YVwmMb0wQHQNr5J9m6BSj6fk4pfGITGQOOs+D9Fl+INODWFOfvhIU1hNv6GgR1RBoC/9NJcwu77zShxV0kT7w== dependencies: heap ">= 0.2.0" dreamopt@~0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/dreamopt/-/dreamopt-0.8.0.tgz#5bcc80be7097e45fc489c342405ab68140a8c1d9" - integrity sha1-W8yAvnCX5F/EicNCQFq2gUCowdk= + integrity sha512-vyJTp8+mC+G+5dfgsY+r3ckxlz+QMX40VjPQsZc5gxVAxLmi64TBoVkP54A/pRAXMXsbu2GMMBrZPxNv23waMg== dependencies: wordwrap ">=0.0.2" @@ -233,31 +263,44 @@ errno@^1.0.0: dependencies: prr "~1.0.1" -es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: - version "0.10.56" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.56.tgz#fd76bc935212203a83fef35bb58cddde26ae6c3c" - integrity sha512-YUhqzoMnIjMW5y8FzaMxsCu0eWCwq32GrlwhOhbQmL5OiZReWFm/KvRiYuvqf3CaG/zZ36Kyb4KfVe674cafCQ== +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + +es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.62, es5-ext@^0.10.64, es5-ext@~0.10.14, es5-ext@~0.10.2: + version "0.10.64" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.64.tgz#12e4ffb48f1ba2ea777f1fcdd1918ef73ea21714" + integrity sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg== dependencies: es6-iterator "^2.0.3" es6-symbol "^3.1.3" + esniff "^2.0.1" next-tick "^1.1.0" es6-iterator@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" - integrity sha1-p96IkUGgWpSwhUQDstCg+/qY87c= + integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== dependencies: d "1" es5-ext "^0.10.35" es6-symbol "^3.1.1" es6-symbol@^3.1.1, es6-symbol@^3.1.3: - version "3.1.3" - resolved 
"https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" - integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + version "3.1.4" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.4.tgz#f4e7d28013770b4208ecbf3e0bf14d3bcb557b8c" + integrity sha512-U9bFFjX8tFiATgtkJ1zg25+KviIXpgRvRHS8sau3GfhVzThRQrOeksPeT0BWW2MNZs1OEWJ1DPXOQMn0KKRkvg== dependencies: - d "^1.0.1" - ext "^1.1.2" + d "^1.0.2" + ext "^1.7.0" es6-weak-map@^2.0.3: version "2.0.3" @@ -269,6 +312,16 @@ es6-weak-map@^2.0.3: es6-iterator "^2.0.3" es6-symbol "^3.1.1" +esniff@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/esniff/-/esniff-2.0.1.tgz#a4d4b43a5c71c7ec51c51098c1d8a29081f9b308" + integrity sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg== + dependencies: + d "^1.0.1" + es5-ext "^0.10.62" + event-emitter "^0.3.5" + type "^2.7.2" + esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" @@ -277,7 +330,7 @@ esprima@^4.0.0: event-emitter@^0.3.5: version "0.3.5" resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" - integrity sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk= + integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== dependencies: d "1" es5-ext "~0.10.14" @@ -302,23 +355,28 @@ execa@^4.0.0: signal-exit "^3.0.2" strip-final-newline "^2.0.0" -ext@^1.1.2: - version "1.6.0" - resolved "https://registry.yarnpkg.com/ext/-/ext-1.6.0.tgz#3871d50641e874cc172e2b53f919842d19db4c52" - integrity sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg== +ext@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== dependencies: - type "^2.5.0" + type "^2.7.2" -extend@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -fast-deep-equal@^3.1.1: +fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== +fast-safe-stringify@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" + integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== + +fast-uri@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.3.tgz#892a1c91802d5d7860de728f18608a0573142241" + integrity sha512-aLrHthzCjH5He4Z2H9YZ+v6Ujb9ocRuW6ZzkJQOrTxleEijANq4v1TsaPaVG1PZcuurEzrLcWRyYBYXD5cEiaw== + fb-watchman@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" @@ -334,42 +392,49 @@ fill-range@^7.1.1: to-regex-range "^5.0.1" foreach@^2.0.4: - version "2.0.5" - resolved 
"https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" - integrity sha1-C+4AUBiusmDQo6865ljdATbsG5k= + version "2.0.6" + resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.6.tgz#87bcc8a1a0e74000ff2bf9802110708cfb02eb6e" + integrity sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg== -form-data@^2.3.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" - integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA== +form-data@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.1.tgz#ba1076daaaa5bfd7e99c1a6cb02aa0a5cff90d48" + integrity sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw== dependencies: asynckit "^0.4.0" - combined-stream "^1.0.6" + combined-stream "^1.0.8" mime-types "^2.1.12" -formidable@^1.2.0: - version "1.2.6" - resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.6.tgz#d2a51d60162bbc9b4a055d8457a7c75315d1a168" - integrity sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ== +formidable@^2.0.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/formidable/-/formidable-2.1.2.tgz#fa973a2bec150e4ce7cac15589d7a25fc30ebd89" + integrity sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g== + dependencies: + dezalgo "^1.0.4" + hexoid "^1.0.0" + once "^1.4.0" + qs "^6.11.0" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== -get-intrinsic@^1.0.2: - version "1.1.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" - integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== +get-intrinsic@^1.1.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" get-stream@^5.0.0: version "5.2.0" @@ -389,6 +454,13 @@ glob@^8.1.0: minimatch "^5.0.1" once "^1.3.0" +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity 
sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + graphlib@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/graphlib/-/graphlib-2.1.8.tgz#5761d414737870084c92ec7b5dbcb0592c9d35da" @@ -396,23 +468,40 @@ graphlib@^2.1.8: dependencies: lodash "^4.17.15" -has-symbols@^1.0.1: +has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + +has-proto@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + +has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== dependencies: - function-bind "^1.1.1" + function-bind "^1.1.2" "heap@>= 0.2.0": version "0.2.7" resolved "https://registry.yarnpkg.com/heap/-/heap-0.2.7.tgz#1e6adf711d3f27ce35a81fe3b7bd576c2260a8fc" integrity sha512-2bsegYkkHO+h/9MGbn6KWcE45cHZgPANo5LXF7EvWdT0yT2EguSVO1nDgU5c8+ZOPwp2vMNa7YFsJhVcDR9Sdg== +hexoid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/hexoid/-/hexoid-1.0.0.tgz#ad10c6573fb907de23d9ec63a711267d9dc9bc18" + integrity sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g== + human-signals@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" @@ -421,12 +510,12 @@ human-signals@^1.1.1: inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" wrappy "1" -inherits@2, inherits@~2.0.3: +inherits@2, inherits@^2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -446,11 +535,6 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity 
sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" @@ -471,10 +555,15 @@ js-yaml@^4.1.0: dependencies: argparse "^2.0.1" +jsep@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/jsep/-/jsep-1.4.0.tgz#19feccbfa51d8a79f72480b4b8e40ce2e17152f0" + integrity sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw== + json-diff@^0.7.1: - version "0.7.2" - resolved "https://registry.yarnpkg.com/json-diff/-/json-diff-0.7.2.tgz#237ade7cdfb882183b79f2066defdb458b995bba" - integrity sha512-m+rr5cvC8gML9iB8FatQpQ/NEBJ7LHUFMM4KoNfmCfhTElm42SnqslCGKpYB+Dt1NgkibRVrGP0ZAO3TOU1hpA== + version "0.7.4" + resolved "https://registry.yarnpkg.com/json-diff/-/json-diff-0.7.4.tgz#b9089e2d29dd1b99cf3529dc1a5b72ca2ac7a8dc" + integrity sha512-FJ2P+ShDbzu9epF+kCKgoSUhPIUW7Ta7A4XlIT0L5LzgaR/z1TBF1mm0XhRGj8RlA3Xm0j+c/FsWOHDtuoYejA== dependencies: cli-color "^2.0.0" difflib "~0.2.1" @@ -513,25 +602,29 @@ json-schema-traverse@^1.0.0: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== -jsonpath-plus@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/jsonpath-plus/-/jsonpath-plus-7.2.0.tgz#7ad94e147b3ed42f7939c315d2b9ce490c5a3899" - integrity sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA== +jsonpath-plus@^10.2.0: + version "10.2.0" + resolved "https://registry.yarnpkg.com/jsonpath-plus/-/jsonpath-plus-10.2.0.tgz#84d680544d9868579cc7c8f59bbe153a5aad54c4" + integrity sha512-T9V+8iNYKFL2n2rF+w02LBOT2JjDnTjioaNFrxRy0Bv1y/hNsqR/EBK7Ojy2ythRHwmz2cRIls+9JitQGZC/sw== + dependencies: + "@jsep-plugin/assignment" "^1.3.0" + "@jsep-plugin/regex" "^1.0.4" + jsep "^1.4.0" lodash.clonedeep@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" - integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= + integrity sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ== lodash.flatmap@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.flatmap/-/lodash.flatmap-4.5.0.tgz#ef8cbf408f6e48268663345305c6acc0b778702e" - integrity sha1-74y/QI9uSCaGYzRTBcaswLd4cC4= + integrity sha512-/OcpcAGWlrZyoHGeHh3cAoa6nGdX6QYtmzNP84Jqol6UEQQ2gIaU3H+0eICcjcKGl0/XF8LWOujNn9lffsnaOg== lodash.flatten@^4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= + integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g== lodash.merge@^4.6.2: version "4.6.2" @@ -546,7 +639,7 @@ lodash@^4.17.15: lru-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" - integrity sha1-Jzi9nw089PhEkMVzbEhpmsYyzaM= + integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== dependencies: es5-ext "~0.10.2" @@ -558,12 +651,12 @@ makeerror@1.0.12: tmpl "1.0.5" memoizee@^0.4.15: - version "0.4.15" - resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" - integrity 
sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== + version "0.4.17" + resolved "https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.17.tgz#942a5f8acee281fa6fb9c620bddc57e3b7382949" + integrity sha512-DGqD7Hjpi/1or4F/aYAspXKNm5Yili0QDAFAY4QYvpqpgiY6+1jOfqpmByzjxbWd/T9mChbCArXAbDAsTm5oXA== dependencies: - d "^1.0.1" - es5-ext "^0.10.53" + d "^1.0.2" + es5-ext "^0.10.64" es6-weak-map "^2.0.3" event-emitter "^0.3.5" is-promise "^2.2.2" @@ -576,10 +669,10 @@ merge-stream@^2.0.0: resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -methods@^1.1.1: +methods@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== micromatch@^4.0.2: version "4.0.8" @@ -589,22 +682,22 @@ micromatch@^4.0.2: braces "^3.0.3" picomatch "^2.3.1" -mime-db@1.51.0: - version "1.51.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" - integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g== +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-types@^2.1.12: - version "2.1.34" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24" - integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A== + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: - mime-db "1.51.0" + mime-db "1.52.0" -mime@^1.4.1: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== +mime@2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" + integrity sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== mimic-fn@^2.1.0: version "2.1.0" @@ -623,7 +716,7 @@ minimist@^1.1.1, minimist@^1.2.0: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -ms@^2.1.1: +ms@^2.1.3: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -631,9 +724,9 @@ ms@^2.1.1: native-promise-only@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/native-promise-only/-/native-promise-only-0.8.1.tgz#20a318c30cb45f71fe7adfbf7b21c99c1472ef11" - integrity sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE= + integrity 
sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg== -next-tick@1, next-tick@^1.1.0: +next-tick@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== @@ -655,15 +748,15 @@ npm-run-path@^4.0.0: dependencies: path-key "^3.0.0" -object-inspect@^1.9.0: - version "1.12.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.0.tgz#6e2c120e868fd1fd18cb4f18c31741d0d6e776f0" - integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== +object-inspect@^1.13.1: + version "1.13.3" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.3.tgz#f14c183de51130243d6d18ae149375ff50ea488a" + integrity sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA== once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" @@ -674,12 +767,12 @@ onetime@^5.1.0: dependencies: mimic-fn "^2.1.0" -openapi-examples-validator@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/openapi-examples-validator/-/openapi-examples-validator-5.0.0.tgz#d35cbffc5f0669961f8ce514c413d0b44a153b62" - integrity sha512-UbyFT66im7n8yJWBMHup2VBKzWjIX0v9y+RroExPHqArUx/1kCdb39BfDra+cPiiZIwU9YYGtxwBwYyCCJAVgg== +openapi-examples-validator@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/openapi-examples-validator/-/openapi-examples-validator-6.0.1.tgz#26ed3d425e5601f55c17b202eee22c17445d2aea" + integrity sha512-R8+E8hoc/s0s54O8VWWRM6FM7M7lPUPScEiESDEy1ZDvaCgb/ghiQCP6ArfU4GFu7lTaS31ujGUNC8eTZltgAQ== dependencies: - ajv "^8.12.0" + ajv "^8.17.1" ajv-draft-04 "^1.0.0" ajv-formats "^2.1.1" commander "^6.2.1" @@ -687,12 +780,12 @@ openapi-examples-validator@^5.0.0: glob "^8.1.0" json-pointer "^0.6.2" json-schema-ref-parser "^9.0.9" - jsonpath-plus "^7.2.0" + jsonpath-plus "^10.2.0" lodash.clonedeep "^4.5.0" lodash.flatmap "^4.5.0" lodash.flatten "^4.4.0" lodash.merge "^4.6.2" - yaml "^2.2.2" + yaml "^2.6.1" path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" @@ -700,60 +793,51 @@ path-key@^3.0.0, path-key@^3.1.0: integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-loader@^1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/path-loader/-/path-loader-1.0.10.tgz#dd3d1bd54cb6f2e6423af2ad334a41cc0bce4cf6" - integrity sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA== + version "1.0.12" + resolved "https://registry.yarnpkg.com/path-loader/-/path-loader-1.0.12.tgz#c5a99d464da27cfde5891d158a68807abbdfa5f5" + integrity sha512-n7oDG8B+k/p818uweWrOixY9/Dsr89o2TkCm6tOTex3fpdo2+BFDgR+KpB37mGKBRsBAlR8CIJMFN0OEy/7hIQ== dependencies: native-promise-only "^0.8.1" - superagent "^3.8.3" + superagent "^7.1.6" picomatch@^2.0.4, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== 
-process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= + integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + version "3.0.2" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.2.tgz#836f3edd6bc2ee599256c924ffe0d88573ddcbf8" + integrity sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw== dependencies: end-of-stream "^1.1.0" once "^1.3.1" punycode@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== -qs@^6.5.1: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== +qs@^6.10.3, qs@^6.11.0: + version "6.13.1" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.1.tgz#3ce5fc72bd3a8171b85c99b93c65dd20b7d1b16e" + integrity sha512-EJPeIn0CYrGu+hli1xilKAPXODtJ12T0sP63Ijx2/khC2JtuaN3JyNIpvmnkmaEtha9ocbG4A4cMcr+TvqvwQg== dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" -readable-stream@^2.3.5: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== +readable-stream@^3.6.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" require-from-string@^2.0.2: version "2.0.2" @@ -765,10 +849,10 @@ rsvp@^4.8.4: resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== -safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== 
+safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== sane@^5.0.1: version "5.0.1" @@ -785,6 +869,23 @@ sane@^5.0.1: minimist "^1.1.1" walker "~1.0.5" +semver@^7.3.7: + version "7.6.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" + integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== + +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" + shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -797,14 +898,15 @@ shebang-regex@^3.0.0: resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" signal-exit@^3.0.2: version "3.0.7" @@ -819,43 +921,44 @@ slash@^3.0.0: sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== dependencies: - safe-buffer "~5.1.0" + safe-buffer "~5.2.0" strip-final-newline@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== -superagent@^3.8.3: - version "3.8.3" - resolved 
"https://registry.yarnpkg.com/superagent/-/superagent-3.8.3.tgz#460ea0dbdb7d5b11bc4f78deba565f86a178e128" - integrity sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA== - dependencies: - component-emitter "^1.2.0" - cookiejar "^2.1.0" - debug "^3.1.0" - extend "^3.0.0" - form-data "^2.3.1" - formidable "^1.2.0" - methods "^1.1.1" - mime "^1.4.1" - qs "^6.5.1" - readable-stream "^2.3.5" +superagent@^7.1.6: + version "7.1.6" + resolved "https://registry.yarnpkg.com/superagent/-/superagent-7.1.6.tgz#64f303ed4e4aba1e9da319f134107a54cacdc9c6" + integrity sha512-gZkVCQR1gy/oUXr+kxJMLDjla434KmSOKbx5iGD30Ql+AkJQ/YlPKECJy2nhqOsHLjGHzoDTXNSjhnvWhzKk7g== + dependencies: + component-emitter "^1.3.0" + cookiejar "^2.1.3" + debug "^4.3.4" + fast-safe-stringify "^2.1.1" + form-data "^4.0.0" + formidable "^2.0.1" + methods "^1.1.2" + mime "2.6.0" + qs "^6.10.3" + readable-stream "^3.6.0" + semver "^7.3.7" timers-ext@^0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" - integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== + version "0.1.8" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.8.tgz#b4e442f10b7624a29dd2aa42c295e257150cf16c" + integrity sha512-wFH7+SEAcKfJpfLPkrgMPvvwnEtj8W4IurvEyrKsDleXnKLCDw71w8jltvfLa8Rm4qQxxT4jmDBYbJG/z7qoww== dependencies: - es5-ext "~0.10.46" - next-tick "1" + es5-ext "^0.10.64" + next-tick "^1.1.0" tmpl@1.0.5: version "1.0.5" @@ -869,15 +972,10 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -type@^1.0.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" - integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== - -type@^2.5.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/type/-/type-2.6.0.tgz#3ca6099af5981d36ca86b78442973694278a219f" - integrity sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ== +type@^2.7.2: + version "2.7.3" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.3.tgz#436981652129285cc3ba94f392886c2637ea0486" + integrity sha512-8j+1QmAbPvLZow5Qpi6NCaN8FB60p/6x8/vfNqOk/hC+HuvFZhL4+WfekuhQLiqFZXOgQdrs3B+XxEmCc6b3FQ== uri-js@^4.2.2: version "4.4.1" @@ -886,10 +984,10 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -util-deprecate@~1.0.1: +util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== walker@~1.0.5: version "1.0.8" @@ -908,14 +1006,14 @@ which@^2.0.1: wordwrap@>=0.0.2: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== -yaml@^2.2.2: - version "2.3.1" - resolved 
"https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" - integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== +yaml@^2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.6.1.tgz#42f2b1ba89203f374609572d5349fb8686500773" + integrity sha512-7r0XPzioN/Q9kXBro/XPnA6kznR73DHq+GXh5ON7ZozRO6aMjbmiBuKste2wslTFkC5d1dw0GooOCepZXJ2SAg== diff --git a/bin/split-silo-database b/bin/split-silo-database index 50f0e0a5a593c1..c2c68b86b1725b 100755 --- a/bin/split-silo-database +++ b/bin/split-silo-database @@ -1,4 +1,6 @@ #!/usr/bin/env python +import os + import click from django.apps import apps @@ -30,7 +32,12 @@ def split_database(tables: list[str], source: str, destination: str, reset: bool command.extend([">", f"/tmp/{destination}-tables.sql"]) with get_docker_client() as client: - postgres = client.containers.get("sentry_postgres") + postgres_container = ( + "sentry_postgres" + if os.environ.get("USE_NEW_DEVSERVICES") != "1" + else "sentry-postgres-1" + ) + postgres = client.containers.get(postgres_container) if verbose: click.echo(f">> Running {' '.join(command)}") diff --git a/codecov.yml b/codecov.yml index 5b1985cf566b28..953f7b57c20e87 100644 --- a/codecov.yml +++ b/codecov.yml @@ -4,10 +4,9 @@ coverage: project: false patch: frontend: - # codecov will not fail status checks for master - only_pulls: true - informational: false # Fail the check - target: 60% + informational: true + branches: + - master flags: - frontend backend: diff --git a/devenv/config.ini b/devenv/config.ini index 7a4a63f89afa2e..0d2654d091d8f0 100644 --- a/devenv/config.ini +++ b/devenv/config.ini @@ -32,14 +32,14 @@ linux_arm64_sha256 = e8378c0162b2e0e4cc1f62b29443a3305d116d09583304dbb0149fecaff [node] # upstream (https://nodejs.org/dist/) is not reliable enough so we've mirrored it to GCS -darwin_x86_64 = https://storage.googleapis.com/sentry-dev-infra-assets/node/node-v20.13.1-darwin-x64.tar.xz -darwin_x86_64_sha256 = c83bffeb4eb793da6cb61a44c422b399048a73d7a9c5eb735d9c7f5b0e8659b6 -darwin_arm64 = https://storage.googleapis.com/sentry-dev-infra-assets/node/node-v20.13.1-darwin-arm64.tar.xz -darwin_arm64_sha256 = e8a8e78b91485bc95d20f2aa86201485593685c828ee609245ce21c5680d07ce -linux_x86_64 = https://storage.googleapis.com/sentry-dev-infra-assets/node/node-v20.13.1-linux-x64.tar.xz -linux_x86_64_sha256 = efc0f295dd878e510ab12ea36bbadc3db03c687ab30c07e86c7cdba7eed879a9 +darwin_x86_64 = https://storage.googleapis.com/sentry-dev-infra-assets/node/node-v22.11.0-darwin-x64.tar.xz +darwin_x86_64_sha256 = ab28d1784625d151e3f608a9412a009118f376118ed842ae643f8c2efdfb0af6 +darwin_arm64 = https://storage.googleapis.com/sentry-dev-infra-assets/node/node-v22.11.0-darwin-arm64.tar.xz +darwin_arm64_sha256 = c379a90c6aa605b74042a233ddcda4247b347ba5732007d280e44422cc8f9ecb +linux_x86_64 = https://storage.googleapis.com/sentry-dev-infra-assets/node/node-v22.11.0-linux-x64.tar.xz +linux_x86_64_sha256 = 83bf07dd343002a26211cf1fcd46a9d9534219aad42ee02847816940bf610a72 # used for autoupdate -version = v20.13.1 +version = v22.11.0 yarn_version = 1.22.22 [colima] diff --git a/devenv/sync.py b/devenv/sync.py index 431c9e68cc9045..8d42398400905a 100644 --- a/devenv/sync.py +++ b/devenv/sync.py @@ -72,21 +72,26 @@ def run_procs( # Temporary, see https://github.com/getsentry/sentry/pull/78881 -def check_minimum_version(minimum_version: str): +def check_minimum_version(minimum_version: str) -> bool: version = 
importlib.metadata.version("sentry-devenv") parsed_version = tuple(map(int, version.split("."))) parsed_minimum_version = tuple(map(int, minimum_version.split("."))) - if parsed_version < parsed_minimum_version: + return parsed_version >= parsed_minimum_version + + +def main(context: dict[str, str]) -> int: + minimum_version = "1.13.0" + if not check_minimum_version(minimum_version): raise SystemExit( f""" Hi! To reduce potential breakage we've defined a minimum devenv version ({minimum_version}) to run sync. -Please run the following to update your global devenv to the minimum: +Please run the following to update your global devenv: -{constants.root}/venv/bin/pip install -U 'sentry-devenv=={minimum_version}' +devenv update Then, use it to run sync this one time. @@ -94,10 +99,6 @@ def check_minimum_version(minimum_version: str): """ ) - -def main(context: dict[str, str]) -> int: - check_minimum_version("1.13.0") - repo = context["repo"] reporoot = context["reporoot"] repo_config = config.get_config(f"{reporoot}/devenv/config.ini") @@ -107,6 +108,8 @@ def main(context: dict[str, str]) -> int: FRONTEND_ONLY = os.environ.get("SENTRY_DEVENV_FRONTEND_ONLY") is not None + USE_NEW_DEVSERVICES = os.environ.get("USE_NEW_DEVSERVICES") == "1" + from devenv.lib import node node.install( @@ -128,18 +131,26 @@ def main(context: dict[str, str]) -> int: venv.ensure(venv_dir, python_version, url, sha256) if constants.DARWIN: - colima.install( - repo_config["colima"]["version"], - repo_config["colima"][constants.SYSTEM_MACHINE], - repo_config["colima"][f"{constants.SYSTEM_MACHINE}_sha256"], - reporoot, - ) - limactl.install( - repo_config["lima"]["version"], - repo_config["lima"][constants.SYSTEM_MACHINE], - repo_config["lima"][f"{constants.SYSTEM_MACHINE}_sha256"], - reporoot, - ) + if check_minimum_version("1.14.2"): + # `devenv update`ing to >=1.14.0 will install global colima + # so if it's there, uninstall the repo local stuff + if os.path.exists(f"{constants.root}/bin/colima"): + binroot = f"{reporoot}/.devenv/bin" + colima.uninstall(binroot) + limactl.uninstall(binroot) + else: + colima.install( + repo_config["colima"]["version"], + repo_config["colima"][constants.SYSTEM_MACHINE], + repo_config["colima"][f"{constants.SYSTEM_MACHINE}_sha256"], + reporoot, + ) + limactl.install( + repo_config["lima"]["version"], + repo_config["lima"][constants.SYSTEM_MACHINE], + repo_config["lima"][f"{constants.SYSTEM_MACHINE}_sha256"], + reporoot, + ) if not run_procs( repo, @@ -253,18 +264,35 @@ def main(context: dict[str, str]) -> int: print("Skipping python migrations since SENTRY_DEVENV_FRONTEND_ONLY is set.") return 0 - # TODO: check healthchecks for redis and postgres to short circuit this - proc.run( - ( - f"{venv_dir}/bin/{repo}", - "devservices", - "up", - "redis", - "postgres", - ), - pathprepend=f"{reporoot}/.devenv/bin", - exit=True, - ) + if USE_NEW_DEVSERVICES: + # Ensure old sentry devservices is not being used, otherwise ports will conflict + proc.run( + ( + f"{venv_dir}/bin/{repo}", + "devservices", + "down", + ), + pathprepend=f"{reporoot}/.devenv/bin", + exit=True, + ) + proc.run( + (f"{venv_dir}/bin/devservices", "up", "--mode", "migrations"), + pathprepend=f"{reporoot}/.devenv/bin", + exit=True, + ) + else: + # TODO: check healthchecks for redis and postgres to short circuit this + proc.run( + ( + f"{venv_dir}/bin/{repo}", + "devservices", + "up", + "redis", + "postgres", + ), + pathprepend=f"{reporoot}/.devenv/bin", + exit=True, + ) if not run_procs( repo, @@ -281,12 +309,16 @@ def main(context: 
dict[str, str]) -> int: ): return 1 + postgres_container = ( + "sentry_postgres" if os.environ.get("USE_NEW_DEVSERVICES") != "1" else "sentry-postgres-1" + ) + # faster prerequisite check than starting up sentry and running createuser idempotently stdout = proc.run( ( "docker", "exec", - "sentry_postgres", + postgres_container, "psql", "sentry", "postgres", diff --git a/devservices/config.yml b/devservices/config.yml index 49edafe801c617..c7c73ccb1528aa 100644 --- a/devservices/config.yml +++ b/devservices/config.yml @@ -19,8 +19,15 @@ x-sentry-service-config: mode: containerized postgres: description: Database used to store Sentry data + redis: + description: Shared instance of redis used by sentry services + remote: + repo_name: sentry-shared-redis + branch: main + repo_link: git@github.com:getsentry/sentry-shared-redis.git modes: default: [snuba, postgres, relay] + migrations: [postgres, redis] services: postgres: @@ -46,10 +53,12 @@ services: - 127.0.0.1:5432:5432 extra_hosts: - host.docker.internal:host-gateway + restart: unless-stopped networks: devservices: name: devservices + external: true volumes: postgres-data: diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json index 01b5d5f9bf02a2..7cd2989f951e07 100644 --- a/fixtures/backup/model_dependencies/detailed.json +++ b/fixtures/backup/model_dependencies/detailed.json @@ -49,6 +49,35 @@ "table_name": "flags_audit_log", "uniques": [] }, + "flags.flagwebhooksigningsecretmodel": { + "dangling": false, + "foreign_keys": { + "created_by": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, + "organization": { + "kind": "FlexibleForeignKey", + "model": "sentry.organization", + "nullable": false + } + }, + "model": "flags.flagwebhooksigningsecretmodel", + "relocation_dependencies": [], + "relocation_scope": "Excluded", + "silos": [ + "Region" + ], + "table_name": "flags_webhooksigningsecret", + "uniques": [ + [ + "organization", + "provider", + "secret" + ] + ] + }, "hybridcloud.apikeyreplica": { "dangling": false, "foreign_keys": { @@ -90,6 +119,11 @@ "model": "sentry.organization", "nullable": true }, + "scoping_organization_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.organization", + "nullable": true + }, "user_id": { "kind": "HybridCloudForeignKey", "model": "sentry.user", @@ -452,34 +486,6 @@ "table_name": "sentry_alertruleactivity", "uniques": [] }, - "sentry.alertruleexcludedprojects": { - "dangling": false, - "foreign_keys": { - "alert_rule": { - "kind": "FlexibleForeignKey", - "model": "sentry.alertrule", - "nullable": false - }, - "project": { - "kind": "FlexibleForeignKey", - "model": "sentry.project", - "nullable": false - } - }, - "model": "sentry.alertruleexcludedprojects", - "relocation_dependencies": [], - "relocation_scope": "Organization", - "silos": [ - "Region" - ], - "table_name": "sentry_alertruleexcludedprojects", - "uniques": [ - [ - "alert_rule", - "project" - ] - ] - }, "sentry.alertruleprojects": { "dangling": false, "foreign_keys": { @@ -559,34 +565,6 @@ "table_name": "sentry_alertruletriggeraction", "uniques": [] }, - "sentry.alertruletriggerexclusion": { - "dangling": false, - "foreign_keys": { - "alert_rule_trigger": { - "kind": "FlexibleForeignKey", - "model": "sentry.alertruletrigger", - "nullable": false - }, - "query_subscription": { - "kind": "FlexibleForeignKey", - "model": "sentry.querysubscription", - "nullable": false - } - }, - "model": "sentry.alertruletriggerexclusion", - 
"relocation_dependencies": [], - "relocation_scope": "Organization", - "silos": [ - "Region" - ], - "table_name": "sentry_alertruletriggerexclusion", - "uniques": [ - [ - "alert_rule_trigger", - "query_subscription" - ] - ] - }, "sentry.apiapplication": { "dangling": false, "foreign_keys": { @@ -639,6 +617,11 @@ ], "table_name": "sentry_apiauthorization", "uniques": [ + [ + "application", + "organization_id", + "user" + ], [ "application", "user" @@ -1407,6 +1390,34 @@ ] ] }, + "sentry.dashboardfavoriteuser": { + "dangling": false, + "foreign_keys": { + "dashboard": { + "kind": "FlexibleForeignKey", + "model": "sentry.dashboard", + "nullable": false + }, + "user_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": false + } + }, + "model": "sentry.dashboardfavoriteuser", + "relocation_dependencies": [], + "relocation_scope": "Organization", + "silos": [ + "Region" + ], + "table_name": "sentry_dashboardfavoriteuser", + "uniques": [ + [ + "dashboard", + "user_id" + ] + ] + }, "sentry.dashboardpermissions": { "dangling": false, "foreign_keys": { @@ -2971,7 +2982,7 @@ }, "model": "sentry.incident", "relocation_dependencies": [], - "relocation_scope": "Organization", + "relocation_scope": "Global", "silos": [ "Region" ], @@ -2999,7 +3010,7 @@ }, "model": "sentry.incidentactivity", "relocation_dependencies": [], - "relocation_scope": "Organization", + "relocation_scope": "Global", "silos": [ "Region" ], @@ -3034,34 +3045,6 @@ ] ] }, - "sentry.incidentseen": { - "dangling": false, - "foreign_keys": { - "incident": { - "kind": "FlexibleForeignKey", - "model": "sentry.incident", - "nullable": false - }, - "user_id": { - "kind": "HybridCloudForeignKey", - "model": "sentry.user", - "nullable": false - } - }, - "model": "sentry.incidentseen", - "relocation_dependencies": [], - "relocation_scope": "Excluded", - "silos": [ - "Region" - ], - "table_name": "sentry_incidentseen", - "uniques": [ - [ - "incident", - "user_id" - ] - ] - }, "sentry.incidentsnapshot": { "dangling": false, "foreign_keys": { @@ -3078,7 +3061,7 @@ }, "model": "sentry.incidentsnapshot", "relocation_dependencies": [], - "relocation_scope": "Organization", + "relocation_scope": "Global", "silos": [ "Region" ], @@ -3089,34 +3072,6 @@ ] ] }, - "sentry.incidentsubscription": { - "dangling": false, - "foreign_keys": { - "incident": { - "kind": "FlexibleForeignKey", - "model": "sentry.incident", - "nullable": false - }, - "user_id": { - "kind": "HybridCloudForeignKey", - "model": "sentry.user", - "nullable": false - } - }, - "model": "sentry.incidentsubscription", - "relocation_dependencies": [], - "relocation_scope": "Organization", - "silos": [ - "Region" - ], - "table_name": "sentry_incidentsubscription", - "uniques": [ - [ - "incident", - "user_id" - ] - ] - }, "sentry.incidenttrigger": { "dangling": false, "foreign_keys": { @@ -3133,7 +3088,7 @@ }, "model": "sentry.incidenttrigger", "relocation_dependencies": [], - "relocation_scope": "Organization", + "relocation_scope": "Global", "silos": [ "Region" ], @@ -4062,7 +4017,7 @@ }, "model": "sentry.pendingincidentsnapshot", "relocation_dependencies": [], - "relocation_scope": "Organization", + "relocation_scope": "Global", "silos": [ "Region" ], @@ -6025,7 +5980,7 @@ "relocation_dependencies": [ "sentry.incident" ], - "relocation_scope": "Organization", + "relocation_scope": "Global", "silos": [ "Region" ], @@ -6471,12 +6426,17 @@ ] }, "workflow_engine.detector": { - "dangling": false, + "dangling": true, "foreign_keys": { + "created_by_id": { + "kind": 
"HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, "organization": { "kind": "FlexibleForeignKey", "model": "sentry.organization", - "nullable": false + "nullable": true }, "owner_team": { "kind": "FlexibleForeignKey", @@ -6488,6 +6448,11 @@ "model": "sentry.user", "nullable": true }, + "project": { + "kind": "FlexibleForeignKey", + "model": "sentry.project", + "nullable": true + }, "workflow_condition_group": { "kind": "FlexibleForeignKey", "model": "workflow_engine.dataconditiongroup", @@ -6557,11 +6522,31 @@ "workflow_engine.workflow": { "dangling": false, "foreign_keys": { + "created_by_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, + "environment": { + "kind": "FlexibleForeignKey", + "model": "sentry.environment", + "nullable": true + }, "organization": { "kind": "FlexibleForeignKey", "model": "sentry.organization", "nullable": false }, + "owner_team": { + "kind": "FlexibleForeignKey", + "model": "sentry.team", + "nullable": true + }, + "owner_user_id": { + "kind": "HybridCloudForeignKey", + "model": "sentry.user", + "nullable": true + }, "when_condition_group": { "kind": "FlexibleForeignKey", "model": "workflow_engine.dataconditiongroup", diff --git a/fixtures/backup/model_dependencies/flat.json b/fixtures/backup/model_dependencies/flat.json index 642f0dac079596..c8a043ef9367c2 100644 --- a/fixtures/backup/model_dependencies/flat.json +++ b/fixtures/backup/model_dependencies/flat.json @@ -7,6 +7,10 @@ "flags.flagauditlogmodel": [ "sentry.organization" ], + "flags.flagwebhooksigningsecretmodel": [ + "sentry.organization", + "sentry.user" + ], "hybridcloud.apikeyreplica": [ "sentry.apikey", "sentry.organization" @@ -64,10 +68,6 @@ "sentry.alertrule", "sentry.user" ], - "sentry.alertruleexcludedprojects": [ - "sentry.alertrule", - "sentry.project" - ], "sentry.alertruleprojects": [ "sentry.alertrule", "sentry.project" @@ -80,10 +80,6 @@ "sentry.integration", "sentry.sentryapp" ], - "sentry.alertruletriggerexclusion": [ - "sentry.alertruletrigger", - "sentry.querysubscription" - ], "sentry.apiapplication": [ "sentry.user" ], @@ -193,6 +189,10 @@ "sentry.organization", "sentry.user" ], + "sentry.dashboardfavoriteuser": [ + "sentry.dashboard", + "sentry.user" + ], "sentry.dashboardpermissions": [ "sentry.dashboard" ], @@ -424,18 +424,10 @@ "sentry.incident", "sentry.project" ], - "sentry.incidentseen": [ - "sentry.incident", - "sentry.user" - ], "sentry.incidentsnapshot": [ "sentry.incident", "sentry.timeseriessnapshot" ], - "sentry.incidentsubscription": [ - "sentry.incident", - "sentry.user" - ], "sentry.incidenttrigger": [ "sentry.alertruletrigger", "sentry.incident" @@ -895,6 +887,7 @@ ], "workflow_engine.detector": [ "sentry.organization", + "sentry.project", "sentry.team", "sentry.user", "workflow_engine.dataconditiongroup" @@ -907,7 +900,10 @@ "workflow_engine.workflow" ], "workflow_engine.workflow": [ + "sentry.environment", "sentry.organization", + "sentry.team", + "sentry.user", "workflow_engine.dataconditiongroup" ], "workflow_engine.workflowdataconditiongroup": [ diff --git a/fixtures/backup/model_dependencies/sorted.json b/fixtures/backup/model_dependencies/sorted.json index 853b78707b5191..0d3184b41b5679 100644 --- a/fixtures/backup/model_dependencies/sorted.json +++ b/fixtures/backup/model_dependencies/sorted.json @@ -53,12 +53,6 @@ "workflow_engine.dataconditiongroup", "workflow_engine.dataconditiongroupaction", "workflow_engine.datasource", - "workflow_engine.detector", - "workflow_engine.detectorstate", 
- "workflow_engine.workflow", - "workflow_engine.workflowdataconditiongroup", - "workflow_engine.detectorworkflow", - "workflow_engine.datasourcedetector", "workflow_engine.datacondition", "sentry.savedsearch", "sentry.rollbackuser", @@ -116,8 +110,12 @@ "hybridcloud.organizationslugreservationreplica", "hybridcloud.externalactorreplica", "hybridcloud.apikeyreplica", + "flags.flagwebhooksigningsecretmodel", "flags.flagauditlogmodel", "feedback.feedback", + "workflow_engine.workflow", + "workflow_engine.detector", + "workflow_engine.datasourcedetector", "uptime.projectuptimesubscription", "sentry.userreport", "sentry.useroption", @@ -191,6 +189,7 @@ "sentry.dashboardwidget", "sentry.dashboardproject", "sentry.dashboardpermissions", + "sentry.dashboardfavoriteuser", "sentry.customdynamicsamplingruleproject", "sentry.commitfilechange", "sentry.broadcastseen", @@ -204,6 +203,9 @@ "sentry.apiauthorization", "sentry.alertrule", "hybridcloud.apitokenreplica", + "workflow_engine.workflowdataconditiongroup", + "workflow_engine.detectorworkflow", + "workflow_engine.detectorstate", "sentry.teamkeytransaction", "sentry.snubaqueryeventtype", "sentry.sentryappinstallation", @@ -224,7 +226,6 @@ "sentry.dashboardpermissionsteam", "sentry.alertruletrigger", "sentry.alertruleprojects", - "sentry.alertruleexcludedprojects", "sentry.alertruleactivity", "sentry.alertruleactivations", "sentry.alertruleactivationcondition", @@ -233,16 +234,13 @@ "sentry.sentryappinstallationforprovider", "sentry.incident", "sentry.dashboardwidgetqueryondemand", - "sentry.alertruletriggerexclusion", "sentry.alertruletriggeraction", "sentry.timeseriessnapshot", "sentry.servicehookproject", "sentry.pendingincidentsnapshot", "sentry.notificationmessage", "sentry.incidenttrigger", - "sentry.incidentsubscription", "sentry.incidentsnapshot", - "sentry.incidentseen", "sentry.incidentproject", "sentry.incidentactivity" ] \ No newline at end of file diff --git a/fixtures/backup/model_dependencies/truncate.json b/fixtures/backup/model_dependencies/truncate.json index 06a3a97f4b8f4f..5dd22a1e6a9e44 100644 --- a/fixtures/backup/model_dependencies/truncate.json +++ b/fixtures/backup/model_dependencies/truncate.json @@ -53,12 +53,6 @@ "workflow_engine_dataconditiongroup", "workflow_engine_dataconditiongroupaction", "workflow_engine_datasource", - "workflow_engine_detector", - "workflow_engine_detectorstate", - "workflow_engine_workflow", - "workflow_engine_workflowdataconditiongroup", - "workflow_engine_detectorworkflow", - "workflow_engine_datasourcedetector", "workflow_engine_datacondition", "sentry_savedsearch", "sentry_rollbackuser", @@ -116,8 +110,12 @@ "hybridcloud_organizationslugreservationreplica", "hybridcloud_externalactorreplica", "hybridcloud_apikeyreplica", + "flags_webhooksigningsecret", "flags_audit_log", "feedback_feedback", + "workflow_engine_workflow", + "workflow_engine_detector", + "workflow_engine_datasourcedetector", "uptime_projectuptimesubscription", "sentry_userreport", "sentry_useroption", @@ -191,6 +189,7 @@ "sentry_dashboardwidget", "sentry_dashboardproject", "sentry_dashboardpermissions", + "sentry_dashboardfavoriteuser", "sentry_customdynamicsamplingruleproject", "sentry_commitfilechange", "sentry_broadcastseen", @@ -204,6 +203,9 @@ "sentry_apiauthorization", "sentry_alertrule", "hybridcloud_apitokenreplica", + "workflow_engine_workflowdataconditiongroup", + "workflow_engine_detectorworkflow", + "workflow_engine_detectorstate", "sentry_performanceteamkeytransaction", "sentry_snubaqueryeventtype", 
"sentry_sentryappinstallation", @@ -224,7 +226,6 @@ "sentry_dashboardpermissionsteam", "sentry_alertruletrigger", "sentry_alertruleprojects", - "sentry_alertruleexcludedprojects", "sentry_alertruleactivity", "sentry_alertruleactivations", "sentry_alertruleactivationcondition", @@ -233,16 +234,13 @@ "sentry_sentryappinstallationforprovider", "sentry_incident", "sentry_dashboardwidgetqueryondemand", - "sentry_alertruletriggerexclusion", "sentry_alertruletriggeraction", "sentry_timeseriessnapshot", "sentry_servicehookproject", "sentry_pendingincidentsnapshot", "sentry_notificationmessage", "sentry_incidenttrigger", - "sentry_incidentsubscription", "sentry_incidentsnapshot", - "sentry_incidentseen", "sentry_incidentproject", "sentry_incidentactivity" ] \ No newline at end of file diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..27b7cdf8c1b768 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0001_initial.py @@ -0,0 +1,27 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField(null=True)), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0002_delete_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0002_delete_pending.py new file mode 100644 index 00000000000000..c4933d32cfcede --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0002_delete_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_field_double_pending_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0003_double_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0003_double_pending.py new file mode 100644 index 00000000000000..ce220395342ff9 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/0003_double_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_field_double_pending_app", "0002_delete_pending"), + ] 
+ + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_double_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..4446837fa33671 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py @@ -0,0 +1,45 @@ +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="bad_flow_delete_field_pending_with_fk_constraint_app.fktable", + db_index=False, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..f036396f10a590 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,19 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("bad_flow_delete_field_pending_with_fk_constraint_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="fk_table", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py 
b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/models.py new file mode 100644 index 00000000000000..d936d5039213f7 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_fk_constraint_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, db_index=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..fd1e8c68b7800c --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py @@ -0,0 +1,25 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField()), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..6502d91d97cd39 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("bad_flow_delete_field_pending_with_not_null_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_pending_with_not_null_app/models.py @@ -0,0 
+1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..482ebcd0ab28f4 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0001_initial.py @@ -0,0 +1,27 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField()), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..b4cad5043fc589 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_field_without_pending_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_field_without_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..69de29635772a4 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0001_initial.py @@ -0,0 +1,26 @@ +# Generated by 
Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0002_delete_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0002_delete_pending.py new file mode 100644 index 00000000000000..917b83001e8a8d --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0002_delete_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_model_double_pending_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0003_double_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0003_double_pending.py new file mode 100644 index 00000000000000..e52a64272a2020 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/0003_double_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_model_double_pending_app", "0002_delete_pending"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_double_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..69de29635772a4 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0001_initial.py @@ -0,0 +1,26 @@ +# Generated by 
Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..10d83c4d9c3e39 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + + dependencies = [ + ("bad_flow_delete_model_without_pending_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_model_without_pending_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..2747f841d8a570 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py @@ -0,0 +1,45 @@ +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="bad_flow_delete_pending_with_fk_constraints_app.fktable", + db_index=False, + ), + ), + ], + ), + ] diff --git 
a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..721c22b2798a0e --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,18 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("bad_flow_delete_pending_with_fk_constraints_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/models.py b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/models.py new file mode 100644 index 00000000000000..d936d5039213f7 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_delete_pending_with_fk_constraints_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, db_index=False) diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/__init__.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..8d119afd028622 --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/0001_initial.py @@ -0,0 +1,16 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + allow_run_sql = False + + operations = [migrations.RunSQL("select 1;")] diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/__init__.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/models.py b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/bad_flow_run_sql_disabled_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git 
a/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..8c7786cb85d438 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/0001_initial.py @@ -0,0 +1,26 @@ +# Generated by Django 3.1 on 2019-09-22 21:41 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/0002_add_field_notnull_db_default.py b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/0002_add_field_notnull_db_default.py new file mode 100644 index 00000000000000..8ddfade9aa173b --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/0002_add_field_notnull_db_default.py @@ -0,0 +1,20 @@ +# Generated by Django 3.1 on 2019-09-22 21:43 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + dependencies = [ + ("good_flow_add_column_with_notnull_db_default_app", "0001_initial"), + ] + + operations = [ + migrations.AddField( + model_name="testtable", + name="field", + field=models.IntegerField(db_default=0), + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/models.py b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/models.py new file mode 100644 index 00000000000000..dcead4b757d81f --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_add_column_with_notnull_db_default_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(db_default=0) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..85f25fffd727f5 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0001_initial.py @@ -0,0 +1,47 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 
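
An aside on the good_flow_add_column_with_notnull_db_default_app fixture above: it captures the one shape in which adding a NOT NULL column in a single migration is treated as safe, namely when the column carries a database-level default (db_default), so the default lives in the database schema and existing rows pick it up without an application-level backfill. A minimal sketch of that shape, using a hypothetical app label, model, and field name, might look like:

from django.db import migrations, models

from sentry.new_migrations.migrations import CheckedMigration


class Migration(CheckedMigration):
    dependencies = [("example_app", "0001_initial")]  # hypothetical app label

    operations = [
        migrations.AddField(
            model_name="exampletable",
            name="new_counter",
            # db_default writes the default into the database schema, so the
            # NOT NULL constraint is satisfiable for existing rows without a
            # separate backfill migration.
            field=models.IntegerField(db_default=0),
        ),
    ]

Whether the migration checker accepts this exact spelling is decided by CheckedMigration's validation logic, which sits outside this diff; the fixture only demonstrates the pattern it is expected to allow.
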
+import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_fk_constraint_app.fktable", + db_index=False, + null=True, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_remove_constraints_and_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_remove_constraints_and_pending.py new file mode 100644 index 00000000000000..767c38e610f98b --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0002_remove_constraints_and_pending.py @@ -0,0 +1,34 @@ +import django +from django.db import migrations + +import sentry +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("good_flow_delete_field_pending_with_fk_constraint_app", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="TestTable", + name="fk_table", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_fk_constraint_app.fktable", + db_index=False, + db_constraint=False, + null=True, + ), + ), + SafeRemoveField( + model_name="testtable", + name="fk_table", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..adae7ffa190165 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/0003_delete.py @@ -0,0 +1,20 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ( + "good_flow_delete_field_pending_with_fk_constraint_app", + "0002_remove_constraints_and_pending", + ), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="fk_table", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/models.py 
b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/models.py new file mode 100644 index 00000000000000..e78bdd99a9322d --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_fk_constraint_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, null=True, db_index=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..1a0f0a785f5159 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0001_initial.py @@ -0,0 +1,37 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField(null=True)), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0002_remove_not_null_and_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0002_remove_not_null_and_pending.py new file mode 100644 index 00000000000000..e413d67d2a23b6 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0002_remove_not_null_and_pending.py @@ -0,0 +1,24 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_pending_with_not_null_app", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="TestTable", + name="field", + field=models.IntegerField(null=True), + ), + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..98ed7ffbe84c48 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/0003_delete.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import 
CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_pending_with_not_null_app", "0002_remove_not_null_and_pending"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..0c0b198c43b88f --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0001_initial.py @@ -0,0 +1,55 @@ +from django.db import migrations, models + +from sentry.db.models import FlexibleForeignKey +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + checked = False + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="OtherTable", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False)), + ], + ), + migrations.CreateModel( + name="M2MTable", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False)), + ( + "alert_rule", + FlexibleForeignKey( + on_delete=models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_not_null_m2m_app.othertable", + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False)), + ( + "excluded_projects", + models.ManyToManyField( + through="good_flow_delete_field_pending_with_not_null_m2m_app.M2MTable", + to="good_flow_delete_field_pending_with_not_null_m2m_app.othertable", + ), + ), + ], + ), + migrations.AddField( + model_name="m2mtable", + name="test_table", + field=FlexibleForeignKey( + on_delete=models.deletion.CASCADE, + to="good_flow_delete_field_pending_with_not_null_m2m_app.testtable", + ), + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0002_delete_without_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0002_delete_without_pending.py new file mode 100644 index 00000000000000..7cd3b1a05e1e07 --- /dev/null 
+++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/0002_delete_without_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_pending_with_not_null_m2m_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="excluded_projects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/models.py new file mode 100644 index 00000000000000..90c1bc2b9e0f5e --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_pending_with_not_null_m2m_app/models.py @@ -0,0 +1,18 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class OtherTable(models.Model): + pass + + +class M2MTable(models.Model): + alert_rule = FlexibleForeignKey(OtherTable) + test_table = FlexibleForeignKey( + "good_flow_delete_field_pending_with_not_null_m2m_app.TestTable" + ) + + +class TestTable(models.Model): + excluded_projects = models.ManyToManyField(OtherTable, through=M2MTable) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..2540b245ec0644 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0001_initial.py @@ -0,0 +1,23 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + initial = True + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("field", models.IntegerField(null=True)), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0002_set_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0002_set_pending.py new file mode 100644 index 00000000000000..48b502df50c460 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0002_set_pending.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_simple_app", "0001_initial"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", 
+ deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..e57524bf66d90d --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/0003_delete.py @@ -0,0 +1,17 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_field_simple_app", "0002_set_pending"), + ] + + operations = [ + SafeRemoveField( + model_name="testtable", + name="field", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/models.py new file mode 100644 index 00000000000000..f472e7d8e90506 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_field_simple_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=True) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py index aa7a0b3105125d..4d9779f1c977f0 100644 --- a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py +++ b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0002_delete_model_state.py @@ -1,8 +1,7 @@ # Generated by Django 3.1 on 2019-09-22 21:47 - -from django.db import migrations - from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction class Migration(CheckedMigration): @@ -12,11 +11,5 @@ class Migration(CheckedMigration): ] operations = [ - migrations.SeparateDatabaseAndState( - state_operations=[ - migrations.DeleteModel( - name="TestTable", - ), - ] - ) + SafeDeleteModel(name="TestTable", deletion_action=DeletionAction.MOVE_TO_PENDING), ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py index f0a85b49040a3e..8a6e05080a31bc 100644 --- a/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py +++ b/fixtures/safe_migrations_apps/good_flow_delete_model_state_app/migrations/0003_delete_table.py @@ -1,16 +1,15 @@ # Generated by Django 3.1 on 2019-09-22 21:47 - -from django.db import migrations - from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction class Migration(CheckedMigration): dependencies = [ - ("good_flow_delete_model_state_app", "0001_initial"), + ("good_flow_delete_model_state_app", 
"0002_delete_model_state"), ] operations = [ - migrations.RunSQL('DROP TABLE "good_flow_delete_model_state_app_testtable";'), + SafeDeleteModel(name="TestTable", deletion_action=DeletionAction.DELETE), ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..6f55cccc2e683d --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0001_initial.py @@ -0,0 +1,46 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 +import django +from django.db import migrations, models + +import sentry +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="FkTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "fk_table", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_pending_with_fk_constraints_app.fktable", + db_index=False, + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0002_remove_constraints_and_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0002_remove_constraints_and_pending.py new file mode 100644 index 00000000000000..cdb0408e096d99 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0002_remove_constraints_and_pending.py @@ -0,0 +1,32 @@ +import django +from django.db import migrations + +import sentry +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + atomic = False + + dependencies = [ + ("good_flow_delete_pending_with_fk_constraints_app", "0001_initial"), + ] + + operations = [ + migrations.AlterField( + model_name="TestTable", + name="fk_table", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="good_flow_delete_pending_with_fk_constraints_app.fktable", + db_index=False, + db_constraint=False, + ), + ), + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..ac2813a8d7f014 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/0003_delete.py @@ -0,0 +1,16 @@ +from sentry.new_migrations.migrations import 
CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_pending_with_fk_constraints_app", "0002_remove_constraints_and_pending"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/models.py new file mode 100644 index 00000000000000..d936d5039213f7 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_pending_with_fk_constraints_app/models.py @@ -0,0 +1,12 @@ +from django.db import models + +from sentry.db.models import FlexibleForeignKey + + +class FkTable(models.Model): + field = models.IntegerField(default=0, null=False) + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) + fk_table = FlexibleForeignKey(FkTable, db_index=False) diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..2b6d293ee049e6 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0001_initial.py @@ -0,0 +1,22 @@ +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + initial = True + dependencies = [] + + operations = [ + migrations.CreateModel( + name="TestTable", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ], + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0002_set_pending.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0002_set_pending.py new file mode 100644 index 00000000000000..c7475e451e0bcd --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0002_set_pending.py @@ -0,0 +1,16 @@ +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_simple_app", "0001_initial"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0003_delete.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0003_delete.py new file mode 100644 index 00000000000000..796cf774758675 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/0003_delete.py @@ -0,0 +1,16 @@ +from sentry.new_migrations.migrations import 
CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + dependencies = [ + ("good_flow_delete_simple_app", "0002_set_pending"), + ] + + operations = [ + SafeDeleteModel( + name="TestTable", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_delete_simple_app/models.py b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_delete_simple_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/__init__.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/0001_initial.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/0001_initial.py new file mode 100644 index 00000000000000..8939a5ba3393b7 --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/0001_initial.py @@ -0,0 +1,16 @@ +# Generated by Django 3.1 on 2019-09-22 21:47 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + + initial = True + + dependencies = [] + + allow_run_sql = True + + operations = [migrations.RunSQL("select 1;")] diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/__init__.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/models.py b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/models.py new file mode 100644 index 00000000000000..770fa149c355ce --- /dev/null +++ b/fixtures/safe_migrations_apps/good_flow_run_sql_enabled_app/models.py @@ -0,0 +1,5 @@ +from django.db import models + + +class TestTable(models.Model): + field = models.IntegerField(default=0, null=False) diff --git a/fixtures/sdk_crash_detection/crash_event_android.py b/fixtures/sdk_crash_detection/crash_event_android.py index fcac4afddd29f8..bf6634215561a9 100644 --- a/fixtures/sdk_crash_detection/crash_event_android.py +++ b/fixtures/sdk_crash_detection/crash_event_android.py @@ -44,7 +44,9 @@ def get_frames( def get_crash_event( - sdk_frame_module="io.sentry.Hub", system_frame_module="java.lang.reflect.Method", **kwargs + sdk_frame_module="io.sentry.Hub", + system_frame_module="java.lang.reflect.Method", + **kwargs, ) -> dict[str, object]: return get_crash_event_with_frames( get_frames(sdk_frame_module, system_frame_module), @@ -52,6 +54,48 @@ def get_crash_event( ) +def get_apex_frames( + apex_frame_function: str, + apex_frame_package: str, + system_frame_package: str, +) -> Sequence[MutableMapping[str, str]]: + frames = [ + { + "function": "__pthread_start", + "raw_function": "__pthread_start(void*)", + "symbol": 
"_ZL15__pthread_startPv", + "package": "/apex/com.android.runtime/lib/bionic/libc.so", + }, + { + "function": "__start_thread", + "symbol": "__start_thread", + "package": "/apex/com.android.art/lib64/bionic/libc.so", + }, + { + "function": apex_frame_function, + "symbol": apex_frame_function, + "package": apex_frame_package, + }, + { + "function": "invoke", + "package": system_frame_package, + }, + ] + return frames + + +def get_apex_crash_event( + apex_frame_function="__start_thread", + apex_frame_package="/apex/com.android.art/lib64/bionic/libc.so", + system_frame_package="/apex/com.android.art/lib64/libart.so", + **kwargs, +) -> dict[str, object]: + return get_crash_event_with_frames( + get_apex_frames(apex_frame_function, apex_frame_package, system_frame_package), + **kwargs, + ) + + def get_crash_event_with_frames(frames: Sequence[Mapping[str, str]], **kwargs) -> dict[str, object]: result = { "event_id": "0a52a8331d3b45089ebd74f8118d4fa1", diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index f8dd6093bdd97b..196ba3eb4f87f1 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -6,11 +6,19 @@ To resolve this, rebase against latest master and regenerate your migration. Thi will then be regenerated, and you should be able to merge without conflicts. feedback: 0004_index_together -hybridcloud: 0016_add_control_cacheversion + +hybridcloud: 0017_add_scoping_organization_apitokenreplica + nodestore: 0002_nodestore_no_dictfield + remote_subscriptions: 0003_drop_remote_subscription + replays: 0004_index_together -sentry: 0789_add_unique_constraint_to_rollbackorganization + +sentry: 0802_remove_grouping_auto_update_option + social_auth: 0002_default_auto_field -uptime: 0017_unique_on_timeout -workflow_engine: 0012_data_source_type_change + +uptime: 0018_add_trace_sampling_field_to_uptime + +workflow_engine: 0014_model_additions_for_milestones diff --git a/package.json b/package.json index 6a20d793a6507d..0a27d0d93efa85 100644 --- a/package.json +++ b/package.json @@ -56,13 +56,13 @@ "@sentry-internal/rrweb": "2.26.0", "@sentry-internal/rrweb-player": "2.26.0", "@sentry-internal/rrweb-snapshot": "2.26.0", - "@sentry/core": "8.39.0-dev.0", - "@sentry/node": "8.39.0-dev.0", - "@sentry/react": "8.39.0-dev.0", + "@sentry/core": "8.39.0-beta.0", + "@sentry/node": "8.39.0-beta.0", + "@sentry/react": "8.39.0-beta.0", "@sentry/release-parser": "^1.3.1", "@sentry/status-page-list": "^0.3.0", - "@sentry/types": "8.39.0-dev.0", - "@sentry/utils": "8.39.0-dev.0", + "@sentry/types": "8.39.0-beta.0", + "@sentry/utils": "8.39.0-beta.0", "@sentry/webpack-plugin": "^2.22.4", "@spotlightjs/spotlight": "^2.0.0-alpha.1", "@tanstack/react-query": "^5.56.2", @@ -176,30 +176,43 @@ "devDependencies": { "@biomejs/biome": "^1.9.1", "@codecov/webpack-plugin": "^1.2.0", + "@emotion/eslint-plugin": "^11.12.0", "@pmmmwh/react-refresh-webpack-plugin": "0.5.15", "@sentry/jest-environment": "6.0.0", - "@sentry/profiling-node": "8.39.0-dev.0", + "@sentry/profiling-node": "8.39.0-beta.0", "@styled/typescript-styled-plugin": "^1.0.1", "@testing-library/dom": "10.1.0", "@testing-library/jest-dom": "6.4.5", "@testing-library/react": "16.0.0", "@testing-library/user-event": "14.5.2", - "@types/node": "^20.14.9", + "@types/node": "^22.9.1", + "@typescript-eslint/eslint-plugin": "^8.8.1", + "@typescript-eslint/parser": "^8.8.1", "babel-gettext-extractor": "^4.1.3", "babel-jest": "29.7.0", "benchmark": "^2.1.4", "eslint": "8.57.1", - "eslint-config-sentry-app": "2.9.0", + 
"eslint-import-resolver-typescript": "^3.6.3", + "eslint-plugin-import": "^2.31.0", + "eslint-plugin-jest": "^28.8.3", + "eslint-plugin-jest-dom": "^5.4.0", + "eslint-plugin-no-lookahead-lookbehind-regexp": "0.1.0", + "eslint-plugin-react": "^7.37.1", + "eslint-plugin-react-hooks": "^4.6.2", + "eslint-plugin-sentry": "^2.10.0", + "eslint-plugin-simple-import-sort": "^12.1.1", + "eslint-plugin-testing-library": "^6.3.0", + "eslint-plugin-typescript-sort-keys": "^3.3.0", "html-webpack-plugin": "^5.6.0", "jest": "29.7.0", "jest-canvas-mock": "^2.5.2", "jest-environment-jsdom": "29.7.0", "jest-fail-on-console": "3.3.0", "jest-junit": "16.0.0", - "postcss-styled-syntax": "0.6.4", + "postcss-styled-syntax": "0.7.0", "react-refresh": "0.14.0", - "stylelint": "16.2.1", - "stylelint-config-recommended": "^14.0.0", + "stylelint": "16.10.0", + "stylelint-config-recommended": "^14.0.1", "terser": "5.31.6", "tsconfig-paths": "^4.2.0", "webpack-dev-server": "5.1.0" @@ -214,7 +227,7 @@ "proxyURL": "http://localhost:8000", "scripts": { "test": "node scripts/test.js --watch", - "test-ci": "node scripts/test.js --ci --coverage --maxWorkers=100% --colors", + "test-ci": "node scripts/test.js --ci --maxWorkers=100% --colors", "test-debug": "node --inspect-brk scripts/test.js --runInBand", "test-precommit": "node scripts/test.js --bail --findRelatedTests -u", "test-staged": "node scripts/test.js --findRelatedTests $(git diff --name-only --cached)", diff --git a/pyproject.toml b/pyproject.toml index bc3ac9fa22f982..4747d1799e7acb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -121,7 +121,6 @@ module = [ "sentry.api.base", "sentry.api.bases.organization_events", "sentry.api.bases.organizationmember", - "sentry.api.bases.project", "sentry.api.bases.team", "sentry.api.endpoints.accept_organization_invite", "sentry.api.endpoints.auth_config", @@ -162,7 +161,6 @@ module = [ "sentry.api.endpoints.user_subscriptions", "sentry.api.event_search", "sentry.api.helpers.group_index.index", - "sentry.api.helpers.group_index.update", "sentry.api.invite_helper", "sentry.api.issue_search", "sentry.api.paginator", @@ -180,7 +178,6 @@ module = [ "sentry.api.serializers.models.rule", "sentry.api.serializers.models.team", "sentry.api.serializers.rest_framework.mentions", - "sentry.api.serializers.rest_framework.notification_action", "sentry.auth.helper", "sentry.auth.provider", "sentry.auth.system", @@ -190,20 +187,16 @@ module = [ "sentry.eventstore.models", "sentry.grouping.strategies.legacy", "sentry.identity.bitbucket.provider", - "sentry.identity.github_enterprise.provider", "sentry.identity.gitlab.provider", "sentry.identity.oauth2", "sentry.identity.pipeline", "sentry.identity.providers.dummy", - "sentry.identity.slack.provider", "sentry.incidents.endpoints.bases", "sentry.incidents.endpoints.organization_alert_rule_details", "sentry.incidents.endpoints.organization_alert_rule_index", - "sentry.incidents.endpoints.organization_incident_comment_details", "sentry.incidents.endpoints.organization_incident_index", "sentry.incidents.subscription_processor", "sentry.incidents.tasks", - "sentry.integrations.api.bases.integration", "sentry.integrations.aws_lambda.integration", "sentry.integrations.bitbucket.client", "sentry.integrations.bitbucket.installed", @@ -269,7 +262,6 @@ module = [ "sentry.net.http", "sentry.net.socket", "sentry.notifications.notifications.activity.base", - "sentry.notifications.notifications.activity.release", "sentry.notifications.notifications.integration_nudge", "sentry.pipeline.base", 
"sentry.pipeline.views.base", @@ -283,7 +275,6 @@ module = [ "sentry.receivers.releases", "sentry.release_health.metrics_sessions_v2", "sentry.replays.endpoints.project_replay_clicks_index", - "sentry.replays.lib.query", "sentry.rules.actions.integrations.base", "sentry.rules.actions.integrations.create_ticket.form", "sentry.rules.actions.integrations.create_ticket.utils", @@ -319,11 +310,9 @@ module = [ "sentry.tasks.auth", "sentry.tasks.base", "sentry.testutils.cases", - "sentry.testutils.fixtures", "sentry.testutils.helpers.notifications", "sentry.utils.auth", "sentry.utils.committers", - "sentry.utils.services", "sentry.web.forms.accounts", "sentry.web.frontend.auth_login", "sentry.web.frontend.auth_organization_login", @@ -397,6 +386,7 @@ module = [ "sentry.grouping.ingest.*", "sentry.grouping.parameterization", "sentry.hybridcloud.*", + "sentry.identity.github_enterprise.*", "sentry.ingest.slicing", "sentry.integrations.models.integration_feature", "sentry.issues", @@ -500,6 +490,7 @@ module = [ "sentry.utils.redis", "sentry.utils.redis_metrics", "sentry.utils.sentry_apps.*", + "sentry.utils.services", "sentry.utils.sms", "sentry.utils.snowflake", "sentry.utils.urls", diff --git a/requirements-base.txt b/requirements-base.txt index bfe7d332e54808..61d5cbf7b5c977 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -13,7 +13,7 @@ cssselect>=1.0.3 datadog>=0.49 django-crispy-forms>=1.14.0 django-csp>=3.8 -django-pg-zero-downtime-migrations>=0.13 +django-pg-zero-downtime-migrations>=0.16 Django>=5.1.1 djangorestframework>=3.15.1 drf-spectacular>=0.26.3 @@ -60,18 +60,18 @@ rb>=1.9.0 redis-py-cluster>=2.1.0 redis>=3.4.1 requests-oauthlib>=1.2.0 -requests>=2.25.1 +requests>=2.32.3 # [start] jsonschema format validators rfc3339-validator>=0.1.2 rfc3986-validator>=0.1.1 # [end] jsonschema format validators -sentry-arroyo>=2.16.5 -sentry-kafka-schemas>=0.1.118 +sentry-arroyo>=2.18.2 +sentry-kafka-schemas>=0.1.122 sentry-ophio==1.0.0 -sentry-protos>=0.1.34 +sentry-protos>=0.1.37 sentry-redis-tools>=0.1.7 -sentry-relay>=0.9.2 -sentry-sdk[http2]>=2.18.0 +sentry-relay>=0.9.3 +sentry-sdk[http2]>=2.19.2 slack-sdk>=3.27.2 snuba-sdk>=3.0.43 simplejson>=3.17.6 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index 47eaa7f9d4ddf0..4f4e234cd10eb8 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -20,7 +20,7 @@ brotli==1.1.0 build==0.8.0 cachetools==5.3.0 celery==5.3.5 -certifi==2023.7.22 +certifi==2024.7.4 cffi==1.17.1 cfgv==3.3.1 charset-normalizer==3.4.0 @@ -36,16 +36,16 @@ cryptography==43.0.1 cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 -devservices==1.0.2 +devservices==1.0.4 distlib==0.3.8 distro==1.8.0 django==5.1.1 django-crispy-forms==1.14.0 django-csp==3.8 -django-pg-zero-downtime-migrations==0.13 +django-pg-zero-downtime-migrations==0.16 django-stubs-ext==5.1.1 djangorestframework==3.15.2 -docker==6.1.3 +docker==7.1.0 drf-spectacular==0.26.3 email-reply-parser==0.5.12 execnet==1.9.0 @@ -71,7 +71,7 @@ googleapis-common-protos==1.63.2 grpc-google-iam-v1==0.13.1 grpc-stubs==1.53.0.5 grpcio==1.67.0 -grpcio-status==1.60.1 +grpcio-status==1.66.1 h11==0.13.0 h2==4.1.0 hiredis==2.3.2 @@ -152,7 +152,7 @@ pytest-django==4.9.0 pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 -pytest-rerunfailures==14.0 +pytest-rerunfailures==15.0 pytest-sentry==0.3.0 pytest-xdist==3.0.2 python-dateutil==2.9.0 @@ -170,7 +170,7 @@ redis-py-cluster==2.1.0 referencing==0.30.2 regex==2022.9.13 reportlab==4.0.7 -requests==2.31.0 
+requests==2.32.3 requests-file==2.1.0 requests-oauthlib==1.2.0 responses==0.23.1 @@ -180,17 +180,18 @@ rpds-py==0.20.0 rsa==4.8 s3transfer==0.10.0 selenium==4.16.0 -sentry-arroyo==2.16.5 +sentry-arroyo==2.18.2 sentry-cli==2.16.0 -sentry-devenv==1.13.0 +sentry-covdefaults-disable-branch-coverage==1.0.2 +sentry-devenv==1.14.2 sentry-forked-django-stubs==5.1.1.post1 sentry-forked-djangorestframework-stubs==3.15.1.post2 -sentry-kafka-schemas==0.1.118 +sentry-kafka-schemas==0.1.122 sentry-ophio==1.0.0 -sentry-protos==0.1.34 +sentry-protos==0.1.37 sentry-redis-tools==0.1.7 -sentry-relay==0.9.2 -sentry-sdk==2.18.0 +sentry-relay==0.9.3 +sentry-sdk==2.19.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 @@ -226,7 +227,7 @@ types-python-dateutil==2.8.19 types-pytz==2022.1.2 types-pyyaml==6.0.11 types-redis==3.5.18 -types-requests==2.31.0.20240406 +types-requests==2.32.0.20241016 types-setuptools==69.0.0.0 types-simplejson==3.17.7.2 typing-extensions==4.12.0 @@ -238,8 +239,7 @@ urllib3==2.2.2 vine==5.1.0 virtualenv==20.25.0 wcwidth==0.2.10 -websocket-client==1.3.2 -werkzeug==3.0.3 +werkzeug==3.0.6 wheel==0.38.4 wrapt==1.17.0rc1 wsproto==1.1.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index 4a393a332f9598..999e42e802ae62 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,10 +1,11 @@ --index-url https://pypi.devinfra.sentry.io/simple -sentry-devenv>=1.13.0 -devservices>=1.0.2 +sentry-devenv>=1.14.2 +devservices>=1.0.4 covdefaults>=2.3.0 -docker>=6 +sentry-covdefaults-disable-branch-coverage>=1.0.2 +docker>=7 time-machine>=2.16.0 honcho>=2 openapi-core>=0.18.2 @@ -14,7 +15,7 @@ pytest-cov>=4.0.0 pytest-django>=4.9.0 pytest-fail-slow>=0.3.0 pytest-json-report>=1.5.0 -pytest-rerunfailures>=14 +pytest-rerunfailures>=15 pytest-sentry>=0.3.0 pytest-xdist>=3 responses>=0.23.1 @@ -55,6 +56,6 @@ types-pytz types-pyyaml # make sure to match close-enough to redis== types-redis<4 -types-requests>=2.31.0.20240406 +types-requests>=2.32.0.20241016 types-setuptools>=68 types-simplejson>=3.17.7.2 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 67eab6c19f0449..82269811b9df59 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -18,7 +18,7 @@ botocore==1.34.128 brotli==1.1.0 cachetools==5.3.0 celery==5.3.5 -certifi==2023.7.22 +certifi==2024.7.4 cffi==1.17.1 charset-normalizer==3.4.0 click==8.1.7 @@ -35,7 +35,7 @@ distro==1.8.0 django==5.1.1 django-crispy-forms==1.14.0 django-csp==3.8 -django-pg-zero-downtime-migrations==0.13 +django-pg-zero-downtime-migrations==0.16 djangorestframework==3.15.2 drf-spectacular==0.26.3 email-reply-parser==0.5.12 @@ -58,7 +58,7 @@ googleapis-common-protos==1.63.2 grpc-google-iam-v1==0.13.1 grpc-stubs==1.53.0.5 grpcio==1.67.0 -grpcio-status==1.60.1 +grpcio-status==1.66.1 h11==0.14.0 h2==4.1.0 hiredis==2.3.2 @@ -116,7 +116,7 @@ redis-py-cluster==2.1.0 referencing==0.30.2 regex==2022.9.13 reportlab==4.0.7 -requests==2.31.0 +requests==2.32.3 requests-file==2.1.0 requests-oauthlib==1.2.0 rfc3339-validator==0.1.2 @@ -124,13 +124,13 @@ rfc3986-validator==0.1.1 rpds-py==0.20.0 rsa==4.8 s3transfer==0.10.0 -sentry-arroyo==2.16.5 -sentry-kafka-schemas==0.1.118 +sentry-arroyo==2.18.2 +sentry-kafka-schemas==0.1.122 sentry-ophio==1.0.0 -sentry-protos==0.1.34 +sentry-protos==0.1.37 sentry-redis-tools==0.1.7 -sentry-relay==0.9.2 -sentry-sdk==2.18.0 +sentry-relay==0.9.3 +sentry-sdk==2.19.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 diff --git a/scripts/lib.sh b/scripts/lib.sh index 
e180294c46c91c..b8d054bea45d6c 100755 --- a/scripts/lib.sh +++ b/scripts/lib.sh @@ -7,6 +7,12 @@ # shellcheck disable=SC2034 # Unused variables # shellcheck disable=SC2001 # https://github.com/koalaman/shellcheck/wiki/SC2001 +POSTGRES_CONTAINER="sentry_postgres" +USE_NEW_DEVSERVICES=${USE_NEW_DEVSERVICES:-"0"} +if [ "$USE_NEW_DEVSERVICES" == "1" ]; then + POSTGRES_CONTAINER="sentry-postgres-1" +fi + # This block is a safe-guard since in CI calling tput will fail and abort scripts if [ -z "${CI+x}" ]; then bold="$(tput bold)" @@ -87,7 +93,7 @@ run-dependent-services() { } create-db() { - container_name=${POSTGRES_CONTAINER:-sentry_postgres} + container_name=${POSTGRES_CONTAINER} echo "--> Creating 'sentry' database" docker exec "${container_name}" createdb -h 127.0.0.1 -U postgres -E utf-8 sentry || true echo "--> Creating 'control', 'region' and 'secondary' database" @@ -132,7 +138,7 @@ clean() { } drop-db() { - container_name=${POSTGRES_CONTAINER:-sentry_postgres} + container_name=${POSTGRES_CONTAINER} echo "--> Dropping existing 'sentry' database" docker exec "${container_name}" dropdb --if-exists -h 127.0.0.1 -U postgres sentry echo "--> Dropping 'control' and 'region' database" diff --git a/scripts/upgrade-postgres.sh b/scripts/upgrade-postgres.sh index 2378e54cc709b4..a793e0214d80dc 100755 --- a/scripts/upgrade-postgres.sh +++ b/scripts/upgrade-postgres.sh @@ -1,5 +1,11 @@ #!/bin/bash +POSTGRES_CONTAINER="sentry_postgres" +USE_NEW_DEVSERVICES=${USE_NEW_DEVSERVICES:-"0"} +if [ "$USE_NEW_DEVSERVICES" == "1" ]; then + POSTGRES_CONTAINER="sentry-postgres-1" +fi + OLD_VERSION="9.6" NEW_VERSION="14" PG_IMAGE="ghcr.io/getsentry/image-mirror-library-postgres:${NEW_VERSION}-alpine" @@ -10,7 +16,7 @@ TMP_VOLUME_NAME="${VOLUME_NAME}_${NEW_VERSION}" TMP_CONTAINER="${PROJECT}_pg_migration" echo "Stop the container" -docker stop sentry_postgres +docker stop "${POSTGRES_CONTAINER}" echo "Check existence of a volume" if [[ -z "$(docker volume ls -q --filter name="^${VOLUME_NAME}$")" ]] diff --git a/scripts/use-colima.sh b/scripts/use-colima.sh deleted file mode 100755 index 375f9defd3d63b..00000000000000 --- a/scripts/use-colima.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -if ! [[ -x ~/.local/share/sentry-devenv/bin/colima ]]; then - echo "You need to install devenv! https://github.com/getsentry/devenv/#install" - exit 1 -fi - -if [[ "$(sysctl -n machdep.cpu.brand_string)" != Intel* ]]; then - case "$(sw_vers -productVersion)" in - *12.*|*13.*) - echo "Your ARM Mac is on a version incompatible with colima." - echo "Use Docker Desktop for now until you upgrade to at least MacOS 14." - exit 1 - ;; - esac -fi - -echo "Copying your postgres volume for use with colima. Will take a few minutes." -tmpdir=$(mktemp -d) -docker context use desktop-linux -docker run --rm -v sentry_postgres:/from -v "${tmpdir}:/to" alpine ash -c "cd /from ; cp -a . /to" || { echo "You need to start Docker Desktop."; exit 1; } - -echo "Stopping Docker.app. If a 'process terminated unexpectedly' dialog appears, dismiss it." -osascript - <<'EOF' || exit -quit application "Docker" -EOF - -# We aren't uninstalling for now - this makes rolling back to docker desktop faster. -# Also, less breakage as people may be using things like docker-credential-desktop. -# echo "Uninstalling docker cask (which includes Docker Desktop)." -# brew uninstall --cask docker - -# We do want to get people on just the docker cli though, to enable uninstalling the cask. -echo "Installing docker (cli only)." 
-brew install docker -# Unlinks docker (cask). -brew unlink docker -brew link --overwrite docker - -# This removes credsStore, saving it under oldCredsStore so it can be restored later. -# The right value under colima for this is "colima", but I think vast majority of people -# are authing their docker through gcloud, not docker cli. -python3 <<'EOF' -import os -import json -with open(os.path.expanduser("~/.docker/config.json"), "rb") as f: - config = json.loads(f.read()) - credsStore = config.get("credsStore") - if credsStore is None: - exit(0) - config["oldCredsStore"] = credsStore - del config["credsStore"] -with open(os.path.expanduser("~/.docker/config.json"), "w") as f: - f.write(json.dumps(config)) -EOF - -echo "Starting colima." -devenv colima start - -echo "Recreating your postgres volume for use with colima. May take a few minutes." -docker volume create --name sentry_postgres -docker run --rm -v "${tmpdir}:/from" -v sentry_postgres:/to alpine ash -c "cd /from ; cp -a . /to" -rm -rf "$tmpdir" - -echo "-----------------------------------------------" -echo "All done. Start devservices at your discretion." diff --git a/scripts/use-docker-desktop.sh b/scripts/use-docker-desktop.sh deleted file mode 100755 index f926cf27e9df66..00000000000000 --- a/scripts/use-docker-desktop.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -set -e - -~/.local/share/sentry-devenv/bin/colima stop - -echo "Using docker cli from cask. You may be prompted for your password." -# brew --prefix doesn't seem to apply here - it's just /usr/local -sudo ln -svf /Applications/Docker.app/Contents/Resources/bin/docker "/usr/local/bin/docker" - -# this restores the old credsStore value -python3 <<'EOF' -import os -import json -with open(os.path.expanduser("~/.docker/config.json"), "rb") as f: - config = json.loads(f.read()) - oldCredsStore = config.get("oldCredsStore") - if oldCredsStore is None: - exit(0) - config["credsStore"] = oldCredsStore - del config["oldCredsStore"] -with open(os.path.expanduser("~/.docker/config.json"), "w") as f: - f.write(json.dumps(config)) -EOF - -echo "Starting Docker." -open -a /Applications/Docker.app --args --unattended - -echo "-----------------------------------------------" -echo "All done. Start devservices at your discretion." diff --git a/self-hosted/Dockerfile b/self-hosted/Dockerfile index 940b815ed64ead..d22d64e05bcd5f 100644 --- a/self-hosted/Dockerfile +++ b/self-hosted/Dockerfile @@ -11,6 +11,13 @@ LABEL org.opencontainers.image.authors="oss@sentry.io" # add our user and group first to make sure their IDs get assigned consistently RUN groupadd -r sentry && useradd -r -m -g sentry sentry +RUN : \ + && apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + libexpat1 \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + ARG GOSU_VERSION=1.17 ARG GOSU_SHA256=bbc4136d03ab138b1ad66fa4fc051bafc6cc7ffae632b069a53657279a450de3 ARG TINI_VERSION=0.19.0 diff --git a/setup.cfg b/setup.cfg index 35564b11752f7e..aae014f9aa7768 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = sentry -version = 24.11.0.dev0 +version = 24.12.0.dev0 description = A realtime logging and aggregation server. 
long_description = file: README.md long_description_content_type = text/markdown @@ -112,7 +112,9 @@ extension = [coverage:run] omit = src/sentry/migrations/* -plugins = covdefaults +plugins = + covdefaults + sentry_covdefaults_disable_branch_coverage [coverage:report] # Setting this to 0 makes it falsy, and it gets ignored, so we set it to diff --git a/src/sentry/api/authentication.py b/src/sentry/api/authentication.py index b55aeb8d5b1716..eb0fd888294f29 100644 --- a/src/sentry/api/authentication.py +++ b/src/sentry/api/authentication.py @@ -422,11 +422,11 @@ def authenticate_token(self, request: Request, token_str: str) -> tuple[Any, Any if application_is_inactive: raise AuthenticationFailed("UserApplication inactive or deleted") - if token.organization_id: + if token.scoping_organization_id: # We need to make sure the organization to which the token has access is the same as the one in the URL organization = None organization_context = organization_service.get_organization_by_id( - id=token.organization_id + id=token.organization_id, include_projects=False, include_teams=False ) if organization_context: organization = organization_context.organization @@ -439,29 +439,15 @@ def authenticate_token(self, request: Request, token_str: str) -> tuple[Any, Any organization.slug != target_org_id_or_slug and organization.id != target_org_id_or_slug ): - # TODO (@athena): We want to raise auth excecption here but to be sure - # I soft launch this by only logging the error for now - # raise AuthenticationFailed("Unauthorized organization access") - logger.info( - "Token has access to organization %s but wants to get access to organization %s", - organization.slug, - target_org_id_or_slug, - ) - else: - # TODO (@athena): We want to limit org level token's access to org level endpoints only - # so in the future this will be an auth exception but for now we soft launch by logging an error - logger.info( - "Token has only access to organization %s but is calling an endpoint for multiple organizations: %s", - organization.slug, - request.path_info, + raise AuthenticationFailed("Unauthorized organization access.") + # We want to limit org-scoped tokens' access to org-level endpoints only, + # except for some non-org-level endpoints that have been given special treatment + elif resolved_url.url_name not in ["sentry-api-0-organizations"]: + raise AuthenticationFailed( + "This token access is limited to organization endpoints." ) else: - # TODO (@athena): If there is an organization token we should be able to fetch organization context - # Otherwise we should raise an exception - # For now adding logging to investigate if this is a valid case we need to address - logger.info( - "Token has access to an unknown organization: %s", token.organization_id - ) + raise AuthenticationFailed("Cannot resolve organization from token.") return self.transform_auth( user, diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index f71c6b2e4f9465..fddcd7dd555142 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -298,7 +298,7 @@ def permission_denied(self, request, message=None, code=None): super().permission_denied(request, message, code) - def handle_exception( # type: ignore[override] + def handle_exception_with_details( self, request: Request, exc: Exception, @@ -321,7 +321,7 @@ def handle_exception( # type: ignore[override] # Django REST Framework's built-in exception handler. If `settings.EXCEPTION_HANDLER` # exists and returns a response, that's used.
Otherwise, `exc` is just re-raised # and caught below. - response = super().handle_exception(exc) + response = self.handle_exception(exc) except Exception as err: import sys import traceback @@ -456,7 +456,7 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response: response = handler(request, *args, **kwargs) except Exception as exc: - response = self.handle_exception(request, exc) + response = self.handle_exception_with_details(request, exc) if origin: self.add_cors_headers(request, response) diff --git a/src/sentry/api/bases/organization_events.py b/src/sentry/api/bases/organization_events.py index facfbf747984e5..ff51f813e5b458 100644 --- a/src/sentry/api/bases/organization_events.py +++ b/src/sentry/api/bases/organization_events.py @@ -342,6 +342,8 @@ def handle_results_with_meta( if not data: return {"data": [], "meta": meta} + if "confidence" in results: + return {"data": data, "meta": meta, "confidence": results["confidence"]} return {"data": data, "meta": meta} def handle_data( @@ -405,7 +407,7 @@ def get_event_stats_data( request: Request, organization: Organization, get_event_stats: Callable[ - [Sequence[str], str, SnubaParams, int, bool, timedelta | None], SnubaTSResult + [list[str], str, SnubaParams, int, bool, timedelta | None], SnubaTSResult ], top_events: int = 0, query_column: str = "count()", @@ -427,7 +429,7 @@ def get_event_stats_data( columns = request.GET.getlist("yAxis", _columns) if query is None: - query = request.GET.get("query") + query = request.GET.get("query", "") if snuba_params is None: try: # events-stats is still used by events v1 which doesn't require global views @@ -567,7 +569,7 @@ def serialize_multiple_axis( event_result: SnubaTSResult, snuba_params: SnubaParams, columns: Sequence[str], - query_columns: Sequence[str], + query_columns: list[str], allow_partial_buckets: bool, zerofill_results: bool = True, dataset: Any | None = None, diff --git a/src/sentry/api/bases/project.py b/src/sentry/api/bases/project.py index d69deaeed98340..693828c767af83 100644 --- a/src/sentry/api/bases/project.py +++ b/src/sentry/api/bases/project.py @@ -1,5 +1,6 @@ from __future__ import annotations +import http from collections.abc import Mapping from typing import Any @@ -8,7 +9,7 @@ from rest_framework.response import Response from sentry.api.base import Endpoint -from sentry.api.exceptions import ProjectMoved, ResourceDoesNotExist +from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.helpers.environments import get_environments from sentry.api.permissions import StaffPermissionMixin from sentry.api.utils import get_date_range_from_params @@ -25,6 +26,13 @@ class ProjectEventsError(Exception): pass +class ProjectMoved(Exception): + def __init__(self, new_url: str, slug: str): + self.new_url = new_url + self.slug = slug + super().__init__(new_url, slug) + + class ProjectPermission(OrganizationPermission): scope_map = { "GET": ["project:read", "project:write", "project:admin"], @@ -40,6 +48,7 @@ def has_object_permission(self, request: Request, view, project): if has_org_scope and request.access.has_project_access(project): return has_org_scope + assert request.method is not None allowed_scopes = set(self.scope_map.get(request.method, [])) return request.access.has_any_project_scope(project, allowed_scopes) @@ -145,8 +154,7 @@ def convert_args( try: # Project may have been renamed # This will only happen if the passed in project_id_or_slug is a slug and not an id - redirect = ProjectRedirect.objects.select_related("project") - redirect = 
redirect.get( + redirect = ProjectRedirect.objects.select_related("project").get( organization__slug__id_or_slug=organization_id_or_slug, redirect_slug=project_id_or_slug, ) @@ -178,7 +186,7 @@ def convert_args( bind_organization_context(project.organization) - request._request.organization = project.organization + request._request.organization = project.organization # type: ignore[attr-defined] # XXX: we should not be stuffing random attributes into HttpRequest kwargs["project"] = project return (args, kwargs) @@ -199,7 +207,7 @@ def get_filter_params(self, request: Request, project, date_filter_optional=Fals return params - def handle_exception( + def handle_exception_with_details( self, request: Request, exc: Exception, @@ -208,9 +216,9 @@ def handle_exception( ) -> Response: if isinstance(exc, ProjectMoved): response = Response( - {"slug": exc.detail["detail"]["extra"]["slug"], "detail": exc.detail["detail"]}, - status=exc.status_code, + {"slug": exc.slug, "detail": {"extra": {"url": exc.new_url, "slug": exc.slug}}}, + status=http.HTTPStatus.FOUND.value, ) - response["Location"] = exc.detail["detail"]["extra"]["url"] + response["Location"] = exc.new_url return response - return super().handle_exception(request, exc, handler_context, scope) + return super().handle_exception_with_details(request, exc, handler_context, scope) diff --git a/src/sentry/api/endpoints/event_ai_suggested_fix.py b/src/sentry/api/endpoints/event_ai_suggested_fix.py deleted file mode 100644 index 213ec900ac702c..00000000000000 --- a/src/sentry/api/endpoints/event_ai_suggested_fix.py +++ /dev/null @@ -1,398 +0,0 @@ -from __future__ import annotations - -import logging -import random -from typing import Any - -import orjson -from django.conf import settings -from django.dispatch import Signal -from django.http import HttpResponse, StreamingHttpResponse -from openai import OpenAI, RateLimitError - -from sentry import eventstore -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.project import ProjectEndpoint -from sentry.api.exceptions import ResourceDoesNotExist -from sentry.types.ratelimit import RateLimit, RateLimitCategory -from sentry.utils.cache import cache - -logger = logging.getLogger(__name__) - -from rest_framework.request import Request - -openai_policy_check = Signal() - -# How many stacktrace frames do we want per exception? -MAX_STACKTRACE_FRAMES = 15 - -# How many exceptions do we want? -MAX_EXCEPTIONS = 3 - -# Do we want tags? 
They don't seem particularly useful -ADD_TAGS = False - -FUN_PROMPT_CHOICES = [ - "[haiku about the error]", - "[hip hop rhyme about the error]", - "[4 line rhyme about the error]", - "[2 stanza rhyme about the error]", -] - -PROMPT = """\ -You are an assistant that analyses software errors, describing the problem with the following rules: - -* Be helpful, playful and a bit snarky and sarcastic -* Do not talk about the rules in explanations -* Use emojis frequently in the snarky greeting and closing prompt -* The frames of a stack trace is shown with most recent call first -* Stack frames are either from app code or third party libraries -* Never show code examples as diff -* When describing the problem: - * Explain the error and message - * Explain where in the code the issue happend - * Explain the nature of the issue -* When proposing a solution: - * Explain what code changes are necessary to resolve it - * Explain where the solution should be - * Mention best practices for preventing this -* Remember Sentry's marketing message: "Sentry can't fix this" - -Write the answers into the following template: - -``` -[snarky greeting] - -#### Problem Description - -[detailed description of the problem] - -#### Proposed Solution - -[proposed solution to fix this issue] - -[fixed code example] - -#### What Else - -[uplifting closing statements] - -___FUN_PROMPT___ -``` -""" - -# Theset tags are removed because they are quite unstable between different events -# of the same issue, and typically unrelated to something that the AI assistant -# can answer. -BLOCKED_TAGS = frozenset( - [ - "user", - "server_name", - "host", - "release", - "handled", - "client_os", - "client_os.name", - "browser", - "browser.name", - "environment", - "runtime", - "device", - "device.family", - "gpu", - "gpu.name", - "gpu.vendor", - "url", - "trace", - "otel", - ] -) - -openai_client: OpenAI | None = None - - -def get_openai_client() -> OpenAI: - global openai_client - - if openai_client: - return openai_client - - # this will raise if OPENAI_API_KEY is not set - openai_client = OpenAI(api_key=settings.OPENAI_API_KEY) - - return openai_client - - -def get_openai_policy(organization, user, pii_certified): - """Uses a signal to determine what the policy for OpenAI should be.""" - results = openai_policy_check.send( - sender=EventAiSuggestedFixEndpoint, - organization=organization, - user=user, - pii_certified=pii_certified, - ) - result = "allowed" - - # Last one wins - for _, new_result in results: - if new_result is not None: - result = new_result - - return result - - -def set_if_value(d, key, value): - if value is not None: - d[key] = value - - -def trim_frames(frames, frame_allowance=MAX_STACKTRACE_FRAMES): - frames_len = 0 - app_frames = [] - system_frames = [] - - for frame in frames: - frames_len += 1 - if frame.get("in_app"): - app_frames.append(frame) - else: - system_frames.append(frame) - - if frames_len <= frame_allowance: - return frames - - remaining = frames_len - frame_allowance - app_count = len(app_frames) - system_allowance = max(frame_allowance - app_count, 0) - if system_allowance: - half_max = int(system_allowance / 2) - # prioritize trimming system frames - for frame in system_frames[half_max:-half_max]: - frame["delete"] = True - remaining -= 1 - else: - for frame in system_frames: - frame["delete"] = True - remaining -= 1 - - if remaining: - app_allowance = app_count - remaining - half_max = int(app_allowance / 2) - - for frame in app_frames[half_max:-half_max]: - frame["delete"] = True - - 
return [x for x in frames if not x.get("delete")] - - -def describe_event_for_ai(event, model): - detailed = model.startswith("gpt-4") - data = {} - - msg = event.get("logentry") - if msg: - data["message"] = msg - - platform = event.get("platform") - if platform and platform != "other": - data["language"] = platform - - exceptions = data.setdefault("exceptions", []) - for idx, exc in enumerate( - reversed((event.get("exception", {})).get("values", ())[:MAX_EXCEPTIONS]) - ): - exception: dict[str, Any] = {} - if idx > 0: - exception["raised_during_handling_of_previous_exception"] = True - exception["num"] = idx + 1 - exc_type = exc.get("type") - if exc_type: - exception["type"] = exc_type - exception["message"] = exc.get("value") - mechanism = exc.get("mechanism") or {} - exc_meta = mechanism.get("meta") - if exc_meta: - exception["exception_info"] = exc_meta - if mechanism.get("handled") is False: - exception["unhandled"] = True - - frames = exc.get("stacktrace", {}).get("frames") - first_in_app = True - if frames: - stacktrace = [] - for frame in reversed(frames): - if frame is None: - continue - stack_frame: dict[str, Any] = {} - set_if_value(stack_frame, "func", frame.get("function")) - set_if_value(stack_frame, "module", frame.get("module")) - set_if_value(stack_frame, "file", frame.get("filename")) - set_if_value(stack_frame, "line", frame.get("lineno")) - if frame.get("in_app"): - stack_frame["in_app"] = True - crashed_here = False - if first_in_app: - crashed_here = True - stack_frame["crash"] = "here" - first_in_app = False - line = frame.get("context_line") or "" - if (crashed_here and idx == 0) or detailed: - pre_context = frame.get("pre_context") - if pre_context: - stack_frame["code_before"] = pre_context - stack_frame["code"] = line - post_context = frame.get("post_context") - if post_context: - stack_frame["code_after"] = post_context - # {snip} usually appears in minified lines. 
skip that - elif "{snip}" not in line: - set_if_value(stack_frame, "code", line.strip()) - stacktrace.append(stack_frame) - if stacktrace: - exception["stacktrace"] = trim_frames(stacktrace) - exceptions.append(exception) - - if ADD_TAGS: - tags = data.setdefault("tags", {}) - for tag_key, tag_value in sorted(event["tags"]): - if tag_key not in BLOCKED_TAGS: - tags[tag_key] = tag_value - - return data - - -def suggest_fix(event_data, model=settings.SENTRY_AI_SUGGESTED_FIX_MODEL, stream=False): - """Runs an OpenAI request to suggest a fix.""" - prompt = PROMPT.replace("___FUN_PROMPT___", random.choice(FUN_PROMPT_CHOICES)) - event_info = describe_event_for_ai(event_data, model=model) - - client = get_openai_client() - - response = client.chat.completions.create( - model=model, - temperature=0.7, - messages=[ - {"role": "system", "content": prompt}, - {"role": "user", "content": orjson.dumps(event_info).decode()}, - ], - stream=stream, - ) - if stream: - return reduce_stream(response) - return response.choices[0].message.content - - -def reduce_stream(response): - for chunk in response: - delta = chunk["choices"][0]["delta"] - if "content" in delta: - yield delta["content"] - - -@region_silo_endpoint -class EventAiSuggestedFixEndpoint(ProjectEndpoint): - owner = ApiOwner.ML_AI - publish_status = { - "GET": ApiPublishStatus.PRIVATE, - } - enforce_rate_limit = True - rate_limits = { - "GET": { - RateLimitCategory.IP: RateLimit(limit=5, window=1), - RateLimitCategory.USER: RateLimit(limit=5, window=1), - RateLimitCategory.ORGANIZATION: RateLimit(limit=5, window=1), - }, - } - - def get(self, request: Request, project, event_id) -> HttpResponse | StreamingHttpResponse: - """ - Makes AI make suggestions about an event - ```````````````````````````````````````` - - This endpoint returns a JSON response that provides helpful suggestions about how to - understand or resolve an event. - """ - # To use this feature you need openai to be configured - if not settings.OPENAI_API_KEY: - raise ResourceDoesNotExist - - event = eventstore.backend.get_event_by_id(project.id, event_id) - if event is None: - raise ResourceDoesNotExist - - policy_failure = None - # If the option has specifically been set to False, - if not bool(request.organization.get_option("sentry:ai_suggested_solution", default=False)): - policy_failure = "organization_consent_required" - else: - # Check the OpenAI access policy - policy = get_openai_policy( - request.organization, - request.user, - pii_certified=request.GET.get("pii_certified") == "yes", - ) - stream = request.GET.get("stream") == "yes" - - if policy == "subprocessor": - policy_failure = "subprocessor" - elif policy == "individual_consent": - if request.GET.get("consent") != "yes": - policy_failure = "individual_consent" - elif policy == "pii_certification_required": - policy_failure = "pii_certification_required" - elif policy == "allowed": - pass - else: - logger.warning("Unknown OpenAI policy state") - - if policy_failure is not None: - return HttpResponse( - orjson.dumps({"restriction": policy_failure}), - content_type="application/json", - status=403, - ) - - # Cache the suggestion for a certain amount by primary hash, so even when new events - # come into the group, we are sharing the same response. 
- cache_key = "ai:" + event.get_primary_hash() - suggestion = cache.get(cache_key) - if suggestion is None: - try: - suggestion = suggest_fix(event.data, stream=stream) - except RateLimitError as err: - return HttpResponse( - orjson.dumps({"error": err.response.json()["error"]}), - content_type="text/plain; charset=utf-8", - status=429, - ) - - if stream: - - def stream_response(): - buffer = [] - for item in suggestion: - buffer.append(item) - yield item.encode("utf-8") - cache.set(cache_key, "".join(buffer), 300) - - resp = StreamingHttpResponse(stream_response(), content_type="text/event-stream") - # make nginx happy - resp["x-accel-buffering"] = "no" - # make webpack devserver happy - resp["cache-control"] = "no-transform" - return resp - - cache.set(cache_key, suggestion, 300) - - if stream: - return HttpResponse( - suggestion, - content_type="text/plain; charset=utf-8", - ) - - return HttpResponse( - orjson.dumps({"suggestion": suggestion}), - content_type="application/json", - ) diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py index 780946cd7000d8..7e5b6855cd1f5d 100644 --- a/src/sentry/api/endpoints/group_ai_autofix.py +++ b/src/sentry/api/endpoints/group_ai_autofix.py @@ -111,11 +111,6 @@ def _call_autofix( else None ), "options": { - "disable_codebase_indexing": features.has( - "organizations:autofix-disable-codebase-indexing", - group.organization, - actor=user, - ), "comment_on_pr_with_url": pr_to_comment_on_url, }, }, @@ -161,9 +156,8 @@ def post(self, request: Request, group: Group) -> Response: created_at = datetime.now().isoformat() if not ( - features.has("projects:ai-autofix", group.project) - or features.has("organizations:autofix", group.organization) - or group.organization.get_option("sentry:gen_ai_consent", False) + features.has("organizations:gen-ai-features", group.organization, actor=request.user) + and group.organization.get_option("sentry:gen_ai_consent_v2024_11_14", False) ): return self._respond_with_error("AI Autofix is not enabled for this project.", 403) diff --git a/src/sentry/api/endpoints/group_ai_summary.py b/src/sentry/api/endpoints/group_ai_summary.py index 729414a37705f2..b540a46d545347 100644 --- a/src/sentry/api/endpoints/group_ai_summary.py +++ b/src/sentry/api/endpoints/group_ai_summary.py @@ -170,7 +170,9 @@ def _get_trace_connected_issues(self, event: GroupEvent) -> list[Group]: return connected_issues def post(self, request: Request, group: Group) -> Response: - if not features.has("organizations:ai-summary", group.organization, actor=request.user): + if not features.has( + "organizations:gen-ai-features", group.organization, actor=request.user + ): return Response({"detail": "Feature flag not enabled"}, status=400) cache_key = "ai-group-summary-v2:" + str(group.id) diff --git a/src/sentry/api/endpoints/group_attachments.py b/src/sentry/api/endpoints/group_attachments.py index 4ae767fe5527cf..bb83c1cd3d0789 100644 --- a/src/sentry/api/endpoints/group_attachments.py +++ b/src/sentry/api/endpoints/group_attachments.py @@ -1,3 +1,7 @@ +from datetime import datetime, timedelta + +from django.utils import timezone +from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response @@ -5,9 +9,64 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import EnvironmentMixin, region_silo_endpoint from sentry.api.bases.group import GroupEndpoint +from sentry.api.exceptions import ResourceDoesNotExist 
+from sentry.api.helpers.environments import get_environments +from sentry.api.helpers.events import get_query_builder_for_group from sentry.api.paginator import DateTimePaginator from sentry.api.serializers import EventAttachmentSerializer, serialize +from sentry.api.utils import get_date_range_from_params +from sentry.exceptions import InvalidParams, InvalidSearchQuery from sentry.models.eventattachment import EventAttachment, event_attachment_screenshot_filter +from sentry.models.group import Group +from sentry.search.events.types import ParamsType + + +def get_event_ids_from_filters( + request: Request, + group: Group, + start: datetime | None, + end: datetime | None, +) -> list[str] | None: + """ + Returns a list of Event IDs matching the environment/query filters. + If neither is provided, it will return `None`, skipping the `EventAttachment.event_id` filter entirely. + If at least one is provided but nothing matches, it will return `[]`, which results in no attachment matches (as expected). + """ + default_end = timezone.now() + default_start = default_end - timedelta(days=90) + try: + environments = get_environments(request, group.project.organization) + except ResourceDoesNotExist: + environments = [] + query = request.GET.get("query", "") + + # Exit early if no query or environment is specified + if not query and not environments: + return None + + params: ParamsType = { + "project_id": [group.project_id], + "organization_id": group.project.organization_id, + "start": start if start else default_start, + "end": end if end else default_end, + } + + if environments: + params["environment"] = [env.name for env in environments] + + try: + snuba_query = get_query_builder_for_group( + query=query, + snuba_params=params, + group=group, + limit=10000, + offset=0, + ) + except InvalidSearchQuery as e: + raise ParseError(detail=str(e)) + referrer = f"api.group-attachments.{group.issue_category.name.lower()}" + results = snuba_query.run_query(referrer=referrer) + return [evt["id"] for evt in results["data"]] @region_silo_endpoint @@ -25,6 +84,11 @@ def get(self, request: Request, group) -> Response: :pparam string issue_id: the ID of the issue to retrieve. :pparam list types: a list of attachment types to filter for. + :qparam string start: Beginning date. You must also provide ``end``. + :qparam string end: End date. You must also provide ``start``. + :qparam string statsPeriod: An optional stat period (defaults to ``"90d"``). + :qparam string query: If set, will filter to only attachments from events matching that query. + :qparam string environment: If set, will filter to only attachments from events within a specific environment.
:auth: required """ @@ -36,18 +100,36 @@ def get(self, request: Request, group) -> Response: attachments = EventAttachment.objects.filter(group_id=group.id) types = request.GET.getlist("types") or () - event_ids = request.GET.getlist("event_id") or () + event_ids = request.GET.getlist("event_id") or None screenshot = "screenshot" in request.GET + try: + start, end = get_date_range_from_params(request.GET, optional=True) + except InvalidParams as e: + raise ParseError(detail=str(e)) + + if start: + attachments = attachments.filter(date_added__gte=start) + if end: + attachments = attachments.filter(date_added__lte=end) + + if not event_ids: + event_ids = get_event_ids_from_filters( + request=request, + group=group, + start=start, + end=end, + ) + if screenshot: attachments = event_attachment_screenshot_filter(attachments) if types: attachments = attachments.filter(type__in=types) - if event_ids: + # If event_ids is [], we still want attachments to filter to an empty list. + if event_ids is not None: attachments = attachments.filter(event_id__in=event_ids) return self.paginate( - default_per_page=20, request=request, queryset=attachments, order_by="-date_added", diff --git a/src/sentry/api/endpoints/group_autofix_setup_check.py b/src/sentry/api/endpoints/group_autofix_setup_check.py index 96f1d5c20ee123..8cb0167f8fc238 100644 --- a/src/sentry/api/endpoints/group_autofix_setup_check.py +++ b/src/sentry/api/endpoints/group_autofix_setup_check.py @@ -7,15 +7,10 @@ from django.conf import settings from rest_framework.response import Response -from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.group import GroupEndpoint -from sentry.api.helpers.autofix import ( - AutofixCodebaseIndexingStatus, - get_project_codebase_indexing_status, -) from sentry.autofix.utils import get_autofix_repos_from_project_code_mappings from sentry.constants import ObjectStatus from sentry.integrations.services.integration import integration_service @@ -71,6 +66,11 @@ def get_repos_and_access(project: Project) -> list[dict]: repos_and_access: list[dict] = [] path = "/v1/automation/codebase/repo/check-access" for repo in repos: + # We only support github for now. + provider = repo.get("provider") + if provider != "integrations:github" and provider != "github": + continue + body = orjson.dumps( { "repo": repo, @@ -109,13 +109,7 @@ def get(self, request: Request, group: Group) -> Response: Checks if we are able to run Autofix on the given group. """ org: Organization = request.organization - has_gen_ai_consent = org.get_option("sentry:gen_ai_consent", False) - - is_codebase_indexing_disabled = features.has( - "organizations:autofix-disable-codebase-indexing", - group.organization, - actor=request.user, - ) + has_gen_ai_consent = org.get_option("sentry:gen_ai_consent_v2024_11_14", False) integration_check = None # This check is to skip using the GitHub integration for Autofix in s4s. 
@@ -125,16 +119,14 @@ def get(self, request: Request, group: Group) -> Response: organization=org, project=group.project ) - repos = get_repos_and_access(group.project) - write_access_ok = len(repos) > 0 and all(repo["ok"] for repo in repos) - - codebase_indexing_ok = is_codebase_indexing_disabled - if not codebase_indexing_ok: - codebase_indexing_status = get_project_codebase_indexing_status(group.project) - codebase_indexing_ok = ( - codebase_indexing_status == AutofixCodebaseIndexingStatus.UP_TO_DATE - or codebase_indexing_status == AutofixCodebaseIndexingStatus.INDEXING - ) + write_integration_check = None + if request.query_params.get("check_write_access", False): + repos = get_repos_and_access(group.project) + write_access_ok = len(repos) > 0 and all(repo["ok"] for repo in repos) + write_integration_check = { + "ok": write_access_ok, + "repos": repos, + } return Response( { @@ -146,12 +138,6 @@ def get(self, request: Request, group: Group) -> Response: "ok": integration_check is None, "reason": integration_check, }, - "githubWriteIntegration": { - "ok": write_access_ok, - "repos": repos, - }, - "codebaseIndexing": { - "ok": codebase_indexing_ok, - }, + "githubWriteIntegration": write_integration_check, } ) diff --git a/src/sentry/api/endpoints/group_integration_details.py b/src/sentry/api/endpoints/group_integration_details.py index 6698a685d65b7f..7663f366a458c0 100644 --- a/src/sentry/api/endpoints/group_integration_details.py +++ b/src/sentry/api/endpoints/group_integration_details.py @@ -13,6 +13,10 @@ from sentry.integrations.api.serializers.models.integration import IntegrationSerializer from sentry.integrations.base import IntegrationFeatures, IntegrationInstallation from sentry.integrations.models.external_issue import ExternalIssue +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.integration import RpcIntegration, integration_service from sentry.models.activity import Activity from sentry.models.group import Group @@ -162,62 +166,72 @@ def put(self, request: Request, group, integration_id) -> Response: if not integration or not org_integration: return Response(status=404) - if not self._has_issue_feature_on_integration(integration): - return Response( - {"detail": "This feature is not supported for this integration."}, status=400 - ) + with ProjectManagementEvent( + action_type=ProjectManagementActionType.LINK_EXTERNAL_ISSUE, + integration=integration, + ).capture() as lifecycle: + if not self._has_issue_feature_on_integration(integration): + return Response( + {"detail": "This feature is not supported for this integration."}, status=400 + ) - installation = integration.get_installation(organization_id=organization_id) - try: - data = installation.get_issue(external_issue_id, data=request.data) - except IntegrationFormError as exc: - return Response(exc.field_errors, status=400) - except IntegrationError as e: - return Response({"non_field_errors": [str(e)]}, status=400) + installation = integration.get_installation(organization_id=organization_id) - defaults = { - "title": data.get("title"), - "description": data.get("description"), - "metadata": data.get("metadata"), - } + try: + data = installation.get_issue(external_issue_id, data=request.data) + except IntegrationFormError as exc: + lifecycle.record_halt(exc) + return Response(exc.field_errors, status=400) + except IntegrationError as e: + lifecycle.record_failure(e) + return Response({"non_field_errors": 
[str(e)]}, status=400) - external_issue_key = installation.make_external_key(data) - external_issue, created = ExternalIssue.objects.get_or_create( - organization_id=organization_id, - integration_id=integration.id, - key=external_issue_key, - defaults=defaults, - ) + defaults = { + "title": data.get("title"), + "description": data.get("description"), + "metadata": data.get("metadata"), + } - if created: - integration_issue_linked.send_robust( - integration=integration, - organization=group.project.organization, - user=request.user, - sender=self.__class__, + external_issue_key = installation.make_external_key(data) + external_issue, created = ExternalIssue.objects.get_or_create( + organization_id=organization_id, + integration_id=integration.id, + key=external_issue_key, + defaults=defaults, ) - else: - external_issue.update(**defaults) - installation.store_issue_last_defaults(group.project, request.user, request.data) - try: - installation.after_link_issue(external_issue, data=request.data) - except IntegrationFormError as exc: - return Response(exc.field_errors, status=400) - except IntegrationError as e: - return Response({"non_field_errors": [str(e)]}, status=400) - - try: - with transaction.atomic(router.db_for_write(GroupLink)): - GroupLink.objects.create( - group_id=group.id, - project_id=group.project_id, - linked_type=GroupLink.LinkedType.issue, - linked_id=external_issue.id, - relationship=GroupLink.Relationship.references, + if created: + integration_issue_linked.send_robust( + integration=integration, + organization=group.project.organization, + user=request.user, + sender=self.__class__, ) - except IntegrityError: - return Response({"non_field_errors": ["That issue is already linked"]}, status=400) + else: + external_issue.update(**defaults) + + installation.store_issue_last_defaults(group.project, request.user, request.data) + try: + installation.after_link_issue(external_issue, data=request.data) + except IntegrationFormError as exc: + lifecycle.record_halt(exc) + return Response(exc.field_errors, status=400) + except IntegrationError as e: + lifecycle.record_failure(e) + return Response({"non_field_errors": [str(e)]}, status=400) + + try: + with transaction.atomic(router.db_for_write(GroupLink)): + GroupLink.objects.create( + group_id=group.id, + project_id=group.project_id, + linked_type=GroupLink.LinkedType.issue, + linked_id=external_issue.id, + relationship=GroupLink.Relationship.references, + ) + except IntegrityError as exc: + lifecycle.record_halt(exc) + return Response({"non_field_errors": ["That issue is already linked"]}, status=400) self.create_issue_activity(request, group, installation, external_issue, new=False) diff --git a/src/sentry/api/endpoints/notifications/notification_actions_available.py b/src/sentry/api/endpoints/notifications/notification_actions_available.py index 5c9727ee4af7ea..a3b7d6757f4ebc 100644 --- a/src/sentry/api/endpoints/notifications/notification_actions_available.py +++ b/src/sentry/api/endpoints/notifications/notification_actions_available.py @@ -7,8 +7,8 @@ from sentry.api.bases.organization import OrganizationEndpoint from sentry.constants import ObjectStatus from sentry.integrations.services.integration import integration_service -from sentry.models.notificationaction import NotificationAction from sentry.models.organization import Organization +from sentry.notifications.models.notificationaction import NotificationAction @region_silo_endpoint diff --git a/src/sentry/api/endpoints/notifications/notification_actions_details.py 
b/src/sentry/api/endpoints/notifications/notification_actions_details.py index 49ded9395c66cd..00173565a95946 100644 --- a/src/sentry/api/endpoints/notifications/notification_actions_details.py +++ b/src/sentry/api/endpoints/notifications/notification_actions_details.py @@ -22,8 +22,8 @@ from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_NO_CONTENT from sentry.apidocs.examples.notification_examples import NotificationActionExamples from sentry.apidocs.parameters import GlobalParams, NotificationParams -from sentry.models.notificationaction import NotificationAction from sentry.models.organization import Organization +from sentry.notifications.models.notificationaction import NotificationAction logger = logging.getLogger(__name__) diff --git a/src/sentry/api/endpoints/notifications/notification_actions_index.py b/src/sentry/api/endpoints/notifications/notification_actions_index.py index cc95b12c610ec0..57f31941465ee0 100644 --- a/src/sentry/api/endpoints/notifications/notification_actions_index.py +++ b/src/sentry/api/endpoints/notifications/notification_actions_index.py @@ -19,8 +19,8 @@ from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN from sentry.apidocs.examples.notification_examples import NotificationActionExamples from sentry.apidocs.parameters import GlobalParams, NotificationParams, OrganizationParams -from sentry.models.notificationaction import NotificationAction from sentry.models.organization import Organization +from sentry.notifications.models.notificationaction import NotificationAction logger = logging.getLogger(__name__) diff --git a/src/sentry/api/endpoints/organization_dashboard_details.py b/src/sentry/api/endpoints/organization_dashboard_details.py index 20241d1612703c..cbce7c14fd0f06 100644 --- a/src/sentry/api/endpoints/organization_dashboard_details.py +++ b/src/sentry/api/endpoints/organization_dashboard_details.py @@ -3,7 +3,6 @@ from django.db.models import F from django.utils import timezone from drf_spectacular.utils import extend_schema -from rest_framework.permissions import BasePermission from rest_framework.request import Request from rest_framework.response import Response @@ -31,32 +30,9 @@ READ_FEATURE = "organizations:dashboards-basic" -class DashboardPermissions(BasePermission): - """ - Django Permissions Class for managing Dashboard Edit - permissions defined in the DashboardPermissions Model - """ - - scope_map = { - "GET": ["org:read", "org:write", "org:admin"], - "POST": ["org:read", "org:write", "org:admin"], - "PUT": ["org:read", "org:write", "org:admin"], - "DELETE": ["org:read", "org:write", "org:admin"], - } - - def has_object_permission(self, request: Request, view, obj): - if isinstance(obj, Dashboard) and features.has( - "organizations:dashboards-edit-access", obj.organization, actor=request.user - ): - # Check if user has permissions to edit dashboard - if hasattr(obj, "permissions"): - return obj.permissions.has_edit_permissions(request.user.id) - return True - - class OrganizationDashboardBase(OrganizationEndpoint): owner = ApiOwner.PERFORMANCE - permission_classes = (OrganizationDashboardsPermission, DashboardPermissions) + permission_classes = (OrganizationDashboardsPermission,) def convert_args( self, request: Request, organization_id_or_slug, dashboard_id, *args, **kwargs @@ -221,3 +197,42 @@ def post(self, request: Request, organization, dashboard) -> Response: dashboard.save(update_fields=["visits", "last_visited"]) return Response(status=204) + + +@region_silo_endpoint +class 
OrganizationDashboardFavoriteEndpoint(OrganizationDashboardBase): + """ + Endpoint for managing the favorite status of dashboards for users + """ + + publish_status = { + "PUT": ApiPublishStatus.PRIVATE, + } + + def put(self, request: Request, organization, dashboard) -> Response: + """ + Toggle favorite status for current user by adding or removing + current user from dashboard favorites + """ + if not features.has("organizations:dashboards-favourite", organization, actor=request.user): + return Response(status=404) + + if not features.has(EDIT_FEATURE, organization, actor=request.user): + return Response(status=404) + + if isinstance(dashboard, dict): + return Response(status=204) + + is_favorited = request.data.get("isFavorited") + current_favorites = set(dashboard.favorited_by) + + if is_favorited and request.user.id not in current_favorites: + current_favorites.add(request.user.id) + elif not is_favorited and request.user.id in current_favorites: + current_favorites.remove(request.user.id) + else: + return Response(status=204) + + dashboard.favorited_by = current_favorites + + return Response(status=204) diff --git a/src/sentry/api/endpoints/organization_dashboard_widget_details.py b/src/sentry/api/endpoints/organization_dashboard_widget_details.py index 9b1d56ac27393e..e4ceffa702b60e 100644 --- a/src/sentry/api/endpoints/organization_dashboard_widget_details.py +++ b/src/sentry/api/endpoints/organization_dashboard_widget_details.py @@ -37,6 +37,7 @@ def post(self, request: Request, organization) -> Response: "projects": self.get_projects(request, organization), "displayType": request.data.get("displayType"), "environment": request.GET.getlist("environment"), + "request": request, }, ) if not serializer.is_valid(): diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py index da0ffa59be03c5..baf206f9ff4e59 100644 --- a/src/sentry/api/endpoints/organization_dashboards.py +++ b/src/sentry/api/endpoints/organization_dashboards.py @@ -48,25 +48,40 @@ def has_object_permission(self, request: Request, view, obj): return super().has_object_permission(request, view, obj) if isinstance(obj, Dashboard): - # 1. Dashboard contains certain projects - if obj.projects.exists(): - return request.access.has_projects_access(obj.projects.all()) + if features.has( + "organizations:dashboards-edit-access", obj.organization, actor=request.user + ): + # allow for Managers and Owners + if request.access.has_scope("org:write"): + return True + + # check if user is restricted from editing dashboard + if hasattr(obj, "permissions"): + return obj.permissions.has_edit_permissions(request.user.id) + + # if no permissions are assigned, it is considered accessible to all users + return True - # 2. Dashboard covers all projects or all my projects + else: + # 1. Dashboard contains certain projects + if obj.projects.exists(): + return request.access.has_projects_access(obj.projects.all()) - # allow when Open Membership - if obj.organization.flags.allow_joinleave: - return True + # 2. 
Dashboard covers all projects or all my projects - # allow for Managers and Owners - if request.access.has_scope("org:write"): - return True + # allow when Open Membership + if obj.organization.flags.allow_joinleave: + return True - # allow for creator - if request.user.id == obj.created_by_id: - return True + # allow for Managers and Owners + if request.access.has_scope("org:write"): + return True - return False + # allow for creator + if request.user.id == obj.created_by_id: + return True + + return False return True @@ -102,7 +117,21 @@ def get(self, request: Request, organization) -> Response: if not features.has("organizations:dashboards-basic", organization, actor=request.user): return Response(status=404) - dashboards = Dashboard.objects.filter(organization_id=organization.id) + if features.has("organizations:dashboards-favourite", organization, actor=request.user): + filter_by = request.query_params.get("filter") + if filter_by == "onlyFavorites": + dashboards = Dashboard.objects.filter( + organization_id=organization.id, dashboardfavoriteuser__user_id=request.user.id + ) + elif filter_by == "excludeFavorites": + dashboards = Dashboard.objects.exclude( + organization_id=organization.id, dashboardfavoriteuser__user_id=request.user.id + ) + else: + dashboards = Dashboard.objects.filter(organization_id=organization.id) + else: + dashboards = Dashboard.objects.filter(organization_id=organization.id) + query = request.GET.get("query") if query: dashboards = dashboards.filter(title__icontains=query) @@ -176,7 +205,15 @@ def handle_results(results): return self.paginate( request=request, - sources=[prebuilt, dashboards], + sources=( + [dashboards] + if features.has( + "organizations:dashboards-favourite", organization, actor=request.user + ) + and filter_by + and filter_by == "onlyFavorites" + else [prebuilt, dashboards] + ), paginator_cls=ChainPaginator, on_results=handle_results, ) diff --git a/src/sentry/api/endpoints/organization_derive_code_mappings.py b/src/sentry/api/endpoints/organization_derive_code_mappings.py index 110654113c28c6..485dec0e3f3755 100644 --- a/src/sentry/api/endpoints/organization_derive_code_mappings.py +++ b/src/sentry/api/endpoints/organization_derive_code_mappings.py @@ -11,6 +11,7 @@ OrganizationIntegrationsLoosePermission, ) from sentry.api.serializers import serialize +from sentry.integrations.github.integration import GitHubIntegration from sentry.integrations.utils.code_mapping import ( CodeMapping, CodeMappingTreesHelper, @@ -45,7 +46,7 @@ def get(self, request: Request, organization: Organization) -> Response: return Response(status=status.HTTP_403_FORBIDDEN) stacktrace_filename = request.GET.get("stacktraceFilename") - installation, _ = get_installation(organization) + installation, _ = get_installation(organization) # only returns GitHub integrations if not installation: return self.respond( {"text": "Could not find this integration installed on your organization"}, @@ -53,7 +54,14 @@ def get(self, request: Request, organization: Organization) -> Response: ) # This method is specific to the GithubIntegration - trees = installation.get_trees_for_org() # type: ignore[attr-defined] + if not isinstance(installation, GitHubIntegration): + return self.respond( + { + "text": f"The {installation.model.provider} integration does not support derived code mappings" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + trees = installation.get_trees_for_org() trees_helper = CodeMappingTreesHelper(trees) possible_code_mappings: list[dict[str, str]] = [] resp_status: 
int = status.HTTP_204_NO_CONTENT diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py index 22c97e291220b2..21050245f03704 100644 --- a/src/sentry/api/endpoints/organization_details.py +++ b/src/sentry/api/endpoints/organization_details.py @@ -42,7 +42,6 @@ from sentry.auth.staff import is_active_staff from sentry.constants import ( ACCOUNT_RATE_LIMIT_DEFAULT, - AI_SUGGESTED_SOLUTION, ALERTS_MEMBER_WRITE_DEFAULT, ATTACHMENTS_ROLE_DEFAULT, DEBUG_FILES_ROLE_DEFAULT, @@ -180,12 +179,6 @@ ("relayPiiConfig", "sentry:relay_pii_config", str, None), ("allowJoinRequests", "sentry:join_requests", bool, JOIN_REQUESTS_DEFAULT), ("apdexThreshold", "sentry:apdex_threshold", int, None), - ( - "aiSuggestedSolution", - "sentry:ai_suggested_solution", - bool, - AI_SUGGESTED_SOLUTION, - ), ( "hideAiFeatures", "sentry:hide_ai_features", @@ -282,7 +275,6 @@ class OrganizationSerializer(BaseOrganizationSerializer): scrubIPAddresses = serializers.BooleanField(required=False) scrapeJavaScript = serializers.BooleanField(required=False) isEarlyAdopter = serializers.BooleanField(required=False) - aiSuggestedSolution = serializers.BooleanField(required=False) hideAiFeatures = serializers.BooleanField(required=False) codecovAccess = serializers.BooleanField(required=False) githubOpenPRBot = serializers.BooleanField(required=False) @@ -665,10 +657,6 @@ class OrganizationDetailsPutSerializer(serializers.Serializer): help_text="Specify `true` to opt-in to new features before they're released to the public.", required=False, ) - aiSuggestedSolution = serializers.BooleanField( - help_text="Specify `true` to opt-in to [AI Suggested Solution](/product/issues/issue-details/ai-suggested-solution/) to get AI help on how to solve an issue.", - required=False, - ) hideAiFeatures = serializers.BooleanField( help_text="Specify `true` to hide AI features from the organization.", required=False, @@ -1048,7 +1036,7 @@ def _compute_project_target_sample_rates(self, organization): error_sample_rate_fallback=None, ) if current_rate: - project.update_option("sentry:target_sample_rate", current_rate) + project.update_option("sentry:target_sample_rate", round(current_rate, 4)) def handle_delete(self, request: Request, organization: Organization): """ diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index ca38e88b020406..938f8609eb01e1 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -70,12 +70,24 @@ class DiscoverDatasetSplitException(Exception): Referrer.API_PERFORMANCE_BROWSER_WEB_VITALS_TRANSACTIONS_SCORES.value, Referrer.API_PERFORMANCE_CACHE_LANDING_CACHE_TRANSACTION_LIST.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_APDEX_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_COLD_STARTUP_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_WARM_STARTUP_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_FAILURE_RATE_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_FROZEN_FRAMES_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_FRAMES_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_COLD_START.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_WARM_START.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_HIGHEST_CACHE_MISS_RATE_TRANSACTIONS.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_FROZEN_FRAMES.value, + 
Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_RELATED_ISSUES.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_SLOW_FRAMES.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_CONSUMING_DOMAINS.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_CONSUMING_RESOURCES.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_MOST_TIME_SPENT_DB_QUERIES.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_P50_DURATION_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_P75_DURATION_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_P95_DURATION_AREA.value, + Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_P99_DURATION_AREA.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_DB_OPS.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_HTTP_OPS.value, Referrer.API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_RESOURCE_OPS.value, @@ -422,7 +434,7 @@ def get(self, request: Request, organization) -> Response: def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]: if use_rpc and dataset == spans_eap: - spans_rpc.run_table_query( + return spans_rpc.run_table_query( params=snuba_params, query_string=query, selected_columns=self.get_field_list(organization, request), diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index caa829904bf217..c2c55d1f84f076 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -1,5 +1,5 @@ -from collections.abc import Mapping, Sequence -from datetime import datetime, timedelta +from collections.abc import Mapping +from datetime import timedelta from typing import Any import sentry_sdk @@ -14,6 +14,7 @@ from sentry.constants import MAX_TOP_EVENTS from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes from sentry.models.organization import Organization +from sentry.search.eap.types import SearchResolverConfig from sentry.search.events.types import SnubaParams from sentry.snuba import ( discover, @@ -21,10 +22,10 @@ functions, metrics_enhanced_performance, metrics_performance, - profile_functions_metrics, spans_eap, spans_indexed, spans_metrics, + spans_rpc, transactions, ) from sentry.snuba.metrics.extraction import MetricSpecType @@ -251,7 +252,6 @@ def get(self, request: Request, organization: Organization) -> Response: functions, metrics_performance, metrics_enhanced_performance, - profile_functions_metrics, spans_indexed, spans_metrics, spans_eap, @@ -274,17 +274,33 @@ def get(self, request: Request, organization: Organization) -> Response: return Response({"detail": f"Metric type must be one of: {metric_types}"}, status=400) force_metrics_layer = request.GET.get("forceMetricsLayer") == "true" + use_rpc = request.GET.get("useRpc", "0") == "1" def _get_event_stats( scoped_dataset: Any, - query_columns: Sequence[str], + query_columns: list[str], query: str, snuba_params: SnubaParams, rollup: int, zerofill_results: bool, - comparison_delta: datetime | None, + comparison_delta: timedelta | None, ) -> SnubaTSResult | dict[str, SnubaTSResult]: if top_events > 0: + if use_rpc and dataset == spans_eap: + return spans_rpc.run_top_events_timeseries_query( + params=snuba_params, + query_string=query, + y_axes=query_columns, + raw_groupby=self.get_field_list(organization, request), + orderby=self.get_orderby(request), + limit=top_events, + referrer=referrer, + granularity_secs=rollup, + config=SearchResolverConfig( + auto_fields=False, + 
use_aggregate_conditions=False, + ), + ) return scoped_dataset.top_events_timeseries( timeseries_columns=query_columns, selected_columns=self.get_field_list(organization, request), @@ -309,6 +325,22 @@ def _get_event_stats( ), ) + if use_rpc and dataset == spans_eap: + return spans_rpc.run_timeseries_query( + params=snuba_params, + query_string=query, + y_axes=query_columns, + granularity_secs=rollup, + referrer=referrer, + config=SearchResolverConfig( + auto_fields=False, + use_aggregate_conditions=False, + ), + comparison_delta=comparison_delta, + ) + + transform_alias_to_input_format = request.GET.get("transformAliasToInputFormat") == "1" + return scoped_dataset.timeseries_query( selected_columns=query_columns, query=query, @@ -338,6 +370,7 @@ def _get_event_stats( organization, actor=request.user, ), + transform_alias_to_input_format=transform_alias_to_input_format, ) def get_event_stats_factory(scoped_dataset): @@ -350,12 +383,12 @@ def get_event_stats_factory(scoped_dataset): dashboard_widget_id = request.GET.get("dashboardWidgetId", None) def fn( - query_columns: Sequence[str], + query_columns: list[str], query: str, snuba_params: SnubaParams, rollup: int, zerofill_results: bool, - comparison_delta: datetime | None, + comparison_delta: timedelta | None, ) -> SnubaTSResult | dict[str, SnubaTSResult]: if not (metrics_enhanced and dashboard_widget_id): @@ -519,6 +552,12 @@ def fn( return fn get_event_stats = get_event_stats_factory(dataset) + zerofill_results = not ( + request.GET.get("withoutZerofill") == "1" and has_chart_interpolation + ) + if use_rpc: + # The rpc will usually zerofill for us so we don't need to do it ourselves + zerofill_results = False try: return Response( @@ -528,9 +567,7 @@ def fn( get_event_stats, top_events, allow_partial_buckets=allow_partial_buckets, - zerofill_results=not ( - request.GET.get("withoutZerofill") == "1" and has_chart_interpolation - ), + zerofill_results=zerofill_results, comparison_delta=comparison_delta, dataset=dataset, ), diff --git a/src/sentry/api/endpoints/organization_fork.py b/src/sentry/api/endpoints/organization_fork.py index 52ed47960682c5..5a1aff9d7e50a7 100644 --- a/src/sentry/api/endpoints/organization_fork.py +++ b/src/sentry/api/endpoints/organization_fork.py @@ -145,13 +145,14 @@ def post(self, request: Request, organization_id_or_slug) -> Response: # We do not create a `RelocationFile` yet. Instead, we trigger a series of RPC calls (via # `uploading_start`, scheduled below) to create an export of the organization we are seeking # duplicate from the foreign region. 
+ provenance = Relocation.Provenance.SAAS_TO_SAAS with atomic_transaction(using=(router.db_for_write(Relocation))): new_relocation: Relocation = Relocation.objects.create( creator_id=request.user.id, owner_id=owner.id, step=Relocation.Step.UPLOADING.value, - scheduled_pause_at_step=get_autopause_value(), - provenance=Relocation.Provenance.SAAS_TO_SAAS, + scheduled_pause_at_step=get_autopause_value(provenance), + provenance=provenance, want_org_slugs=[org_mapping.slug], ) diff --git a/src/sentry/api/endpoints/organization_index.py b/src/sentry/api/endpoints/organization_index.py index 0cf1493ab8776f..45dfb2131681c4 100644 --- a/src/sentry/api/endpoints/organization_index.py +++ b/src/sentry/api/endpoints/organization_index.py @@ -1,3 +1,5 @@ +import logging + from django.conf import settings from django.db import IntegrityError from django.db.models import Count, Q, Sum @@ -35,6 +37,8 @@ from sentry.signals import org_setup_complete, terms_accepted from sentry.users.services.user.service import user_service +logger = logging.getLogger(__name__) + class OrganizationPostSerializer(BaseOrganizationSerializer): defaultTeam = serializers.BooleanField(required=False) @@ -120,6 +124,9 @@ def get(self, request: Request) -> Response: "organization" ) ) + if request.auth and request.auth.organization_id is not None and queryset.count() > 1: + # If a token is limited to one organization, this endpoint should only return that one organization + queryset = queryset.filter(id=request.auth.organization_id) query = request.GET.get("query") if query: diff --git a/src/sentry/api/endpoints/organization_member/index.py b/src/sentry/api/endpoints/organization_member/index.py index d92ac5988194c2..a1c2654784fadb 100644 --- a/src/sentry/api/endpoints/organization_member/index.py +++ b/src/sentry/api/endpoints/organization_member/index.py @@ -137,6 +137,10 @@ def validate_role(self, role): return self.validate_orgRole(role) def validate_orgRole(self, role): + if role == "billing" and features.has( + "organizations:invite-billing", self.context["organization"] + ): + return role role_obj = next((r for r in self.context["allowed_roles"] if r.id == role), None) if role_obj is None: raise serializers.ValidationError( @@ -314,13 +318,18 @@ def post(self, request: Request, organization) -> Response: """ Add or invite a member to an organization. 
""" - if not features.has("organizations:invite-members", organization, actor=request.user): + assigned_org_role = request.data.get("orgRole") or request.data.get("role") + billing_bypass = assigned_org_role == "billing" and features.has( + "organizations:invite-billing", organization + ) + if not billing_bypass and not features.has( + "organizations:invite-members", organization, actor=request.user + ): return Response( {"organization": "Your organization is not allowed to invite members"}, status=403 ) allowed_roles = get_allowed_org_roles(request, organization, creating_org_invite=True) - assigned_org_role = request.data.get("orgRole") or request.data.get("role") # We allow requests from integration tokens to invite new members as the member role only if not allowed_roles and request.access.is_integration_token: diff --git a/src/sentry/api/endpoints/organization_member_unreleased_commits.py b/src/sentry/api/endpoints/organization_member_unreleased_commits.py deleted file mode 100644 index 4d49426ab48613..00000000000000 --- a/src/sentry/api/endpoints/organization_member_unreleased_commits.py +++ /dev/null @@ -1,92 +0,0 @@ -from django.db import connections - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases import OrganizationMemberEndpoint -from sentry.api.serializers import serialize -from sentry.models.commit import Commit -from sentry.models.repository import Repository -from sentry.users.services.user.service import user_service - -# TODO(dcramer): once LatestRepoReleaseEnvironment is backfilled, change this query to use the new -# schema [performance] -query = """ -select c1.* -from sentry_commit c1 -join ( - select max(c2.date_added) as date_added, c2.repository_id - from sentry_commit as c2 - join ( - select distinct commit_id from sentry_releasecommit - where organization_id = %%s - ) as rc2 - on c2.id = rc2.commit_id - group by c2.repository_id -) as cmax -on c1.repository_id = cmax.repository_id -where c1.date_added > cmax.date_added -and c1.author_id IN ( - select id - from sentry_commitauthor - where organization_id = %%s - and upper(email) IN (%s) -) -order by c1.date_added desc -""" - -quote_name = connections["default"].ops.quote_name - - -from rest_framework.request import Request -from rest_framework.response import Response - - -@region_silo_endpoint -class OrganizationMemberUnreleasedCommitsEndpoint(OrganizationMemberEndpoint): - owner = ApiOwner.UNOWNED - publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - } - - def get(self, request: Request, organization, member) -> Response: - user = user_service.get_user(member.user_id) - if user is None: - email_list = [] - else: - email_list = [e.email for e in user.useremails if e.is_verified] - - if not email_list: - return self.respond( - {"commits": [], "repositories": {}, "errors": {"missing_emails": True}} - ) - - params = [organization.id, organization.id] - for e in email_list: - params.append(e.upper()) - - queryset = Commit.objects.raw(query % (", ".join("%s" for _ in email_list),), params) - - results = list(queryset) - - if results: - repos = list(Repository.objects.filter(id__in={r.repository_id for r in results})) - else: - repos = [] - - return self.respond( - { - "commits": [ - { - "id": c.key, - "message": c.message, - "dateCreated": c.date_added, - "repositoryID": str(c.repository_id), - } - for c in results - ], - "repositories": { - str(r.id): d for r, d in zip(repos, 
serialize(repos, request.user)) - }, - } - ) diff --git a/src/sentry/api/endpoints/organization_metrics_samples.py b/src/sentry/api/endpoints/organization_metrics_samples.py deleted file mode 100644 index f81916c0159c8d..00000000000000 --- a/src/sentry/api/endpoints/organization_metrics_samples.py +++ /dev/null @@ -1,107 +0,0 @@ -import sentry_sdk -from rest_framework import serializers -from rest_framework.exceptions import ParseError -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase -from sentry.api.paginator import GenericOffsetPaginator -from sentry.api.utils import handle_query_errors -from sentry.exceptions import InvalidSearchQuery -from sentry.models.organization import Organization -from sentry.sentry_metrics.querying.samples_list import get_sample_list_executor_cls -from sentry.snuba.metrics.naming_layer.mri import is_mri -from sentry.snuba.referrer import Referrer -from sentry.utils.dates import get_rollup_from_request -from sentry.utils.snuba import SnubaError - - -class MetricsSamplesSerializer(serializers.Serializer): - mri = serializers.CharField(required=True) - field = serializers.ListField(required=True, allow_empty=False, child=serializers.CharField()) - max = serializers.FloatField(required=False) - min = serializers.FloatField(required=False) - operation = serializers.CharField(required=False) - query = serializers.CharField(required=False) - referrer = serializers.CharField(required=False) - sort = serializers.CharField(required=False) - - def validate_mri(self, mri: str) -> str: - if not is_mri(mri): - raise serializers.ValidationError(f"Invalid MRI: {mri}") - - return mri - - -@region_silo_endpoint -class OrganizationMetricsSamplesEndpoint(OrganizationEventsV2EndpointBase): - publish_status = { - "GET": ApiPublishStatus.EXPERIMENTAL, - } - owner = ApiOwner.TELEMETRY_EXPERIENCE - snuba_methods = ["GET"] - - def get(self, request: Request, organization: Organization) -> Response: - try: - snuba_params = self.get_snuba_params(request, organization) - except NoProjects: - return Response(status=404) - - try: - rollup = get_rollup_from_request( - request, - snuba_params.end_date - snuba_params.start_date, - default_interval=None, - error=InvalidSearchQuery(), - ) - except InvalidSearchQuery: - rollup = 3600 # use a default of 1 hour - - serializer = MetricsSamplesSerializer(data=request.GET) - if not serializer.is_valid(): - return Response(serializer.errors, status=400) - - serialized = serializer.validated_data - - executor_cls = get_sample_list_executor_cls(serialized["mri"]) - if not executor_cls: - raise ParseError(f"Unsupported MRI: {serialized['mri']}") - - sort = serialized.get("sort") - if sort is not None: - column = sort[1:] if sort.startswith("-") else sort - if not executor_cls.supports_sort(column): - raise ParseError(f"Unsupported sort: {sort} for MRI") - - executor = executor_cls( - mri=serialized["mri"], - snuba_params=snuba_params, - fields=serialized["field"], - operation=serialized.get("operation"), - query=serialized.get("query", ""), - min=serialized.get("min"), - max=serialized.get("max"), - sort=serialized.get("sort"), - rollup=rollup, - referrer=Referrer.API_ORGANIZATION_METRICS_SAMPLES, - ) - - with handle_query_errors(): - try: - return self.paginate( - request=request, - 
paginator=GenericOffsetPaginator(data_fn=executor.get_matching_spans), - on_results=lambda results: self.handle_results_with_meta( - request, - organization, - snuba_params.project_ids, - results, - standard_meta=True, - ), - ) - except SnubaError as exc: - sentry_sdk.capture_exception(exc) - raise diff --git a/src/sentry/api/endpoints/organization_onboarding_tasks.py b/src/sentry/api/endpoints/organization_onboarding_tasks.py index bdb047816d7dbf..7d72ba0339af03 100644 --- a/src/sentry/api/endpoints/organization_onboarding_tasks.py +++ b/src/sentry/api/endpoints/organization_onboarding_tasks.py @@ -43,7 +43,7 @@ def post(self, request: Request, organization) -> Response: # Cannot skip unskippable tasks if ( status == OnboardingTaskStatus.SKIPPED - and task_id not in onboarding_tasks.get_skippable_tasks(organization) + and task_id not in onboarding_tasks.get_skippable_tasks(organization, request.user) ): return Response(status=422) @@ -63,7 +63,7 @@ def post(self, request: Request, organization) -> Response: ) if rows_affected or created: - onboarding_tasks.try_mark_onboarding_complete(organization.id) + onboarding_tasks.try_mark_onboarding_complete(organization.id, request.user) return Response(status=204) diff --git a/src/sentry/api/endpoints/organization_profiling_profiles.py b/src/sentry/api/endpoints/organization_profiling_profiles.py index 46fb9e0217b46f..3dbd6da5031f69 100644 --- a/src/sentry/api/endpoints/organization_profiling_profiles.py +++ b/src/sentry/api/endpoints/organization_profiling_profiles.py @@ -1,4 +1,3 @@ -import sentry_sdk from django.http import HttpResponse from rest_framework import serializers from rest_framework.exceptions import ParseError @@ -15,13 +14,7 @@ from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase from sentry.api.utils import handle_query_errors from sentry.models.organization import Organization -from sentry.profiles.flamegraph import ( - FlamegraphExecutor, - get_chunks_from_spans_metadata, - get_profile_ids, - get_profiles_with_function, - get_spans_from_group, -) +from sentry.profiles.flamegraph import FlamegraphExecutor from sentry.profiles.profile_chunks import get_chunk_ids from sentry.profiles.utils import proxy_profiling_service from sentry.snuba.dataset import Dataset, StorageKey @@ -71,36 +64,6 @@ def get(self, request: Request, organization: Organization) -> HttpResponse: if not features.has("organizations:profiling", organization, actor=request.user): return Response(status=404) - if not features.has( - "organizations:continuous-profiling-compat", organization, actor=request.user - ): - snuba_params = self.get_snuba_params(request, organization) - - project_ids = snuba_params.project_ids - if len(project_ids) > 1: - raise ParseError(detail="You cannot get a flamegraph from multiple projects.") - - if request.query_params.get("fingerprint"): - sentry_sdk.set_tag("data source", "functions") - function_fingerprint = int(request.query_params["fingerprint"]) - - profile_ids = get_profiles_with_function( - organization.id, - project_ids[0], - function_fingerprint, - snuba_params, - request.GET.get("query", ""), - ) - else: - sentry_sdk.set_tag("data source", "profiles") - profile_ids = get_profile_ids(snuba_params, request.query_params.get("query", None)) - - return proxy_profiling_service( - method="POST", - path=f"/organizations/{organization.id}/projects/{project_ids[0]}/flamegraph", - json_data=profile_ids, - ) - try: snuba_params = self.get_snuba_params(request, organization) except NoProjects: @@ -163,38 +126,6 @@ 
def get(self, request: Request, organization: Organization) -> HttpResponse: ) -@region_silo_endpoint -class OrganizationProfilingChunksFlamegraphEndpoint(OrganizationProfilingBaseEndpoint): - def get(self, request: Request, organization: Organization) -> HttpResponse: - if not features.has("organizations:profiling", organization, actor=request.user): - return Response(status=404) - - snuba_params = self.get_snuba_params(request, organization) - - project_ids = snuba_params.project_ids - if len(project_ids) != 1: - raise ParseError(detail="one project_id must be specified.") - - span_group = request.query_params.get("span_group") - if span_group is None: - raise ParseError(detail="span_group must be specified.") - - spans = get_spans_from_group( - organization.id, - project_ids[0], - snuba_params, - span_group, - ) - - chunksMetadata = get_chunks_from_spans_metadata(organization.id, project_ids[0], spans) - - return proxy_profiling_service( - method="POST", - path=f"/organizations/{organization.id}/projects/{project_ids[0]}/chunks-flamegraph", - json_data={"chunks_metadata": chunksMetadata}, - ) - - @region_silo_endpoint class OrganizationProfilingHasChunksEndpoint(OrganizationProfilingBaseEndpoint): def get(self, request: Request, organization: Organization) -> HttpResponse: diff --git a/src/sentry/api/endpoints/organization_sampling_project_rates.py b/src/sentry/api/endpoints/organization_sampling_project_rates.py index c02bb7c931378d..7036c4d604000c 100644 --- a/src/sentry/api/endpoints/organization_sampling_project_rates.py +++ b/src/sentry/api/endpoints/organization_sampling_project_rates.py @@ -123,7 +123,7 @@ def put(self, request: Request, organization: Organization) -> Response: project_ids = {int(d["id"]) for d in serializer.data} projects = self.get_projects(request, organization, project_ids=project_ids) - rate_by_project = {d["id"]: d["sampleRate"] for d in serializer.data} + rate_by_project = {d["id"]: round(d["sampleRate"], 4) for d in serializer.data} with transaction.atomic(router.db_for_write(ProjectOption)): for project in projects: project.update_option(OPTION_KEY, rate_by_project[project.id]) diff --git a/src/sentry/api/endpoints/organization_sampling_project_span_counts.py b/src/sentry/api/endpoints/organization_sampling_project_span_counts.py index 3d4f79bc8d3ee4..177c107f1939b4 100644 --- a/src/sentry/api/endpoints/organization_sampling_project_span_counts.py +++ b/src/sentry/api/endpoints/organization_sampling_project_span_counts.py @@ -43,7 +43,7 @@ def get(self, request: Request, organization: Organization) -> Response: Project.objects.filter(organization=organization, status=ObjectStatus.ACTIVE) ) mql = f"sum({SpanMRI.COUNT_PER_ROOT_PROJECT.value}) by (project,target_project_id)" - query = MQLQuery(mql=mql, order=QueryOrder.DESC) + query = MQLQuery(mql=mql, order=QueryOrder.DESC, limit=10000) results = run_queries( mql_queries=[query], start=start, diff --git a/src/sentry/api/endpoints/organization_spans_fields.py b/src/sentry/api/endpoints/organization_spans_fields.py index e96b3ca45efed5..dc3f0a5f03add3 100644 --- a/src/sentry/api/endpoints/organization_spans_fields.py +++ b/src/sentry/api/endpoints/organization_spans_fields.py @@ -1,5 +1,6 @@ from abc import ABC, abstractmethod from datetime import timedelta +from typing import Literal import sentry_sdk from google.protobuf.timestamp_pb2 import Timestamp @@ -7,15 +8,11 @@ from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response 
-from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey -from sentry_protos.snuba.v1alpha.endpoint_tags_list_pb2 import ( - AttributeValuesRequest, - AttributeValuesResponse, - TraceItemAttributesRequest, - TraceItemAttributesResponse, +from sentry_protos.snuba.v1.endpoint_trace_item_attributes_pb2 import ( + TraceItemAttributeNamesRequest, + TraceItemAttributeValuesRequest, ) -from sentry_protos.snuba.v1alpha.request_common_pb2 import RequestMeta, TraceItemName -from sentry_protos.snuba.v1alpha.trace_item_attribute_pb2 import AttributeKey as AlphaAttributeKey +from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME from snuba_sdk import Condition, Op @@ -29,6 +26,7 @@ from sentry.api.serializers import serialize from sentry.api.utils import handle_query_errors from sentry.models.organization import Organization +from sentry.search.eap.columns import translate_internal_to_public_alias from sentry.search.eap.spans import SearchResolver from sentry.search.eap.types import SearchResolverConfig from sentry.search.events.builder.base import BaseQueryBuilder @@ -39,15 +37,23 @@ from sentry.tagstore.types import TagKey, TagValue from sentry.utils import snuba_rpc -# This causes problems if a user sends an attribute with any of these values -# but the meta table currently can't handle that anyways -# More users will see the 3 of these since they're on everything so lets try to make -# the common usecase more reasonable -TAG_NAME_MAPPING = { - "segment_name": "transaction", - "name": "span.description", - "service": "project", -} + +def as_tag_key(name: str, type: Literal["string", "number"]): + key = translate_internal_to_public_alias(name, type) + + if key is not None: + name = key + elif type == "number": + key = f"tags[{name},number]" + else: + key = name + + return { + # key is what will be used to query the API + "key": key, + # name is what will be used to display the tag nicely in the UI + "name": name, + } class OrganizationSpansFieldsEndpointBase(OrganizationEventsV2EndpointBase): @@ -62,13 +68,7 @@ class OrganizationSpansFieldsEndpointSerializer(serializers.Serializer): ["spans", "spansIndexed"], required=False, default="spansIndexed" ) type = serializers.ChoiceField(["string", "number"], required=False) - - def validate_type(self, value): - if value == "string": - return AlphaAttributeKey.Type.TYPE_STRING - if value == "number": - return AlphaAttributeKey.Type.TYPE_FLOAT - raise NotImplementedError + process = serializers.BooleanField(required=False) def validate(self, attrs): if attrs["dataset"] == "spans" and attrs.get("type") is None: @@ -101,42 +101,40 @@ def get(self, request: Request, organization: Organization) -> Response: max_span_tags = options.get("performance.spans-tags-key.max") - if serialized["dataset"] == "spans" and features.has( - "organizations:visibility-explore-dataset", organization, actor=request.user - ): - start_timestamp = Timestamp() - start_timestamp.FromDatetime( - snuba_params.start_date.replace(hour=0, minute=0, second=0, microsecond=0) + if serialized["dataset"] == "spans": + snuba_params.start = snuba_params.start_date.replace( + hour=0, minute=0, second=0, microsecond=0 ) + snuba_params.end = snuba_params.end_date.replace( + hour=0, minute=0, second=0, microsecond=0 + ) + timedelta(days=1) - end_timestamp = Timestamp() - end_timestamp.FromDatetime( - snuba_params.end_date.replace(hour=0, minute=0, second=0, microsecond=0) - + timedelta(days=1) - ) + resolver = 
SearchResolver(params=snuba_params, config=SearchResolverConfig()) + meta = resolver.resolve_meta(referrer=Referrer.API_SPANS_TAG_KEYS_RPC.value) - rpc_request = TraceItemAttributesRequest( - meta=RequestMeta( - organization_id=organization.id, - cogs_category="performance", - referrer=Referrer.API_SPANS_TAG_KEYS_RPC.value, - project_ids=snuba_params.project_ids, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - trace_item_name=TraceItemName.TRACE_ITEM_NAME_EAP_SPANS, - ), + rpc_request = TraceItemAttributeNamesRequest( + meta=meta, limit=max_span_tags, offset=0, - type=serialized["type"], + type=( + AttributeKey.Type.TYPE_FLOAT + if serialized["type"] == "number" + else AttributeKey.Type.TYPE_STRING + ), ) - rpc_response = snuba_rpc.rpc(rpc_request, TraceItemAttributesResponse) + + rpc_response = snuba_rpc.attribute_names_rpc(rpc_request) paginator = ChainPaginator( [ [ - TagKey(TAG_NAME_MAPPING.get(tag.name, tag.name)) - for tag in rpc_response.tags - if tag.name + ( + as_tag_key(attribute.name, serialized["type"]) + if serialized["process"] + else TagKey(attribute.name) + ) + for attribute in rpc_response.attributes + if attribute.name ], ], max_limit=max_span_tags, @@ -217,9 +215,7 @@ def get(self, request: Request, organization: Organization, key: str) -> Respons executor: BaseSpanFieldValuesAutocompletionExecutor - if serialized["dataset"] == "spans" and features.has( - "organizations:visibility-explore-dataset", organization, actor=request.user - ): + if serialized["dataset"] == "spans": executor = EAPSpanFieldValuesAutocompletionExecutor( organization=organization, snuba_params=snuba_params, @@ -408,15 +404,14 @@ def __init__( max_span_tag_values: int, ): super().__init__(organization, snuba_params, key, query, max_span_tag_values) + self.resolver = SearchResolver(params=snuba_params, config=SearchResolverConfig()) self.attribute_key = self.resolve_attribute_key(key, snuba_params) def resolve_attribute_key(self, key: str, snuba_params: SnubaParams) -> AttributeKey | None: - resolver = SearchResolver(params=snuba_params, config=SearchResolverConfig()) - resolved, _ = resolver.resolve_attribute(key) - proto = resolved.proto_definition - if not isinstance(proto, AttributeKey): + resolved, _ = self.resolver.resolve_attribute(key) + if resolved.search_type != "string": return None - return proto + return resolved.proto_definition def execute(self) -> list[TagValue]: if self.key in self.PROJECT_ID_KEYS: @@ -443,31 +438,24 @@ def default_autocomplete_function(self) -> list[TagValue]: ) query = translate_escape_sequences(self.query) - rpc_request = AttributeValuesRequest( - meta=RequestMeta( - organization_id=self.organization.id, - cogs_category="performance", - referrer=Referrer.API_SPANS_TAG_VALUES_RPC.value, - project_ids=self.snuba_params.project_ids, - start_timestamp=start_timestamp, - end_timestamp=end_timestamp, - trace_item_name=TraceItemName.TRACE_ITEM_NAME_EAP_SPANS, - ), - name=self.attribute_key.name, + + meta = self.resolver.resolve_meta(referrer=Referrer.API_SPANS_TAG_VALUES_RPC.value) + rpc_request = TraceItemAttributeValuesRequest( + meta=meta, + key=self.attribute_key, value_substring_match=query, limit=self.max_span_tag_values, - offset=0, ) - rpc_response = snuba_rpc.rpc(rpc_request, AttributeValuesResponse) + rpc_response = snuba_rpc.attribute_values_rpc(rpc_request) return [ TagValue( key=self.key, - value=tag_value, + value=value, times_seen=None, first_seen=None, last_seen=None, ) - for tag_value in rpc_response.values - if tag_value + for value 
in rpc_response.values + if value ] diff --git a/src/sentry/api/endpoints/organization_tags.py b/src/sentry/api/endpoints/organization_tags.py index 197210992ecd40..ccaeeeaa10d658 100644 --- a/src/sentry/api/endpoints/organization_tags.py +++ b/src/sentry/api/endpoints/organization_tags.py @@ -1,16 +1,18 @@ +import datetime + import sentry_sdk from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response -from sentry import tagstore +from sentry import features, options, tagstore from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases import NoProjects from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize -from sentry.api.utils import handle_query_errors +from sentry.api.utils import clamp_date_range, handle_query_errors from sentry.snuba.dataset import Dataset from sentry.utils.numbers import format_grouped_length from sentry.utils.sdk import set_measurement @@ -39,11 +41,23 @@ def get(self, request: Request, organization) -> Response: with sentry_sdk.start_span(op="tagstore", name="get_tag_keys_for_projects"): with handle_query_errors(): + start = filter_params["start"] + end = filter_params["end"] + + if features.has("organizations:tag-key-sample-n", organization) and start and end: + # Tag queries longer than 14 days tend to time out for large customers. For getting a list of tags, clamping to 14 days is a reasonable compromise of speed vs. completeness + (start, end) = clamp_date_range( + (start, end), + datetime.timedelta( + days=options.get("visibility.tag-key-max-date-range.days") + ), + ) + results = tagstore.backend.get_tag_keys_for_projects( filter_params["project_id"], filter_params.get("environment"), - filter_params["start"], - filter_params["end"], + start, + end, use_cache=request.GET.get("use_cache", "0") == "1", dataset=dataset, tenant_ids={"organization_id": organization.id}, diff --git a/src/sentry/api/endpoints/organization_traces.py b/src/sentry/api/endpoints/organization_traces.py index 19f9cf153f7438..24785b8ef0f4a8 100644 --- a/src/sentry/api/endpoints/organization_traces.py +++ b/src/sentry/api/endpoints/organization_traces.py @@ -12,7 +12,7 @@ from rest_framework.exceptions import ParseError, ValidationError from rest_framework.request import Request from rest_framework.response import Response -from snuba_sdk import And, BooleanCondition, BooleanOp, Column, Condition, Function, Op, Or +from snuba_sdk import BooleanCondition, BooleanOp, Column, Condition, Function, Op from urllib3.exceptions import ReadTimeoutError from sentry import features, options @@ -35,7 +35,6 @@ ) from sentry.search.events.constants import TIMEOUT_SPAN_ERROR_MESSAGE from sentry.search.events.types import QueryBuilderConfig, SnubaParams, WhereType -from sentry.sentry_metrics.querying.samples_list import SpanKey, get_sample_list_executor_cls from sentry.snuba import discover, spans_indexed from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer @@ -88,11 +87,6 @@ class OrganizationTracesSerializer(serializers.Serializer): dataset = serializers.ChoiceField( ["spans", "spansIndexed"], required=False, default="spansIndexed" ) - metricsMax = serializers.FloatField(required=False) - metricsMin = serializers.FloatField(required=False) - metricsOp = serializers.CharField(required=False) - metricsQuery = 
serializers.CharField(required=False) - mri = serializers.CharField(required=False) breakdownSlices = serializers.IntegerField(default=40, min_value=1, max_value=100) query = serializers.ListField( @@ -164,11 +158,6 @@ def get(self, request: Request, organization: Organization) -> Response: snuba_params=snuba_params, user_queries=serialized.get("query", []), sort=serialized.get("sort"), - metrics_max=serialized.get("metricsMax"), - metrics_min=serialized.get("metricsMin"), - metrics_operation=serialized.get("metricsOp"), - metrics_query=serialized.get("metricsQuery"), - mri=serialized.get("mri"), limit=self.get_per_page(request), breakdown_slices=serialized["breakdownSlices"], get_all_projects=lambda: self.get_projects( @@ -198,11 +187,6 @@ class OrganizationTraceSpansSerializer(serializers.Serializer): dataset = serializers.ChoiceField( ["spans", "spansIndexed"], required=False, default="spansIndexed" ) - metricsMax = serializers.FloatField(required=False) - metricsMin = serializers.FloatField(required=False) - metricsOp = serializers.CharField(required=False) - metricsQuery = serializers.CharField(required=False) - mri = serializers.CharField(required=False) field = serializers.ListField(required=True, allow_empty=False, child=serializers.CharField()) sort = serializers.ListField(required=False, allow_empty=True, child=serializers.CharField()) @@ -245,11 +229,6 @@ def get(self, request: Request, organization: Organization, trace_id: str) -> Re fields=serialized["field"], user_queries=serialized.get("query", []), sort=serialized.get("sort"), - metrics_max=serialized.get("metricsMax"), - metrics_min=serialized.get("metricsMin"), - metrics_operation=serialized.get("metricsOp"), - metrics_query=serialized.get("metricsQuery"), - mri=serialized.get("mri"), ) return self.paginate( @@ -311,7 +290,9 @@ def get(self, request: Request, organization: Organization) -> Response: zerofill = not ( request.GET.get("withoutZerofill") == "1" and features.get( - "organizations:performance-chart-interpolation", organization, actor=request.user + "organizations:performance-chart-interpolation", + organization, + actor=request.user, ) ) @@ -358,11 +339,6 @@ def __init__( snuba_params: SnubaParams, user_queries: list[str], sort: str | None, - metrics_max: float | None, - metrics_min: float | None, - metrics_operation: str | None, - metrics_query: str | None, - mri: str | None, limit: int, breakdown_slices: int, get_all_projects: Callable[[], list[Project]], @@ -371,11 +347,6 @@ def __init__( self.snuba_params = snuba_params self.user_queries = process_user_queries(snuba_params, user_queries, dataset) self.sort = sort - self.metrics_max = metrics_max - self.metrics_min = metrics_min - self.metrics_operation = metrics_operation - self.metrics_query = metrics_query - self.mri = mri self.offset = 0 self.limit = limit self.breakdown_slices = breakdown_slices @@ -499,76 +470,8 @@ def get_traces_matching_conditions( self, snuba_params: SnubaParams, ) -> tuple[datetime, datetime, list[str]]: - if self.mri is not None: - sentry_sdk.set_tag("mri", self.mri) - return self.get_traces_matching_metric_conditions(snuba_params) - return self.get_traces_matching_span_conditions(snuba_params) - def get_traces_matching_metric_conditions( - self, - snuba_params: SnubaParams, - ) -> tuple[datetime, datetime, list[str]]: - assert self.mri is not None - - executor_cls = get_sample_list_executor_cls(self.mri) - if executor_cls is None: - raise ParseError(detail=f"Unsupported MRI: {self.mri}") - - executor = executor_cls( - 
mri=self.mri, - snuba_params=snuba_params, - fields=["trace"], - max=self.metrics_max, - min=self.metrics_min, - operation=self.metrics_operation, - query=self.metrics_query, - referrer=Referrer.API_TRACE_EXPLORER_METRICS_SPANS_LIST, - ) - - trace_ids, timestamps = executor.get_matching_traces(MAX_SNUBA_RESULTS) - - min_timestamp = snuba_params.end - max_timestamp = snuba_params.start - assert min_timestamp is not None - assert max_timestamp is not None - - for timestamp in timestamps: - min_timestamp = min(min_timestamp, timestamp) - max_timestamp = max(max_timestamp, timestamp) - - if not trace_ids or min_timestamp > max_timestamp: - return min_timestamp, max_timestamp, [] - - self.refine_params(min_timestamp, max_timestamp) - - if self.user_queries: - # If there are user queries, further refine the trace ids by applying them - # leaving us with only traces where the metric exists and matches the user - # queries. - ( - min_timestamp, - max_timestamp, - trace_ids, - ) = self.get_traces_matching_span_conditions_in_traces(snuba_params, trace_ids) - - if not trace_ids: - return min_timestamp, max_timestamp, [] - else: - # No user queries so take the first N trace ids as our list - min_timestamp = snuba_params.end - max_timestamp = snuba_params.start - assert min_timestamp is not None - assert max_timestamp is not None - - trace_ids = trace_ids[: self.limit] - timestamps = timestamps[: self.limit] - for timestamp in timestamps: - min_timestamp = min(min_timestamp, timestamp) - max_timestamp = max(max_timestamp, timestamp) - - return min_timestamp, max_timestamp, trace_ids - def get_traces_matching_span_conditions( self, snuba_params: SnubaParams, @@ -947,7 +850,8 @@ def get_trace_info(trace: str) -> tuple[str, str, float] | tuple[None, None, Non "numErrors": traces_errors.get(row["trace"], 0), "numOccurrences": traces_occurrences.get(row["trace"], 0), "matchingSpans": row[MATCHING_COUNT_ALIAS], - "numSpans": row["count()"], + # In EAP mode, we have to use `count_sample()` to avoid extrapolation + "numSpans": row.get("count()") or row.get("count_sample()") or 0, "project": info[0], "name": info[1], "rootDuration": info[2], @@ -1066,7 +970,7 @@ def get_traces_metas_query_eap( query=None, selected_columns=[ "trace", - "count()", + "count_sample()", "first_seen()", "last_seen()", ], @@ -1227,74 +1131,30 @@ def __init__( fields: list[str], user_queries: list[str], sort: list[str] | None, - metrics_max: float | None, - metrics_min: float | None, - metrics_operation: str | None, - metrics_query: str | None, - mri: str | None, ): self.dataset = dataset self.snuba_params = snuba_params self.trace_id = trace_id self.fields = fields self.user_queries = process_user_queries(snuba_params, user_queries, dataset) - self.metrics_max = metrics_max - self.metrics_min = metrics_min - self.metrics_operation = metrics_operation - self.metrics_query = metrics_query - self.mri = mri self.sort = sort def execute(self, offset: int, limit: int): with handle_span_query_errors(): - span_keys = self.get_metrics_span_keys() - - with handle_span_query_errors(): - spans = self.get_user_spans( + return self.get_user_spans( self.snuba_params, - span_keys, offset=offset, limit=limit, ) - return spans - - def get_metrics_span_keys(self) -> list[SpanKey] | None: - if self.mri is None: - return None - - executor_cls = get_sample_list_executor_cls(self.mri) - if executor_cls is None: - raise ParseError(detail=f"Unsupported MRI: {self.mri}") - - executor = executor_cls( - mri=self.mri, - snuba_params=self.snuba_params, - 
fields=["trace"], - max=self.metrics_max, - min=self.metrics_min, - operation=self.metrics_operation, - query=self.metrics_query, - referrer=Referrer.API_TRACE_EXPLORER_METRICS_SPANS_LIST, - ) - - span_keys = executor.get_matching_spans_from_traces( - [self.trace_id], - MAX_SNUBA_RESULTS, - ) - - return span_keys - def get_user_spans( self, snuba_params: SnubaParams, - span_keys: list[SpanKey] | None, limit: int, offset: int, ): user_spans_query = self.get_user_spans_query( snuba_params, - span_keys, limit=limit, offset=offset, ) @@ -1312,15 +1172,12 @@ def get_user_spans( def get_user_spans_query( self, snuba_params: SnubaParams, - span_keys: list[SpanKey] | None, limit: int, offset: int, ) -> BaseQueryBuilder: if self.dataset == Dataset.EventsAnalyticsPlatform: - # span_keys is not supported in EAP mode because that's a legacy - # code path to support metrics that no longer exists return self.get_user_spans_query_eap(snuba_params, limit, offset) - return self.get_user_spans_query_indexed(snuba_params, span_keys, limit, offset) + return self.get_user_spans_query_indexed(snuba_params, limit, offset) def get_user_spans_query_eap( self, @@ -1382,7 +1239,6 @@ def get_user_spans_query_eap( def get_user_spans_query_indexed( self, snuba_params: SnubaParams, - span_keys: list[SpanKey] | None, limit: int, offset: int, ) -> BaseQueryBuilder: @@ -1410,69 +1266,30 @@ def get_user_spans_query_indexed( conditions = [] - if span_keys is None: - # Next we have to turn the user queries into the appropriate conditions in - # the SnQL that we produce. + # Next we have to turn the user queries into the appropriate conditions in + # the SnQL that we produce. - # There are multiple sets of user conditions that needs to be satisfied - # and if a span satisfy any of them, it should be considered. - # - # To handle this use case, we want to OR all the user specified - # conditions together in this query. - for where in user_conditions: - if len(where) > 1: - conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where)) - elif len(where) == 1: - conditions.append(where[0]) - - if len(conditions) > 1: - # More than 1 set of conditions were specified, we want to show - # spans that match any 1 of them so join the conditions with `OR`s. - user_spans_query.add_conditions( - [BooleanCondition(op=BooleanOp.OR, conditions=conditions)] - ) - elif len(conditions) == 1: - # Only 1 set of user conditions were specified, simply insert them into - # the final query. - user_spans_query.add_conditions([conditions[0]]) - else: - # Next if there are known span_keys, we only try to fetch those spans - # This are the additional conditions to better take advantage of the ORDER BY - # on the spans table. This creates a list of conditions to be `OR`ed together - # that can will be used by ClickHouse to narrow down the granules. - # - # The span ids are not in this condition because they are more effective when - # specified within the `PREWHERE` clause. So, it's in a separate condition. - conditions = [ - And( - [ - Condition(user_spans_query.column("span.group"), Op.EQ, key.group), - Condition( - user_spans_query.column("timestamp"), - Op.EQ, - datetime.fromisoformat(key.timestamp), - ), - ] - ) - for key in span_keys - ] + # There are multiple sets of user conditions that needs to be satisfied + # and if a span satisfy any of them, it should be considered. + # + # To handle this use case, we want to OR all the user specified + # conditions together in this query. 
+ for where in user_conditions: + if len(where) > 1: + conditions.append(BooleanCondition(op=BooleanOp.AND, conditions=where)) + elif len(where) == 1: + conditions.append(where[0]) - if len(conditions) == 1: - order_by_condition = conditions[0] - else: - order_by_condition = Or(conditions) - - # Using `IN` combined with putting the list in a SnQL "tuple" triggers an optimizer - # in snuba where it - # 1. moves the condition into the `PREWHERE` clause - # 2. maps the ids to the underlying UInt64 and uses the bloom filter index - span_id_condition = Condition( - user_spans_query.column("id"), - Op.IN, - Function("tuple", [key.span_id for key in span_keys]), + if len(conditions) > 1: + # More than 1 set of conditions were specified, we want to show + # spans that match any 1 of them so join the conditions with `OR`s. + user_spans_query.add_conditions( + [BooleanCondition(op=BooleanOp.OR, conditions=conditions)] ) - - user_spans_query.add_conditions([order_by_condition, span_id_condition]) + elif len(conditions) == 1: + # Only 1 set of user conditions were specified, simply insert them into + # the final query. + user_spans_query.add_conditions([conditions[0]]) return user_spans_query diff --git a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py b/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py deleted file mode 100644 index b3bd015ccb08d5..00000000000000 --- a/src/sentry/api/endpoints/organization_transaction_anomaly_detection.py +++ /dev/null @@ -1,143 +0,0 @@ -from collections import namedtuple -from datetime import datetime, timedelta, timezone - -import orjson -from django.conf import settings -from rest_framework.request import Request -from rest_framework.response import Response -from urllib3 import Retry - -from sentry import features -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases import OrganizationEventsEndpointBase -from sentry.api.utils import get_date_range_from_params, handle_query_errors -from sentry.net.http import connection_from_url -from sentry.snuba.metrics_enhanced_performance import timeseries_query - -ads_connection_pool = connection_from_url( - settings.SEER_ANOMALY_DETECTION_URL, - retries=Retry( - total=5, - status_forcelist=[408, 429, 502, 503, 504], - ), - timeout=settings.SEER_ANOMALY_DETECTION_TIMEOUT, -) - -MappedParams = namedtuple("MappedParams", ["query_start", "query_end", "granularity"]) - - -def get_anomalies(snuba_io): - response = ads_connection_pool.urlopen( - "POST", - "/anomaly/predict", - body=orjson.dumps(snuba_io, option=orjson.OPT_UTC_Z), - headers={"content-type": "application/json;charset=utf-8"}, - ) - return Response(orjson.loads(response.data), status=200) - - -def get_time_params(start: datetime, end: datetime) -> MappedParams: - """ - Takes visualization start/end timestamps - and returns the start/end/granularity - of the snuba query that we should execute - Attributes: - start: datetime representing start of visualization window - end: datetime representing end of visualization window - Returns: - results: namedtuple containing - query_start: datetime representing start of query window - query_end: datetime representing end of query window - granularity: granularity to use (in seconds) - """ - anomaly_detection_range = end - start - - if anomaly_detection_range > timedelta(days=14): - snuba_range = timedelta(days=90) - granularity = 3600 - - elif anomaly_detection_range > timedelta(days=1): - granularity = 
1200 - snuba_range = timedelta(days=28) - - else: - snuba_range = timedelta(days=14) - granularity = 600 - - additional_time_needed = snuba_range - anomaly_detection_range - now = datetime.now(timezone.utc) - start_limit = now - timedelta(days=90) - end_limit = now - start = max(start, start_limit) - end = min(end, end_limit) - # By default, expand windows equally in both directions - window_increase = additional_time_needed / 2 - query_start, query_end = None, None - - # If window will go back farther than 90 days, use today - 90 as start - if start - window_increase < start_limit: - query_start = now - timedelta(days=90) - additional_time_needed -= start - query_start - window_increase = additional_time_needed - # If window extends beyond today, use today as end - if end + window_increase > end_limit: - query_end = now - additional_time_needed -= query_end - end - window_increase = additional_time_needed - - query_start = query_start or max(start - window_increase, start_limit) - query_end = query_end or min(end + window_increase, end_limit) - - return MappedParams( - query_start, - query_end, - granularity, - ) - - -@region_silo_endpoint -class OrganizationTransactionAnomalyDetectionEndpoint(OrganizationEventsEndpointBase): - publish_status = { - "GET": ApiPublishStatus.PRIVATE, - } - - def has_feature(self, organization, request): - return features.has( - "organizations:performance-anomaly-detection-ui", organization, actor=request.user - ) - - def get(self, request: Request, organization) -> Response: - if not self.has_feature(organization, request): - return Response(status=404) - - start, end = get_date_range_from_params(request.GET) - time_params = get_time_params(start, end) - snuba_params = self.get_snuba_params(request, organization) - query = request.GET.get("query") - query = f"{query} event.type:transaction" if query else "event.type:transaction" - - datetime_format = "%Y-%m-%d %H:%M:%S" - ads_request = { - "query": query, - "start": start.strftime(datetime_format), - "end": end.strftime(datetime_format), - "granularity": time_params.granularity, - } - - # overwrite relevant time params - snuba_params.start = time_params.query_start - snuba_params.end = time_params.query_end - - with handle_query_errors(): - snuba_response = timeseries_query( - selected_columns=["count()"], - query=query, - snuba_params=snuba_params, - rollup=time_params.granularity, - referrer="transaction-anomaly-detection", - zerofill_results=False, - ) - ads_request["data"] = snuba_response.data["data"] - - return get_anomalies(ads_request) diff --git a/src/sentry/api/endpoints/organization_user_rollback.py b/src/sentry/api/endpoints/organization_user_rollback.py index ac6a7120fe792e..2441357b07e978 100644 --- a/src/sentry/api/endpoints/organization_user_rollback.py +++ b/src/sentry/api/endpoints/organization_user_rollback.py @@ -1,5 +1,4 @@ from rest_framework import status -from rest_framework.permissions import BasePermission from rest_framework.request import Request from rest_framework.response import Response @@ -16,19 +15,12 @@ from sentry.models.rollbackuser import RollbackUser -class MemberPermission(BasePermission): - scope_map = { - "GET": ["member:read"], - } - - @region_silo_endpoint class OrganizationRollbackUserEndpoint(OrganizationEndpoint): publish_status = { "GET": ApiPublishStatus.PRIVATE, } owner = ApiOwner.ISSUES - permission_classes = (MemberPermission,) def get(self, request: Request, organization: Organization) -> Response: """ diff --git 
a/src/sentry/api/endpoints/project_autofix_codebase_index_status.py b/src/sentry/api/endpoints/project_autofix_codebase_index_status.py deleted file mode 100644 index 7ed2bb1fde4365..00000000000000 --- a/src/sentry/api/endpoints/project_autofix_codebase_index_status.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import annotations - -import logging - -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.project import ProjectEndpoint -from sentry.api.helpers.autofix import get_project_codebase_indexing_status -from sentry.models.project import Project - -logger = logging.getLogger(__name__) - -from rest_framework.request import Request - - -@region_silo_endpoint -class ProjectAutofixCodebaseIndexStatusEndpoint(ProjectEndpoint): - publish_status = { - "GET": ApiPublishStatus.EXPERIMENTAL, - } - owner = ApiOwner.ML_AI - - def get(self, request: Request, project: Project) -> Response: - """ - Create a codebase index for for a project's repositories, uses the code mapping to determine which repositories to index - """ - status = get_project_codebase_indexing_status(project) - - return Response({"status": status}, status=200) diff --git a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py b/src/sentry/api/endpoints/project_autofix_create_codebase_index.py deleted file mode 100644 index c79dd86ef3e94d..00000000000000 --- a/src/sentry/api/endpoints/project_autofix_create_codebase_index.py +++ /dev/null @@ -1,72 +0,0 @@ -from __future__ import annotations - -import logging - -import orjson -import requests -from django.conf import settings -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.project import ProjectEndpoint, ProjectPermission -from sentry.autofix.utils import get_autofix_repos_from_project_code_mappings -from sentry.models.project import Project -from sentry.seer.signed_seer_api import get_seer_salted_url, sign_with_seer_secret - -logger = logging.getLogger(__name__) - -from rest_framework.request import Request - - -class ProjectAutofixCreateCodebaseIndexPermission(ProjectPermission): - scope_map = { - # We might want to re-evaluate this for LA/EA whether a user needs to have write access to the project to create a codebase index (probably yes?) 
- "POST": ["project:read", "project:write", "project:admin"], - } - - -@region_silo_endpoint -class ProjectAutofixCreateCodebaseIndexEndpoint(ProjectEndpoint): - publish_status = { - "POST": ApiPublishStatus.EXPERIMENTAL, - } - owner = ApiOwner.ML_AI - - permission_classes = (ProjectAutofixCreateCodebaseIndexPermission,) - - def post(self, request: Request, project: Project) -> Response: - """ - Create a codebase index for for a project's repositories, uses the code mapping to determine which repositories to index - """ - repos = get_autofix_repos_from_project_code_mappings(project) - path = "/v1/automation/codebase/index/create" - - for repo in repos: - body = orjson.dumps( - { - "organization_id": project.organization.id, - "project_id": project.id, - "repo": repo, - } - ) - - url, salt = get_seer_salted_url(f"{settings.SEER_AUTOFIX_URL}{path}") - response = requests.post( - url, - data=body, - headers={ - "content-type": "application/json;charset=utf-8", - **sign_with_seer_secret( - salt, - body=body, - ), - }, - ) - - response.raise_for_status() - - return Response( - status=202, - ) diff --git a/src/sentry/api/endpoints/project_details.py b/src/sentry/api/endpoints/project_details.py index 343aa481007697..62150f916098bb 100644 --- a/src/sentry/api/endpoints/project_details.py +++ b/src/sentry/api/endpoints/project_details.py @@ -752,8 +752,12 @@ def put(self, request: Request, project) -> Response: if project.update_option("sentry:origins", result["allowedDomains"]): changed_proj_settings["sentry:origins"] = result["allowedDomains"] if result.get("targetSampleRate") is not None: - if project.update_option("sentry:target_sample_rate", result["targetSampleRate"]): - changed_proj_settings["sentry:target_sample_rate"] = result["targetSampleRate"] + if project.update_option( + "sentry:target_sample_rate", round(result["targetSampleRate"], 4) + ): + changed_proj_settings["sentry:target_sample_rate"] = round( + result["targetSampleRate"], 4 + ) if "dynamicSamplingBiases" in result: updated_biases = get_user_biases(user_set_biases=result["dynamicSamplingBiases"]) if project.update_option("sentry:dynamic_sampling_biases", updated_biases): diff --git a/src/sentry/api/endpoints/project_filters.py b/src/sentry/api/endpoints/project_filters.py index 899e28cac08d13..afeddac6afd1cb 100644 --- a/src/sentry/api/endpoints/project_filters.py +++ b/src/sentry/api/endpoints/project_filters.py @@ -1,26 +1,51 @@ +from typing import TypedDict + +from drf_spectacular.utils import extend_schema from rest_framework.request import Request from rest_framework.response import Response +from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint +from sentry.apidocs.constants import RESPONSE_FORBIDDEN +from sentry.apidocs.examples.project_examples import ProjectExamples +from sentry.apidocs.parameters import GlobalParams +from sentry.apidocs.utils import inline_sentry_response_serializer from sentry.ingest import inbound_filters +class ProjectFilterResponse(TypedDict): + id: str + active: bool | list[str] + + @region_silo_endpoint +@extend_schema(tags=["Projects"]) class ProjectFiltersEndpoint(ProjectEndpoint): + owner = ApiOwner.UNOWNED publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PUBLIC, } + @extend_schema( + operation_id="List a Project's Data Filters", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + 
],
+        responses={
+            200: inline_sentry_response_serializer(
+                "ProjectFilterResponse", list[ProjectFilterResponse]
+            ),
+            403: RESPONSE_FORBIDDEN,
+        },
+        examples=ProjectExamples.GET_PROJECT_FILTERS,
+    )
     def get(self, request: Request, project) -> Response:
         """
-        List a project's filters
-        Retrieve a list of filters for a given project.
-
-        {method} {path}
-
+        `active` will be either a boolean or a list for the legacy browser filters.
         """
         results = []
         for flt in inbound_filters.get_all_filter_specs(): diff --git a/src/sentry/api/endpoints/project_rule_actions.py b/src/sentry/api/endpoints/project_rule_actions.py index 73ba633945a4fd..9463d252144670 100644 --- a/src/sentry/api/endpoints/project_rule_actions.py +++ b/src/sentry/api/endpoints/project_rule_actions.py @@ -5,7 +5,6 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -15,7 +14,6 @@ from sentry.models.rule import Rule from sentry.rules.processing.processor import activate_downstream_actions from sentry.shared_integrations.exceptions import IntegrationFormError -from sentry.utils.safe import safe_execute from sentry.utils.samples import create_sample_event @@ -66,15 +64,7 @@ def post(self, request: Request, project) -> Response: project, platform=project.platform, default="javascript", tagged=True ) - if features.has( - "projects:verbose-test-alert-reporting", project=project, actor=request.user - ): - return self.execute_future_on_test_event(test_event, rule) - else: - # Old existing behavior to wrap this in a handler which buries exceptions - for callback, futures in activate_downstream_actions(rule, test_event).values(): - safe_execute(callback, test_event, futures) - return Response() + return self.execute_future_on_test_event(test_event, rule) def execute_future_on_test_event( self, diff --git a/src/sentry/api/endpoints/relocations/index.py b/src/sentry/api/endpoints/relocations/index.py index b493fde70bc72c..c6cacef17f791d 100644 --- a/src/sentry/api/endpoints/relocations/index.py +++ b/src/sentry/api/endpoints/relocations/index.py @@ -140,11 +140,18 @@ def validate_relocation_uniqueness(owner: RpcUser | AnonymousUser | User) -> Res return None -def get_autopause_value() -> int | None: +def get_autopause_value(provenance: Relocation.Provenance) -> int | None: try: - return Relocation.Step[options.get("relocation.autopause")].value + return Relocation.Step[options.get(f"relocation.autopause.{str(provenance)}")].value except KeyError: + # DEPRECATED: for now, we fall through to the old `relocation.autopause` if the more + # specific `relocation.autopause.*` does not exist OR is set to the default, an empty + # string. Once we remove the old setting, this block can go away, and we can use the more + # specific autopause only. 
+ try: + return Relocation.Step[options.get("relocation.autopause")].value + except KeyError: + return None @region_silo_endpoint @@ -263,12 +270,14 @@ def post(self, request: Request) -> Response: with atomic_transaction( using=(router.db_for_write(Relocation), router.db_for_write(RelocationFile)) ): + provenance = Relocation.Provenance.SELF_HOSTED relocation: Relocation = Relocation.objects.create( creator_id=request.user.id, owner_id=owner.id, want_org_slugs=org_slugs, step=Relocation.Step.UPLOADING.value, - scheduled_pause_at_step=get_autopause_value(), + scheduled_pause_at_step=get_autopause_value(provenance), + provenance=provenance, ) RelocationFile.objects.create( relocation=relocation, diff --git a/src/sentry/api/endpoints/relocations/retry.py b/src/sentry/api/endpoints/relocations/retry.py index 4f80e14b5d5505..1986bc3c015c38 100644 --- a/src/sentry/api/endpoints/relocations/retry.py +++ b/src/sentry/api/endpoints/relocations/retry.py @@ -111,7 +111,10 @@ def post(self, request: Request, relocation_uuid: str) -> Response: owner_id=relocation.owner_id, want_org_slugs=relocation.want_org_slugs, step=Relocation.Step.UPLOADING.value, - scheduled_pause_at_step=get_autopause_value(), + scheduled_pause_at_step=get_autopause_value( + Relocation.Provenance(relocation.provenance) + ), + provenance=relocation.provenance, ) relocation_retry_link_promo_code.send_robust( diff --git a/src/sentry/api/endpoints/seer_rpc.py b/src/sentry/api/endpoints/seer_rpc.py index 8ad06295ed6c8a..7cedd5e2b31d22 100644 --- a/src/sentry/api/endpoints/seer_rpc.py +++ b/src/sentry/api/endpoints/seer_rpc.py @@ -153,7 +153,7 @@ def get_organization_slug(*, org_id: int) -> dict: def get_organization_autofix_consent(*, org_id: int) -> dict: org: Organization = Organization.objects.get(id=org_id) - consent = org.get_option("sentry:gen_ai_consent", False) + consent = org.get_option("sentry:gen_ai_consent_v2024_11_14", False) github_extension_enabled = org_id in options.get("github-extension.enabled-orgs") return { "consent": consent or github_extension_enabled, diff --git a/src/sentry/api/endpoints/team_projects.py b/src/sentry/api/endpoints/team_projects.py index c13586e89d3c22..697c447e00b047 100644 --- a/src/sentry/api/endpoints/team_projects.py +++ b/src/sentry/api/endpoints/team_projects.py @@ -13,6 +13,7 @@ from sentry.api.bases.team import TeamEndpoint, TeamPermission from sentry.api.fields.sentry_slug import SentrySerializerSlugField from sentry.api.helpers.default_inbound_filters import set_default_inbound_filters +from sentry.api.helpers.default_symbol_sources import set_default_symbol_sources from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import ProjectSummarySerializer, serialize from sentry.api.serializers.models.project import OrganizationProjectResponse, ProjectSerializer @@ -203,6 +204,8 @@ def post(self, request: Request, team: Team) -> Response: if project.platform and project.platform.startswith("javascript"): set_default_inbound_filters(project, team.organization) + set_default_symbol_sources(project) + self.create_audit_entry( request=request, organization=team.organization, diff --git a/src/sentry/api/endpoints/user_notification_settings_options.py b/src/sentry/api/endpoints/user_notification_settings_options.py index a381d1eee7858f..c72a7b778c23ad 100644 --- a/src/sentry/api/endpoints/user_notification_settings_options.py +++ b/src/sentry/api/endpoints/user_notification_settings_options.py @@ -8,7 +8,7 @@ from sentry.api.exceptions import ParameterValidationError from 
sentry.api.serializers import serialize from sentry.api.validators.notifications import validate_type -from sentry.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption from sentry.notifications.serializers import NotificationSettingsOptionSerializer from sentry.notifications.validators import UserNotificationSettingOptionWithValueSerializer from sentry.users.api.bases.user import UserEndpoint diff --git a/src/sentry/api/endpoints/user_notification_settings_options_detail.py b/src/sentry/api/endpoints/user_notification_settings_options_detail.py index 53e73e039e850d..e7f46bcca4207c 100644 --- a/src/sentry/api/endpoints/user_notification_settings_options_detail.py +++ b/src/sentry/api/endpoints/user_notification_settings_options_detail.py @@ -6,7 +6,7 @@ from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import control_silo_endpoint -from sentry.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption from sentry.users.api.bases.user import UserEndpoint from sentry.users.models.user import User diff --git a/src/sentry/api/endpoints/user_notification_settings_providers.py b/src/sentry/api/endpoints/user_notification_settings_providers.py index dff386dae28821..ac5184fc109bf3 100644 --- a/src/sentry/api/endpoints/user_notification_settings_providers.py +++ b/src/sentry/api/endpoints/user_notification_settings_providers.py @@ -10,7 +10,7 @@ from sentry.api.serializers import serialize from sentry.api.validators.notifications import validate_type from sentry.integrations.types import PERSONAL_NOTIFICATION_PROVIDERS -from sentry.models.notificationsettingprovider import NotificationSettingProvider +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider from sentry.notifications.serializers import NotificationSettingsProviderSerializer from sentry.notifications.types import NotificationSettingsOptionEnum from sentry.notifications.validators import UserNotificationSettingsProvidersDetailsSerializer diff --git a/src/sentry/api/exceptions.py b/src/sentry/api/exceptions.py index bc33cc12997bdd..2e5689a148aacd 100644 --- a/src/sentry/api/exceptions.py +++ b/src/sentry/api/exceptions.py @@ -50,16 +50,6 @@ def __init__(self, message: str, context: list[str] | None = None) -> None: super().__init__(message=message, context=".".join(context or [])) -class ProjectMoved(SentryAPIException): - status_code = status.HTTP_302_FOUND - # code/message currently don't get used - code = "resource-moved" - message = "Resource has been moved" - - def __init__(self, new_url, slug): - super().__init__(url=new_url, slug=slug) - - class SsoRequired(SentryAPIException): status_code = status.HTTP_401_UNAUTHORIZED code = "sso-required" diff --git a/src/sentry/api/helpers/autofix.py b/src/sentry/api/helpers/autofix.py deleted file mode 100644 index 3d6e8f51057f8d..00000000000000 --- a/src/sentry/api/helpers/autofix.py +++ /dev/null @@ -1,58 +0,0 @@ -import enum - -import orjson -import requests -from django.conf import settings - -from sentry.autofix.utils import get_autofix_repos_from_project_code_mappings -from sentry.seer.signed_seer_api import get_seer_salted_url, sign_with_seer_secret - - -class AutofixCodebaseIndexingStatus(str, enum.Enum): - UP_TO_DATE = "up_to_date" - INDEXING = "indexing" - NOT_INDEXED = 
"not_indexed" - - -def get_project_codebase_indexing_status(project): - repos = get_autofix_repos_from_project_code_mappings(project) - - if not repos: - return None - - statuses = [] - path = "/v1/automation/codebase/index/status" - for repo in repos: - body = orjson.dumps( - { - "organization_id": project.organization.id, - "project_id": project.id, - "repo": repo, - }, - option=orjson.OPT_UTC_Z, - ) - - url, salt = get_seer_salted_url(f"{settings.SEER_AUTOFIX_URL}{path}") - response = requests.post( - url, - data=body, - headers={ - "content-type": "application/json;charset=utf-8", - **sign_with_seer_secret( - salt, - body=body, - ), - }, - ) - - response.raise_for_status() - - statuses.append(response.json()["status"]) - - if any(status == AutofixCodebaseIndexingStatus.NOT_INDEXED for status in statuses): - return AutofixCodebaseIndexingStatus.NOT_INDEXED - - if any(status == AutofixCodebaseIndexingStatus.INDEXING for status in statuses): - return AutofixCodebaseIndexingStatus.INDEXING - - return AutofixCodebaseIndexingStatus.UP_TO_DATE diff --git a/src/sentry/api/helpers/default_symbol_sources.py b/src/sentry/api/helpers/default_symbol_sources.py new file mode 100644 index 00000000000000..4615d9fd6cf443 --- /dev/null +++ b/src/sentry/api/helpers/default_symbol_sources.py @@ -0,0 +1,14 @@ +from sentry.models.project import Project +from sentry.projects.services.project import RpcProject + +DEFAULT_SYMBOL_SOURCES = { + "electron": ["ios", "microsoft", "electron"], + "javascript-electron": ["ios", "microsoft", "electron"], +} + + +def set_default_symbol_sources(project: Project | RpcProject): + if project.platform and project.platform in DEFAULT_SYMBOL_SOURCES: + project.update_option( + "sentry:builtin_symbol_sources", DEFAULT_SYMBOL_SOURCES[project.platform] + ) diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index 1806baea8be1d4..a95ebdfdbf8149 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -4,12 +4,13 @@ import re from collections import defaultdict from collections.abc import Mapping, MutableMapping, Sequence -from typing import Any +from typing import Any, NotRequired, TypedDict from urllib.parse import urlparse import rest_framework +from django.contrib.auth.models import AnonymousUser from django.db import IntegrityError, router, transaction -from django.db.models import Q +from django.db.models import Q, QuerySet from django.db.models.signals import post_save from django.utils import timezone as django_timezone from rest_framework import serializers @@ -24,11 +25,12 @@ from sentry.integrations.tasks.kick_off_status_syncs import kick_off_status_syncs from sentry.issues.grouptype import GroupCategory from sentry.issues.ignored import handle_archived_until_escalating, handle_ignored -from sentry.issues.merge import handle_merge +from sentry.issues.merge import MergedGroup, handle_merge from sentry.issues.priority import update_priority from sentry.issues.status_change import handle_status_update, infer_substatus from sentry.issues.update_inbox import update_inbox from sentry.models.activity import Activity, ActivityIntegration +from sentry.models.commit import Commit from sentry.models.group import STATUS_UPDATE_CHOICES, Group, GroupStatus from sentry.models.groupassignee import GroupAssignee from sentry.models.groupbookmark import GroupBookmark @@ -36,7 +38,6 @@ from sentry.models.grouphistory import record_group_history_from_activity_type from 
sentry.models.groupinbox import GroupInboxRemoveAction, remove_group_from_inbox from sentry.models.grouplink import GroupLink -from sentry.models.grouprelease import GroupRelease from sentry.models.groupresolution import GroupResolution from sentry.models.groupseen import GroupSeen from sentry.models.groupshare import GroupShare @@ -61,11 +62,23 @@ logger = logging.getLogger(__name__) +class MultipleProjectsError(Exception): + pass + + +class ResolutionParams(TypedDict): + release: Release + type: int | None + status: int | None + actor_id: int | None + current_release_version: NotRequired[str] + + def handle_discard( request: Request, group_list: Sequence[Group], projects: Sequence[Project], - user: User, + user, ) -> Response: for project in projects: if not features.has("projects:discard-groups", project, actor=user): @@ -106,12 +119,14 @@ def handle_discard( def self_subscribe_and_assign_issue( - acting_user: User | RpcUser | None, group: Group, self_assign_issue: str + acting_user, group: Group, self_assign_issue: str ) -> Actor | None: - # Used during issue resolution to assign to acting user - # returns None if the user didn't elect to self assign on resolution - # or the group is assigned already, otherwise returns Actor - # representation of current user + """ + Used during issue resolution to assign to acting user + returns None if the user didn't elect to self assign on resolution + or the group is assigned already, otherwise returns Actor + representation of current user + """ if acting_user: GroupSubscription.objects.subscribe( subscriber=acting_user, group=group, reason=GroupSubscriptionReason.status_change @@ -122,9 +137,7 @@ def self_subscribe_and_assign_issue( return None -def get_current_release_version_of_group( - group: Group, follows_semver: bool = False -) -> Release | None: +def get_current_release_version_of_group(group: Group, follows_semver: bool = False) -> str | None: """ Function that returns the latest release version associated with a Group, and by latest we mean either most recent (date) or latest in semver versioning scheme @@ -137,23 +150,9 @@ def get_current_release_version_of_group( """ current_release_version = None if follows_semver: - try: - # This sets current_release_version to the latest semver version associated with a group - order_by_semver_desc = [f"-{col}" for col in Release.SEMVER_COLS] - current_release_version = ( - Release.objects.filter_to_semver() - .filter( - id__in=GroupRelease.objects.filter( - project_id=group.project.id, group_id=group.id - ).values_list("release_id"), - ) - .annotate_prerelease_column() - .order_by(*order_by_semver_desc) - .values_list("version", flat=True)[:1] - .get() - ) - except Release.DoesNotExist: - pass + release = greatest_semver_release(group.project) + if release is not None: + current_release_version = release.version else: # This sets current_release_version to the most recent release associated with a group # In order to be able to do that, `use_cache` has to be set to False. Otherwise, @@ -169,26 +168,103 @@ def update_groups( group_ids: Sequence[int | str] | None, projects: Sequence[Project], organization_id: int, - search_fn: SearchFunction | None, - user: RpcUser | User | None = None, + search_fn: SearchFunction | None = None, + user: RpcUser | User | AnonymousUser | None = None, data: Mapping[str, Any] | None = None, ) -> Response: # If `user` and `data` are passed as parameters then they should override # the values in `request`. 
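[Editor's note, not part of the patch: update_groups now also accepts AnonymousUser, so `acting_user` becomes None whenever the caller is unauthenticated and every helper below must tolerate a missing actor. A minimal plain-Python sketch, using a hypothetical stand-in class instead of Django's real AnonymousUser:]

# Illustrative only -- FakeAnonymousUser is a stand-in for django.contrib.auth.models.AnonymousUser.
class FakeAnonymousUser:
    # Django's AnonymousUser reports is_authenticated as always False
    is_authenticated = False

user = FakeAnonymousUser()
acting_user = user if user and user.is_authenticated else None
assert acting_user is None  # helpers such as handle_priority must accept acting_user=None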
user = user or request.user + acting_user = user if user and user.is_authenticated else None data = data or request.data - if group_ids: - group_list = Group.objects.filter( - project__organization_id=organization_id, project__in=projects, id__in=group_ids + try: + group_ids, group_list = get_group_ids_and_group_list( + organization_id, projects, group_ids, search_fn ) - # filter down group ids to only valid matches - group_ids = [g.id for g in group_list] - if not group_ids: - return Response(status=204) - else: - group_list = None + except ValidationError: + logger.exception("Error getting group ids and group list") # Track the error in Sentry + return Response( + {"detail": "Invalid query. Error getting group ids and group list"}, status=400 + ) + + if not group_ids or not group_list: + return Response({"detail": "No groups found"}, status=204) + + serializer = validate_request(request, projects, data) + + if serializer is None: + logger.error("Error validating request. Investigate.") + return Response(status=500) + + result = dict(serializer.validated_data) + + acting_user = user if user.is_authenticated else None + + # so we won't have to requery for each group + project_lookup = {g.project_id: g.project for g in group_list} + group_project_ids = {g.project_id for g in group_list} + # filter projects down to only those that have groups in the search results + projects = [p for p in projects if p.id in group_project_ids] + queryset = Group.objects.filter(id__in=group_ids) + + discard = result.get("discard") + if discard: + return handle_discard(request, list(queryset), projects, acting_user) + + status_details = result.pop("statusDetails", result) + status = result.get("status") + res_type = None + if "priority" in result: + handle_priority( + priority=result["priority"], + group_list=group_list, + acting_user=acting_user, + project_lookup=project_lookup, + ) + if status in ("resolved", "resolvedInNextRelease"): + try: + result, res_type = handle_resolve_in_release( + status, + status_details, + group_list, + projects, + project_lookup, + acting_user, + user, + result, + ) + except MultipleProjectsError: + return Response({"detail": "Cannot set resolved for multiple projects."}, status=400) + elif status: + result = handle_other_status_updates( + result, + group_list, + projects, + project_lookup, + status_details, + acting_user, + user, + ) + + return prepare_response( + result, + group_list, + project_lookup, + projects, + acting_user, + data, + res_type, + request.META.get("HTTP_REFERER", ""), + ) + + +def validate_request( + request: Request, + projects: Sequence[Project], + data: Mapping[str, Any], +) -> GroupValidator | None: serializer = None # TODO(jess): We may want to look into refactoring GroupValidator # to support multiple projects, but this is pretty complicated @@ -205,16 +281,74 @@ def update_groups( ) if not serializer.is_valid(): raise serializers.ValidationError(serializer.errors) + return serializer - if serializer is None: - return - result = dict(serializer.validated_data) +def get_group_ids_and_group_list( + organization_id: int, + projects: Sequence[Project], + group_ids: Sequence[int | str] | None, + search_fn: SearchFunction | None, +) -> tuple[list[int | str], list[Group]]: + """ + Gets group IDs and group list based on provided filters. 
- # so we won't have to requery for each group - project_lookup = {p.id: p for p in projects} + Args: + organization_id: ID of the organization + projects: Sequence of projects to filter groups by + group_ids: Optional sequence of specific group IDs to fetch + search_fn: Optional search function to find groups if no IDs provided - acting_user = user if user.is_authenticated else None + Returns: + Tuple of: + - List of group IDs that were found + - List of Group objects that were found + + Notes: + - If group_ids provided, filters to only valid groups in the org/projects + - If no group_ids but search_fn provided, uses search to find groups + - Limited to BULK_MUTATION_LIMIT results when using search + """ + _group_ids: list[int | str] = [] + _group_list: list[Group] = [] + + if group_ids: + _group_list = list( + Group.objects.filter( + project__organization_id=organization_id, project__in=projects, id__in=group_ids + ) + ) + # filter down group ids to only valid matches + _group_ids = [g.id for g in _group_list] + + if search_fn and not _group_ids: + # It can raise ValidationError + cursor_result, _ = search_fn( + { + "limit": BULK_MUTATION_LIMIT, + "paginator_options": {"max_limit": BULK_MUTATION_LIMIT}, + } + ) + + _group_list = list(cursor_result) + _group_ids = [g.id for g in _group_list] + + return _group_ids, _group_list + + +def handle_resolve_in_release( + status: str, + status_details: Mapping[str, Any], + group_list: Sequence[Group], + projects: Sequence[Project], + project_lookup: Mapping[int, Project], + acting_user, + user: RpcUser | User | AnonymousUser, + result: MutableMapping[str, Any], +) -> tuple[dict[str, Any], int | None]: + res_type = None + release = None + commit = None self_assign_issue = "0" if acting_user: user_options = user_option_service.get_many( @@ -222,396 +356,401 @@ def update_groups( ) if user_options: self_assign_issue = user_options[0].value - if search_fn and not group_ids: + res_status = None + if status == "resolvedInNextRelease" or status_details.get("inNextRelease"): + # TODO(jess): We may want to support this for multi project, but punting on it for now + if len(projects) > 1: + raise MultipleProjectsError() + # may not be a release yet + release = status_details.get("inNextRelease") or get_release_to_resolve_by(projects[0]) + + activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value + activity_data = { + # no version yet + "version": "" + } + + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + new_status_details = { + "inNextRelease": True, + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type = GroupResolution.Type.in_next_release + res_type_str = "in_next_release" + res_status = GroupResolution.Status.pending + elif status_details.get("inUpcomingRelease"): + if len(projects) > 1: + raise MultipleProjectsError() + release = status_details.get("inUpcomingRelease") or most_recent_release(projects[0]) + activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value + activity_data = {"version": ""} + + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + new_status_details = { + "inUpcomingRelease": True, + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type = GroupResolution.Type.in_upcoming_release + res_type_str = "in_upcoming_release" + res_status = GroupResolution.Status.pending + elif status_details.get("inRelease"): + # TODO(jess): We could update validation to check if release + # applies 
to multiple projects, but I think we agreed to punt + # on this for now + if len(projects) > 1: + raise MultipleProjectsError() + release = status_details["inRelease"] + activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value + activity_data = { + # no version yet + "version": release.version + } + + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + new_status_details = { + "inRelease": release.version, + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type = GroupResolution.Type.in_release + res_type_str = "in_release" + res_status = GroupResolution.Status.resolved + elif status_details.get("inCommit"): + # TODO(jess): Same here, this is probably something we could do, but + # punting for now. + if len(projects) > 1: + raise MultipleProjectsError() + commit = status_details["inCommit"] + activity_type = ActivityType.SET_RESOLVED_IN_COMMIT.value + activity_data = {"commit": commit.id} + serialized_user = user_service.serialize_many(filter=dict(user_ids=[user.id]), as_user=user) + + new_status_details = { + "inCommit": serialize(commit, user), + } + if serialized_user: + new_status_details["actor"] = serialized_user[0] + res_type_str = "in_commit" + else: + res_type_str = "now" + activity_type = ActivityType.SET_RESOLVED.value + activity_data = {} + new_status_details = {} + + metrics.incr("group.resolved", instance=res_type_str, skip_internal=True) + + # if we've specified a commit, let's see if its already been released + # this will allow us to associate the resolution to a release as if we + # were simply using 'inRelease' above + # Note: this is different than the way commit resolution works on deploy + # creation, as a given deploy is connected to an explicit release, and + # in this case we're simply choosing the most recent release which contains + # the commit. 
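[Editor's note, not part of the patch: the comment above, like the most_recent_release and most_recent_release_matching_commit helpers added further down in this file, picks "the most recent release" by ordering on COALESCE(date_released, date_added) descending. A plain-Python sketch of that ordering with made-up versions and dates:]

# Illustrative only: the release date wins when present; the creation date is the
# fallback for releases that were never marked as released.
from datetime import datetime

releases = [
    {"version": "1.0.0", "date_released": None, "date_added": datetime(2024, 1, 10)},
    {"version": "0.9.0", "date_released": datetime(2024, 2, 1), "date_added": datetime(2023, 12, 1)},
]
newest = max(releases, key=lambda r: r["date_released"] or r["date_added"])
assert newest["version"] == "0.9.0"  # released later, even though added earlier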
+ if commit and not release: + # TODO(jess): If we support multiple projects for release / commit resolution, + # we need to update this to find the release for each project (we shouldn't assume + # it's the same) try: - cursor_result, _ = search_fn( - { - "limit": BULK_MUTATION_LIMIT, - "paginator_options": {"max_limit": BULK_MUTATION_LIMIT}, - } + release = most_recent_release_matching_commit(projects, commit) + res_type = GroupResolution.Type.in_release + res_status = GroupResolution.Status.resolved + except IndexError: + release = None + for group in group_list: + with transaction.atomic(router.db_for_write(Group)): + process_group_resolution( + group, + group_list, + release, + commit, + res_type, + res_status, + acting_user, + user, + self_assign_issue, + activity_type, + activity_data, + result, ) - except ValidationError as exc: - return Response({"detail": str(exc)}, status=400) - - group_list = list(cursor_result) - group_ids = [g.id for g in group_list] - - is_bulk = len(group_ids) > 1 - - group_project_ids = {g.project_id for g in group_list} - # filter projects down to only those that have groups in the search results - projects = [p for p in projects if p.id in group_project_ids] - - queryset = Group.objects.filter(id__in=group_ids) - discard = result.get("discard") - if discard: - return handle_discard(request, list(queryset), projects, acting_user) + issue_resolved.send_robust( + organization_id=projects[0].organization_id, + user=(acting_user or user), + group=group, + project=project_lookup[group.project_id], + resolution_type=res_type_str, + sender=update_groups, + ) - status_details = result.pop("statusDetails", result) - status = result.get("status") - release = None - commit = None - res_type = None - activity_type = None - activity_data: MutableMapping[str, Any | None] | None = None - if "priority" in result: - handle_priority( - priority=result["priority"], - group_list=group_list, - actor=acting_user, - project_lookup=project_lookup, + kick_off_status_syncs.apply_async( + kwargs={"project_id": group.project_id, "group_id": group.id} ) - if status in ("resolved", "resolvedInNextRelease"): - res_status = None - if status == "resolvedInNextRelease" or status_details.get("inNextRelease"): - # TODO(jess): We may want to support this for multi project, but punting on it for now - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in next release for multiple projects."}, - status=400, - ) - # may not be a release yet - release = status_details.get("inNextRelease") or get_latest_release(projects[0]) - activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value - activity_data = { - # no version yet - "version": "" - } + result.update({"status": "resolved", "statusDetails": new_status_details}) - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - new_status_details = { - "inNextRelease": True, - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type = GroupResolution.Type.in_next_release - res_type_str = "in_next_release" - res_status = GroupResolution.Status.pending - elif status_details.get("inUpcomingRelease"): - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in upcoming release for multiple projects."}, - status=400, - ) - release = ( - status_details.get("inUpcomingRelease") - or Release.objects.filter( - projects=projects[0], organization_id=projects[0].organization_id - ) - .extra(select={"sort": "COALESCE(date_released, date_added)"}) 
- .order_by("-sort")[0] - ) - activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value - activity_data = {"version": ""} + return dict(result), res_type - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - new_status_details = { - "inUpcomingRelease": True, - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type = GroupResolution.Type.in_upcoming_release - res_type_str = "in_upcoming_release" - res_status = GroupResolution.Status.pending - elif status_details.get("inRelease"): - # TODO(jess): We could update validation to check if release - # applies to multiple projects, but I think we agreed to punt - # on this for now - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in release for multiple projects."}, status=400 - ) - release = status_details["inRelease"] - activity_type = ActivityType.SET_RESOLVED_IN_RELEASE.value - activity_data = { - # no version yet - "version": release.version - } - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user - ) - new_status_details = { - "inRelease": release.version, - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type = GroupResolution.Type.in_release - res_type_str = "in_release" - res_status = GroupResolution.Status.resolved - elif status_details.get("inCommit"): - # TODO(jess): Same here, this is probably something we could do, but - # punting for now. - if len(projects) > 1: - return Response( - {"detail": "Cannot set resolved in commit for multiple projects."}, status=400 - ) - commit = status_details["inCommit"] - activity_type = ActivityType.SET_RESOLVED_IN_COMMIT.value - activity_data = {"commit": commit.id} - serialized_user = user_service.serialize_many( - filter=dict(user_ids=[user.id]), as_user=user +def process_group_resolution( + group: Group, + group_list: Sequence[Group], + release: Release | None, + commit: Commit | None, + res_type: int | None, + res_status: int | None, + acting_user, + user: RpcUser | User | AnonymousUser, + self_assign_issue: str, + activity_type: int, + activity_data: MutableMapping[str, Any], + result: MutableMapping[str, Any], +): + now = django_timezone.now() + resolution = None + created = None + if release: + # These are the parameters that are set for creating a GroupResolution + resolution_params: ResolutionParams = { + "release": release, + "type": res_type, + "status": res_status, + "actor_id": user.id if user and user.is_authenticated else None, + } + + # We only set `current_release_version` if GroupResolution type is + # in_next_release, because we need to store information about the latest/most + # recent release that was associated with a group and that is required for + # release comparisons (i.e. 
handling regressions) + if res_type == GroupResolution.Type.in_next_release: + # Check if semver versioning scheme is followed + follows_semver = follows_semver_versioning_scheme( + org_id=group.project.organization_id, + project_id=group.project_id, + release_version=release.version, ) - new_status_details = { - "inCommit": serialize(commit, user), - } - if serialized_user: - new_status_details["actor"] = serialized_user[0] - res_type_str = "in_commit" - else: - res_type_str = "now" - activity_type = ActivityType.SET_RESOLVED.value - activity_data = {} - new_status_details = {} - - now = django_timezone.now() - metrics.incr("group.resolved", instance=res_type_str, skip_internal=True) - - # if we've specified a commit, let's see if its already been released - # this will allow us to associate the resolution to a release as if we - # were simply using 'inRelease' above - # Note: this is different than the way commit resolution works on deploy - # creation, as a given deploy is connected to an explicit release, and - # in this case we're simply choosing the most recent release which contains - # the commit. - if commit and not release: - # TODO(jess): If we support multiple projects for release / commit resolution, - # we need to update this to find the release for each project (we shouldn't assume - # it's the same) - try: - release = ( - Release.objects.filter(projects__in=projects, releasecommit__commit=commit) - .extra(select={"sort": "COALESCE(date_released, date_added)"}) - .order_by("-sort")[0] - ) - res_type = GroupResolution.Type.in_release - res_status = GroupResolution.Status.resolved - except IndexError: - release = None - for group in group_list: - with transaction.atomic(router.db_for_write(Group)): - resolution = None - created = None - if release: - resolution_params = { - "release": release, - "type": res_type, - "status": res_status, - "actor_id": user.id if user.is_authenticated else None, - } - - # We only set `current_release_version` if GroupResolution type is - # in_next_release, because we need to store information about the latest/most - # recent release that was associated with a group and that is required for - # release comparisons (i.e. handling regressions) - if res_type == GroupResolution.Type.in_next_release: - # Check if semver versioning scheme is followed - follows_semver = follows_semver_versioning_scheme( - org_id=group.organization.id, - project_id=group.project.id, - release_version=release.version, + current_release_version = get_current_release_version_of_group(group, follows_semver) + + if current_release_version: + resolution_params.update({"current_release_version": current_release_version}) + + # Sets `current_release_version` for activity, since there is no point + # waiting for when a new release is created i.e. + # clear_expired_resolutions task to be run. + # Activity should look like "... resolved in version + # >current_release_version" in the UI + if follows_semver: + activity_data.update({"current_release_version": current_release_version}) + + # In semver projects, and thereby semver releases, we determine + # resolutions by comparing against an expression rather than a + # specific release (i.e. >current_release_version). 
Consequently, + # at this point we can consider this GroupResolution as resolved + # in release + resolution_params.update( + { + "type": GroupResolution.Type.in_release, + "status": GroupResolution.Status.resolved, + } + ) + else: + # If we already know the `next` release in date based ordering + # when clicking on `resolvedInNextRelease` because it is already + # been released, there is no point in setting GroupResolution to + # be of type in_next_release but rather in_release would suffice + + try: + # Get current release object from current_release_version + current_release_obj = Release.objects.get( + version=current_release_version, + organization_id=group.project.organization_id, ) - current_release_version = get_current_release_version_of_group( - group=group, follows_semver=follows_semver + date_order_q = Q(date_added__gt=current_release_obj.date_added) | Q( + date_added=current_release_obj.date_added, + id__gt=current_release_obj.id, ) - if current_release_version: - resolution_params.update( - {"current_release_version": current_release_version} + + # Find the next release after the current_release_version + # i.e. the release that resolves the issue + resolved_in_release = ( + Release.objects.filter( + date_order_q, + projects=group.project, + organization_id=group.project.organization_id, ) + .extra(select={"sort": "COALESCE(date_released, date_added)"}) + .order_by("sort", "id")[:1] + .get() + ) - # Sets `current_release_version` for activity, since there is no point - # waiting for when a new release is created i.e. - # clear_expired_resolutions task to be run. - # Activity should look like "... resolved in version - # >current_release_version" in the UI - if follows_semver: - activity_data.update( - {"current_release_version": current_release_version} - ) - - # In semver projects, and thereby semver releases, we determine - # resolutions by comparing against an expression rather than a - # specific release (i.e. >current_release_version). Consequently, - # at this point we can consider this GroupResolution as resolved - # in release - resolution_params.update( - { - "type": GroupResolution.Type.in_release, - "status": GroupResolution.Status.resolved, - } - ) - else: - # If we already know the `next` release in date based ordering - # when clicking on `resolvedInNextRelease` because it is already - # been released, there is no point in setting GroupResolution to - # be of type in_next_release but rather in_release would suffice - - try: - # Get current release object from current_release_version - current_release_obj = Release.objects.get( - version=current_release_version, - organization_id=projects[0].organization_id, - ) - - date_order_q = Q( - date_added__gt=current_release_obj.date_added - ) | Q( - date_added=current_release_obj.date_added, - id__gt=current_release_obj.id, - ) - - # Find the next release after the current_release_version - # i.e. 
the release that resolves the issue - resolved_in_release = ( - Release.objects.filter( - date_order_q, - projects=projects[0], - organization_id=projects[0].organization_id, - ) - .extra( - select={"sort": "COALESCE(date_released, date_added)"} - ) - .order_by("sort", "id")[:1] - .get() - ) - - # If we get here, we assume it exists and so we update - # GroupResolution and Activity - resolution_params.update( - { - "release": resolved_in_release, - "type": GroupResolution.Type.in_release, - "status": GroupResolution.Status.resolved, - } - ) - activity_data.update({"version": resolved_in_release.version}) - except Release.DoesNotExist: - # If it gets here, it means we don't know the upcoming - # release yet because it does not exist, and so we should - # fall back to our current model - ... - - resolution, created = GroupResolution.objects.get_or_create( - group=group, defaults=resolution_params - ) - if not created: - resolution.update(datetime=django_timezone.now(), **resolution_params) - - if commit: - GroupLink.objects.create( - group_id=group.id, - project_id=group.project_id, - linked_type=GroupLink.LinkedType.commit, - relationship=GroupLink.Relationship.resolves, - linked_id=commit.id, - ) + # If we get here, we assume it exists and so we update + # GroupResolution and Activity + resolution_params.update( + { + "release": resolved_in_release, + "type": GroupResolution.Type.in_release, + "status": GroupResolution.Status.resolved, + } + ) + activity_data.update({"version": resolved_in_release.version}) + except Release.DoesNotExist: + # If it gets here, it means we don't know the upcoming + # release yet because it does not exist, and so we should + # fall back to our current model + ... + + resolution, created = GroupResolution.objects.get_or_create( + group=group, defaults=resolution_params + ) + if not created: + resolution.update(datetime=django_timezone.now(), **resolution_params) + + if commit: + GroupLink.objects.create( + group_id=group.id, + project_id=group.project_id, + linked_type=GroupLink.LinkedType.commit, + relationship=GroupLink.Relationship.resolves, + linked_id=commit.id, + ) - affected = Group.objects.filter(id=group.id).update( - status=GroupStatus.RESOLVED, resolved_at=now, substatus=None - ) - if not resolution: - created = affected - - group.status = GroupStatus.RESOLVED - group.substatus = None - group.resolved_at = now - if affected and not options.get("groups.enable-post-update-signal"): - post_save.send( - sender=Group, - instance=group, - created=False, - update_fields=["resolved_at", "status", "substatus"], - ) - remove_group_from_inbox( - group, action=GroupInboxRemoveAction.RESOLVED, user=acting_user - ) - result["inbox"] = None - - assigned_to = self_subscribe_and_assign_issue(acting_user, group, self_assign_issue) - if assigned_to is not None: - result["assignedTo"] = assigned_to - - if created: - activity = Activity.objects.create( - project=project_lookup[group.project_id], - group=group, - type=activity_type, - user_id=acting_user.id, - ident=resolution.id if resolution else None, - data=activity_data, - ) - record_group_history_from_activity_type(group, activity_type, actor=acting_user) + affected = Group.objects.filter(id=group.id).update( + status=GroupStatus.RESOLVED, resolved_at=now, substatus=None + ) + if not resolution: + created = bool(affected) + + group.status = GroupStatus.RESOLVED + group.substatus = None + group.resolved_at = now + if affected and not options.get("groups.enable-post-update-signal"): + post_save.send( + sender=Group, + 
instance=group, + created=False, + update_fields=["resolved_at", "status", "substatus"], + ) + remove_group_from_inbox(group, action=GroupInboxRemoveAction.RESOLVED, user=acting_user) + result["inbox"] = None - # TODO(dcramer): we need a solution for activity rollups - # before sending notifications on bulk changes - if not is_bulk: - transaction.on_commit( - lambda: activity.send_notification(), router.db_for_write(Group) - ) + assigned_to = self_subscribe_and_assign_issue(acting_user, group, self_assign_issue) + if assigned_to is not None: + result["assignedTo"] = assigned_to - issue_resolved.send_robust( - organization_id=organization_id, - user=(acting_user or user), - group=group, - project=project_lookup[group.project_id], - resolution_type=res_type_str, - sender=update_groups, - ) + if created: + activity = Activity.objects.create( + project=group.project, + group=group, + type=activity_type, + user_id=acting_user.id, + ident=resolution.id if resolution else None, + data=dict(activity_data), + ) + record_group_history_from_activity_type(group, activity_type, actor=acting_user) - kick_off_status_syncs.apply_async( - kwargs={"project_id": group.project_id, "group_id": group.id} - ) + # TODO(dcramer): we need a solution for activity rollups + # before sending notifications on bulk changes + if not len(group_list) > 1: + transaction.on_commit(lambda: activity.send_notification(), router.db_for_write(Group)) - result.update({"status": "resolved", "statusDetails": new_status_details}) - elif status: - new_status = STATUS_UPDATE_CHOICES[result["status"]] - new_substatus = ( - SUBSTATUS_UPDATE_CHOICES[result.get("substatus")] if result.get("substatus") else None - ) - new_substatus = infer_substatus(new_status, new_substatus, status_details, group_list) +def merge_groups( + group_list: Sequence[Group], + project_lookup: Mapping[int, Project], + acting_user, + referer: str, +) -> MergedGroup: + issue_stream_regex = r"^(\/organizations\/[^\/]+)?\/issues\/$" + similar_issues_tab_regex = r"^(\/organizations\/[^\/]+)?\/issues\/\d+\/similar\/$" + + metrics.incr( + "grouping.merge_issues", + sample_rate=1.0, + tags={ + # We assume that if someone's merging groups, they're from the same platform + "platform": group_list[0].platform or "unknown", + "sdk": group_list[0].sdk or "unknown", + # TODO: It's probably cleaner to just send this value from the front end + "referer": ( + "issue stream" + if re.search(issue_stream_regex, referer) + else ( + "similar issues tab" + if re.search(similar_issues_tab_regex, referer) + else "unknown" + ) + ), + }, + ) + return handle_merge(group_list, project_lookup, acting_user) - with transaction.atomic(router.db_for_write(Group)): - # TODO(gilbert): update() doesn't call pre_save and bypasses any substatus defaulting we have there - # we should centralize the logic for validating and defaulting substatus values - # and refactor pre_save and the above new_substatus assignment to account for this - status_updated = queryset.exclude(status=new_status).update( - status=new_status, substatus=new_substatus - ) - GroupResolution.objects.filter(group__in=group_ids).delete() - if new_status == GroupStatus.IGNORED: - if new_substatus == GroupSubStatus.UNTIL_ESCALATING: - result["statusDetails"] = handle_archived_until_escalating( - group_list, acting_user, projects, sender=update_groups - ) - else: - result["statusDetails"] = handle_ignored( - group_ids, group_list, status_details, acting_user, user - ) - result["inbox"] = None + +def handle_other_status_updates( + result: 
dict[str, Any], + group_list: Sequence[Group], + projects: Sequence[Project], + project_lookup: Mapping[int, Project], + status_details: dict[str, Any], + acting_user, + user: RpcUser | User | AnonymousUser, +) -> dict[str, Any]: + group_ids = [group.id for group in group_list] + queryset = Group.objects.filter(id__in=group_ids) + new_status = STATUS_UPDATE_CHOICES[result["status"]] + new_substatus = None + if result.get("substatus"): + new_substatus = SUBSTATUS_UPDATE_CHOICES[result["substatus"]] + new_substatus = infer_substatus(new_status, new_substatus, status_details, group_list) + + with transaction.atomic(router.db_for_write(Group)): + # TODO(gilbert): update() doesn't call pre_save and bypasses any substatus defaulting we have there + # we should centralize the logic for validating and defaulting substatus values + # and refactor pre_save and the above new_substatus assignment to account for this + status_updated = queryset.exclude(status=new_status).update( + status=new_status, substatus=new_substatus + ) + GroupResolution.objects.filter(group__in=group_ids).delete() + if new_status == GroupStatus.IGNORED: + if new_substatus == GroupSubStatus.UNTIL_ESCALATING: + result["statusDetails"] = handle_archived_until_escalating( + group_list, acting_user, projects, sender=update_groups + ) else: - result["statusDetails"] = {} - if group_list and status_updated: - activity_type, activity_data = handle_status_update( - group_list=group_list, - projects=projects, - project_lookup=project_lookup, - new_status=new_status, - new_substatus=new_substatus, - is_bulk=is_bulk, - acting_user=acting_user, - status_details=result.get("statusDetails", {}), - sender=update_groups, - ) + result["statusDetails"] = handle_ignored( + group_list, status_details, acting_user, user + ) + result["inbox"] = None + else: + result["statusDetails"] = {} + if group_list and status_updated: + handle_status_update( + group_list=group_list, + projects=projects, + project_lookup=project_lookup, + new_status=new_status, + new_substatus=new_substatus, + is_bulk=len(group_ids) > 1, + acting_user=acting_user, + status_details=result.get("statusDetails", {}), + sender=update_groups, + ) + return result + + +def prepare_response( + result: dict[str, Any], + group_list: Sequence[Group], + project_lookup: Mapping[int, Project], + projects: Sequence[Project], + acting_user, + data: Mapping[str, Any], + res_type: int | None, + referer: str, +) -> Response: # XXX (ahmed): hack to get the activities to work properly on issues page. Not sure of # what performance impact this might have & this possibly should be moved else where try: @@ -640,14 +779,10 @@ def update_groups( acting_user, ) - handle_has_seen( - result.get("hasSeen"), group_list, group_ids, project_lookup, projects, acting_user - ) + handle_has_seen(result.get("hasSeen"), group_list, project_lookup, projects, acting_user) if "isBookmarked" in result: - handle_is_bookmarked( - result["isBookmarked"], group_list, group_ids, project_lookup, acting_user - ) + handle_is_bookmarked(result["isBookmarked"], group_list, project_lookup, acting_user) if result.get("isSubscribed") in (True, False): result["subscriptionDetails"] = handle_is_subscribed( @@ -659,38 +794,20 @@ def update_groups( result["isPublic"], group_list, project_lookup, acting_user ) - # XXX(dcramer): this feels a bit shady like it should be its own endpoint. 
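[Editor's note, not part of the patch: the merge_groups helper extracted above tags its metric with a coarse classification of the HTTP referer path. A self-contained illustration using the same two regexes from the diff; the example URLs are made up:]

# Illustrative only: how the "referer" tag on grouping.merge_issues is derived.
import re
from urllib.parse import urlparse

issue_stream_regex = r"^(\/organizations\/[^\/]+)?\/issues\/$"
similar_issues_tab_regex = r"^(\/organizations\/[^\/]+)?\/issues\/\d+\/similar\/$"

def classify_referer(referer_url: str) -> str:
    path = urlparse(referer_url).path
    if re.search(issue_stream_regex, path):
        return "issue stream"
    if re.search(similar_issues_tab_regex, path):
        return "similar issues tab"
    return "unknown"

assert classify_referer("https://sentry.example/organizations/acme/issues/") == "issue stream"
assert classify_referer("https://sentry.example/organizations/acme/issues/123/similar/") == "similar issues tab"
assert classify_referer("https://sentry.example/settings/") == "unknown"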
+ # TODO: Create new endpoint for this if result.get("merge") and len(group_list) > 1: # don't allow merging cross project - if len(projects) > 1: - return Response({"detail": "Merging across multiple projects is not supported"}) - - referer = urlparse(request.META.get("HTTP_REFERER", "")).path - issue_stream_regex = r"^(\/organizations\/[^\/]+)?\/issues\/$" - similar_issues_tab_regex = r"^(\/organizations\/[^\/]+)?\/issues\/\d+\/similar\/$" - - metrics.incr( - "grouping.merge_issues", - sample_rate=1.0, - tags={ - # We assume that if someone's merging groups, they're from the same platform - "platform": group_list[0].platform or "unknown", - "sdk": group_list[0].sdk or "unknown", - # TODO: It's probably cleaner to just send this value from the front end - "referer": ( - "issue stream" - if re.search(issue_stream_regex, referer) - else ( - "similar issues tab" - if re.search(similar_issues_tab_regex, referer) - else "unknown" - ) - ), - }, + if len(project_lookup) > 1: + return Response( + {"detail": "Merging across multiple projects is not supported"}, status=400 + ) + result["merge"] = merge_groups( + group_list, + project_lookup, + acting_user, + urlparse(referer).path, ) - result["merge"] = handle_merge(group_list, project_lookup, acting_user) - inbox = result.get("inbox", None) if inbox is not None: result["inbox"] = update_inbox( @@ -698,45 +815,57 @@ def update_groups( group_list, project_lookup, acting_user, - http_referrer=request.META.get("HTTP_REFERER"), + http_referrer=referer, sender=update_groups, ) return Response(result) -def get_latest_release(project: Project) -> Release | None: - release = None +def get_release_to_resolve_by(project: Project) -> Release | None: + follows_semver = follows_semver_versioning_scheme( + org_id=project.organization_id, project_id=project.id + ) + return greatest_semver_release(project) if follows_semver else most_recent_release(project) - # XXX: Remove block once released - follows_semver = False - if features.has("organizations:releases-resolve-next-release-semver-fix", project.organization): - follows_semver = follows_semver_versioning_scheme( - org_id=project.organization_id, project_id=project.id - ) - releases = Release.objects.filter(projects=project, organization_id=project.organization_id) - if follows_semver: - release = ( - releases.filter_to_semver() - .annotate_prerelease_column() - .order_by(*[f"-{col}" for col in Release.SEMVER_COLS]) - .first() - ) - else: - release = ( - releases.extra(select={"sort": "COALESCE(date_released, date_added)"}) - .order_by("-sort") - .first() - ) - return release +def most_recent_release(project: Project) -> Release | None: + return ( + Release.objects.filter(projects=project, organization_id=project.organization_id) + .extra(select={"sort": "COALESCE(date_released, date_added)"}) + .order_by("-sort") + .first() + ) + + +def most_recent_release_matching_commit( + projects: Sequence[Project], commit: Commit +) -> Release | None: + return ( + Release.objects.filter(projects__in=projects, releasecommit__commit=commit) + .extra(select={"sort": "COALESCE(date_released, date_added)"}) + .order_by("-sort")[0] + ) + + +def greatest_semver_release(project: Project) -> Release | None: + return get_semver_releases(project).first() + + +def get_semver_releases(project: Project) -> QuerySet[Release]: + return ( + Release.objects.filter(projects=project, organization_id=project.organization_id) + .filter_to_semver() # type: ignore[attr-defined] + .annotate_prerelease_column() + .order_by(*[f"-{col}" for col in 
Release.SEMVER_COLS]) + ) def handle_is_subscribed( is_subscribed: bool, group_list: Sequence[Group], - project_lookup: dict[int, Any], - acting_user: User, + project_lookup: Mapping[int, Project], + acting_user, ) -> dict[str, str]: # TODO(dcramer): we could make these more efficient by first # querying for which `GroupSubscription` rows are present (if N > 2), @@ -761,14 +890,14 @@ def handle_is_subscribed( def handle_is_bookmarked( is_bookmarked: bool, - group_list: Sequence[Group] | None, - group_ids: Sequence[Group], - project_lookup: dict[int, Project], - acting_user: User | None, + group_list: Sequence[Group], + project_lookup: Mapping[int, Project], + acting_user, ) -> None: """ Creates bookmarks and subscriptions for a user, or deletes the existing bookmarks and subscriptions. """ + group_ids = [group.id for group in group_list] if is_bookmarked: for group in group_list: GroupBookmark.objects.get_or_create( @@ -776,13 +905,16 @@ def handle_is_bookmarked( group=group, user_id=acting_user.id if acting_user else None, ) - GroupSubscription.objects.subscribe( - subscriber=acting_user, group=group, reason=GroupSubscriptionReason.bookmark - ) - elif is_bookmarked is False: + if acting_user: + GroupSubscription.objects.subscribe( + subscriber=acting_user, + group=group, + reason=GroupSubscriptionReason.bookmark, + ) + elif is_bookmarked is False and acting_user is not None: GroupBookmark.objects.filter( group__in=group_ids, - user_id=acting_user.id if acting_user else None, + user_id=acting_user.id, ).delete() if group_list: GroupSubscription.objects.filter( @@ -792,12 +924,11 @@ def handle_is_bookmarked( def handle_has_seen( - has_seen: Any, + has_seen: bool | None, group_list: Sequence[Group], - group_ids: Sequence[Group], - project_lookup: dict[int, Project], + project_lookup: Mapping[int, Project], projects: Sequence[Project], - acting_user: User | None, + acting_user, ) -> None: is_member_map = { project.id: ( @@ -816,15 +947,17 @@ def handle_has_seen( project=project_lookup[group.project_id], values={"last_seen": django_timezone.now()}, ) - elif has_seen is False: - GroupSeen.objects.filter(group__in=group_ids, user_id=user_id).delete() + elif has_seen is False and user_id is not None: + GroupSeen.objects.filter( + group__in=[group.id for group in group_list], user_id=user_id + ).delete() def handle_priority( priority: str, group_list: Sequence[Group], - actor: User | None, - project_lookup: dict[int, Project], + acting_user, + project_lookup: Mapping[int, Project], ) -> None: for group in group_list: priority_value = PriorityLevel.from_str(priority) if priority else None @@ -833,7 +966,7 @@ def handle_priority( group=group, priority=priority_value, sender="manual_update_priority", - actor=actor, + actor=acting_user, project=project_lookup[group.project_id], ) group.update(priority_locked_at=django_timezone.now()) @@ -841,9 +974,9 @@ def handle_priority( def handle_is_public( is_public: bool, - group_list: list[Group], - project_lookup: dict[int, Project], - acting_user: User | None, + group_list: Sequence[Group], + project_lookup: Mapping[int, Project], + acting_user, ) -> str | None: """ Handle the isPublic flag on a group update. 
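[Editor's note, not part of the patch: the next hunk changes the GroupShare check from `if ...delete():` to `if ...delete()[0] > 0:`. Django's QuerySet.delete() returns a (total_deleted, per_model_counts) tuple, so the old truthiness test passed even when nothing was deleted. A quick illustration with a stand-in return value:]

# Illustrative only: what QuerySet.delete() returns when no rows matched.
deleted = (0, {})
assert bool(deleted) is True      # old check: a non-empty tuple is truthy even for zero deletions
assert (deleted[0] > 0) is False  # new check: correctly reports that nothing was deleted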
@@ -855,7 +988,7 @@ def handle_is_public( user_id = acting_user.id if acting_user else None share_id = None for group in group_list: - if GroupShare.objects.filter(group=group).delete(): + if GroupShare.objects.filter(group=group).delete()[0] > 0: share_id = None Activity.objects.create( project=project_lookup[group.project_id], @@ -885,9 +1018,9 @@ def handle_assigned_to( assigned_actor: Actor, assigned_by: str | None, integration: str | None, - group_list: list[Group], - project_lookup: dict[int, Project], - acting_user: User | None, + group_list: Sequence[Group], + project_lookup: Mapping[int, Project], + acting_user, ) -> ActorSerializerResponse | None: """ Handle the assignedTo field on a group update. diff --git a/src/sentry/api/serializers/models/__init__.py b/src/sentry/api/serializers/models/__init__.py index 55cd1ce1872ce6..dd1128c455d2a8 100644 --- a/src/sentry/api/serializers/models/__init__.py +++ b/src/sentry/api/serializers/models/__init__.py @@ -23,7 +23,6 @@ from .groupseen import * # noqa: F401,F403 from .grouptombstone import * # noqa: F401,F403 from .incidentactivity import * # noqa: F401,F403 -from .incidentseen import * # noqa: F401,F403 from .notification_action import * # noqa: F401,F403 from .organization import * # noqa: F401,F403 from .organization_access_request import * # noqa: F401,F403 diff --git a/src/sentry/api/serializers/models/apiauthorization.py b/src/sentry/api/serializers/models/apiauthorization.py index 9c3d9b795a9329..4dc4517e9e9ece 100644 --- a/src/sentry/api/serializers/models/apiauthorization.py +++ b/src/sentry/api/serializers/models/apiauthorization.py @@ -1,5 +1,6 @@ from sentry.api.serializers import Serializer, register, serialize from sentry.models.apiauthorization import ApiAuthorization +from sentry.organizations.services.organization import organization_service @register(ApiAuthorization) @@ -23,4 +24,9 @@ def serialize(self, obj, attrs, user, **kwargs): "scopes": obj.get_scopes(), "application": attrs["application"], "dateCreated": obj.date_added, + "organization": ( + organization_service.serialize_organization(id=obj.organization_id) + if obj.organization_id + else None + ), } diff --git a/src/sentry/api/serializers/models/dashboard.py b/src/sentry/api/serializers/models/dashboard.py index 1b8731adb11c62..3528bd1e62e5de 100644 --- a/src/sentry/api/serializers/models/dashboard.py +++ b/src/sentry/api/serializers/models/dashboard.py @@ -6,7 +6,7 @@ from sentry import features from sentry.api.serializers import Serializer, register, serialize from sentry.constants import ALL_ACCESS_PROJECTS -from sentry.models.dashboard import Dashboard +from sentry.models.dashboard import Dashboard, DashboardFavoriteUser from sentry.models.dashboard_permissions import DashboardPermissions from sentry.models.dashboard_widget import ( DashboardWidget, @@ -192,6 +192,8 @@ class DashboardListResponse(TypedDict): createdBy: UserSerializerResponse widgetDisplay: list[str] widgetPreview: list[dict[str, str]] + permissions: DashboardPermissionsResponse | None + isFavorited: bool class DashboardListSerializer(Serializer): @@ -204,6 +206,14 @@ def get_attrs(self, item_list, user, **kwargs): .values("dashboard_id", "order", "display_type", "detail", "id") ) + favorited_dashboard_ids = set( + DashboardFavoriteUser.objects.filter( + user_id=user.id, dashboard_id__in=item_dict.keys() + ).values_list("dashboard_id", flat=True) + ) + + permissions = DashboardPermissions.objects.filter(dashboard_id__in=item_dict.keys()) + result = defaultdict(lambda: {"widget_display": 
[], "widget_preview": [], "created_by": {}}) for widget in widgets: dashboard = item_dict[widget["dashboard_id"]] @@ -237,8 +247,13 @@ def get_attrs(self, item_list, user, **kwargs): ) } + for permission in permissions: + dashboard = item_dict[permission.dashboard_id] + result[dashboard]["permissions"] = serialize(permission) + for dashboard in item_dict.values(): result[dashboard]["created_by"] = serialized_users.get(str(dashboard.created_by_id)) + result[dashboard]["is_favorited"] = dashboard.id in favorited_dashboard_ids return result @@ -250,6 +265,8 @@ def serialize(self, obj, attrs, user, **kwargs) -> DashboardListResponse: "createdBy": attrs.get("created_by"), "widgetDisplay": attrs.get("widget_display", []), "widgetPreview": attrs.get("widget_preview", []), + "permissions": attrs.get("permissions", None), + "isFavorited": attrs.get("is_favorited", False), } return data @@ -276,6 +293,7 @@ class DashboardDetailsResponse(DashboardDetailsResponseOptional): projects: list[int] filters: DashboardFilters permissions: DashboardPermissionsResponse | None + isFavorited: bool @register(Dashboard) @@ -312,6 +330,7 @@ def serialize(self, obj, attrs, user, **kwargs) -> DashboardDetailsResponse: "projects": [project.id for project in obj.projects.all()], "filters": {}, "permissions": serialize(obj.permissions) if hasattr(obj, "permissions") else None, + "isFavorited": user.id in obj.favorited_by, } if obj.filters is not None: diff --git a/src/sentry/api/serializers/models/event.py b/src/sentry/api/serializers/models/event.py index ccde2d14b2d637..1d2255fa2e3941 100644 --- a/src/sentry/api/serializers/models/event.py +++ b/src/sentry/api/serializers/models/event.py @@ -586,6 +586,7 @@ def serialize(self, obj, attrs, user, **kwargs): "platform": str, "dateCreated": datetime, "crashFile": str | None, + "metadata": dict[str, Any] | None, }, ) @@ -642,6 +643,7 @@ def serialize(self, obj: BaseEvent, attrs, user, **kwargs) -> SimpleEventSeriali "dateCreated": obj.datetime, # Needed to generate minidump links in UI "crashFile": attrs["crash_file"], + "metadata": obj.get_event_metadata(), } return response diff --git a/src/sentry/api/serializers/models/incidentseen.py b/src/sentry/api/serializers/models/incidentseen.py deleted file mode 100644 index 68f02b3b5632ba..00000000000000 --- a/src/sentry/api/serializers/models/incidentseen.py +++ /dev/null @@ -1,26 +0,0 @@ -from sentry.api.serializers import Serializer, register -from sentry.incidents.models.incident import IncidentSeen -from sentry.users.services.user.serial import serialize_generic_user -from sentry.users.services.user.service import user_service - - -@register(IncidentSeen) -class IncidentSeenSerializer(Serializer): - def get_attrs(self, item_list, user, **kwargs): - item_users = user_service.serialize_many( - filter={ - "user_ids": [i.user_id for i in item_list], - }, - as_user=serialize_generic_user(user), - ) - user_map = {d["id"]: d for d in item_users} - - result = {} - for item in item_list: - result[item] = {"user": user_map[str(item.user_id)]} - return result - - def serialize(self, obj, attrs, user, **kwargs): - data = attrs["user"] - data["lastSeen"] = obj.last_seen - return data diff --git a/src/sentry/api/serializers/models/notification_action.py b/src/sentry/api/serializers/models/notification_action.py index eb04e8a33781ea..52b1cf3aaabe32 100644 --- a/src/sentry/api/serializers/models/notification_action.py +++ b/src/sentry/api/serializers/models/notification_action.py @@ -4,7 +4,7 @@ from django.contrib.auth.models import 
AnonymousUser from sentry.api.serializers import Serializer, register -from sentry.models.notificationaction import ( +from sentry.notifications.models.notificationaction import ( ActionService, ActionTarget, ActionTrigger, diff --git a/src/sentry/api/serializers/models/organization.py b/src/sentry/api/serializers/models/organization.py index 281f6108427e3e..57fe4a7765e958 100644 --- a/src/sentry/api/serializers/models/organization.py +++ b/src/sentry/api/serializers/models/organization.py @@ -30,7 +30,6 @@ from sentry.auth.services.auth import RpcOrganizationAuthConfig, auth_service from sentry.constants import ( ACCOUNT_RATE_LIMIT_DEFAULT, - AI_SUGGESTED_SOLUTION, ALERTS_MEMBER_WRITE_DEFAULT, ATTACHMENTS_ROLE_DEFAULT, DATA_CONSENT_DEFAULT, @@ -482,7 +481,6 @@ class DetailedOrganizationSerializerResponse(_DetailedOrganizationSerializerResp pendingAccessRequests: int onboardingTasks: list[OnboardingTasksSerializerResponse] codecovAccess: bool - aiSuggestedSolution: bool hideAiFeatures: bool githubPRBot: bool githubOpenPRBot: bool @@ -599,9 +597,6 @@ def serialize( # type: ignore[explicit-override, override] ), "relayPiiConfig": str(obj.get_option("sentry:relay_pii_config") or "") or None, "codecovAccess": bool(obj.flags.codecov_access), - "aiSuggestedSolution": bool( - obj.get_option("sentry:ai_suggested_solution", AI_SUGGESTED_SOLUTION) - ), "hideAiFeatures": bool( obj.get_option("sentry:hide_ai_features", HIDE_AI_FEATURES_DEFAULT) ), @@ -614,7 +609,9 @@ def serialize( # type: ignore[explicit-override, override] "githubNudgeInvite": bool( obj.get_option("sentry:github_nudge_invite", GITHUB_COMMENT_BOT_DEFAULT) ), - "genAIConsent": bool(obj.get_option("sentry:gen_ai_consent", DATA_CONSENT_DEFAULT)), + "genAIConsent": bool( + obj.get_option("sentry:gen_ai_consent_v2024_11_14", DATA_CONSENT_DEFAULT) + ), "aggregatedDataConsent": bool( obj.get_option("sentry:aggregated_data_consent", DATA_CONSENT_DEFAULT) ), @@ -698,7 +695,13 @@ def serialize( # type: ignore[explicit-override, override] if sample_rate is not None: context["planSampleRate"] = sample_rate - desired_sample_rate, _ = get_org_sample_rate(org_id=obj.id, default_sample_rate=sample_rate) + if is_project_mode_sampling(obj): + desired_sample_rate = None + else: + desired_sample_rate, _ = get_org_sample_rate( + org_id=obj.id, default_sample_rate=sample_rate + ) + if desired_sample_rate is not None: context["desiredSampleRate"] = desired_sample_rate diff --git a/src/sentry/api/serializers/models/plugin.py b/src/sentry/api/serializers/models/plugin.py index 410985ab4572cd..41e3092eb0a7b1 100644 --- a/src/sentry/api/serializers/models/plugin.py +++ b/src/sentry/api/serializers/models/plugin.py @@ -6,8 +6,6 @@ from sentry.api.serializers import Serializer from sentry.models.options.project_option import ProjectOption from sentry.models.project import Project -from sentry.utils.assets import get_asset_url -from sentry.utils.http import absolute_uri # Dict with the plugin_name as the key, and enabling_feature_name as the value SHADOW_DEPRECATED_PLUGINS = { @@ -70,10 +68,6 @@ def serialize(self, obj, attrs, user, **kwargs): "metadata": obj.get_metadata(), "contexts": contexts, "status": obj.get_status(), - "assets": [ - {"url": absolute_uri(get_asset_url(obj.asset_key or obj.slug, asset))} - for asset in obj.get_assets() - ], "doc": doc, "firstPartyAlternative": getattr(obj, "alternative", None), "deprecationDate": ( diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index 
9574a289bfb6c1..840d4af67c0dce 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -78,6 +78,9 @@ "first-event-severity-calculation", "alert-filters", "servicehooks", + "similarity-embeddings", + "similarity-embeddings-delete-by-hash", + "similarity-embeddings-backfill", } @@ -770,14 +773,16 @@ def serialize( ) if not self._collapse(LATEST_DEPLOYS_KEY): context[LATEST_DEPLOYS_KEY] = attrs["deploys"] - if "stats" in attrs: - context.update(stats=attrs["stats"]) - if "transactionStats" in attrs: - context.update(transactionStats=attrs["transactionStats"]) - if "sessionStats" in attrs: - context.update(sessionStats=attrs["sessionStats"]) - if "options" in attrs: - context.update(options=attrs["options"]) + + if attrs["has_access"] or user.is_staff: + if "stats" in attrs: + context.update(stats=attrs["stats"]) + if "transactionStats" in attrs: + context.update(transactionStats=attrs["transactionStats"]) + if "sessionStats" in attrs: + context.update(sessionStats=attrs["sessionStats"]) + if "options" in attrs: + context.update(options=attrs["options"]) return context diff --git a/src/sentry/api/serializers/rest_framework/dashboard.py b/src/sentry/api/serializers/rest_framework/dashboard.py index 7d2fa91c3e9a9c..2c0a022186a181 100644 --- a/src/sentry/api/serializers/rest_framework/dashboard.py +++ b/src/sentry/api/serializers/rest_framework/dashboard.py @@ -4,6 +4,7 @@ from enum import Enum from typing import TypedDict +import sentry_sdk from django.db.models import Max from drf_spectacular.types import OpenApiTypes from drf_spectacular.utils import extend_schema_field, extend_schema_serializer @@ -26,6 +27,7 @@ DashboardWidgetTypes, DatasetSourcesTypes, ) +from sentry.models.organization import Organization from sentry.models.team import Team from sentry.relay.config.metric_extraction import get_current_widget_specs, widget_exceeds_max_specs from sentry.search.events.builder.discover import UnresolvedQuery @@ -38,6 +40,7 @@ set_or_create_on_demand_state, ) from sentry.tasks.relay import schedule_invalidate_project_config +from sentry.users.models.user import User from sentry.utils.dates import parse_stats_period from sentry.utils.strings import oxfordize_list @@ -171,6 +174,33 @@ class DashboardWidgetQuerySerializer(CamelSnakeSerializer[Dashboard]): validate_id = validate_id + def get_metrics_features( + self, organization: Organization | None, user: User | None + ) -> dict[str, bool | None]: + if organization is None or user is None: + return {} + + feature_names = [ + "organizations:mep-rollout-flag", + "organizations:dynamic-sampling", + "organizations:performance-use-metrics", + "organizations:dashboards-mep", + ] + batch_features = features.batch_has( + feature_names, + organization=organization, + actor=user, + ) + + return ( + batch_features.get(f"organization:{organization.id}", {}) + if batch_features is not None + else { + feature_name: features.has(feature_name, organization=organization, actor=user) + for feature_name in feature_names + } + ) + def validate(self, data): if not data.get("id"): keys = set(data.keys()) @@ -185,7 +215,7 @@ def validate(self, data): # Validate the query that would be created when run. 
conditions = self._get_attr(data, "conditions", "") orderby = self._get_attr(data, "orderby", "") - is_table = is_table_display_type(self.context.get("displayType")) + is_table = is_table_display_type(self.context.get("display_type")) columns = self._get_attr(data, "columns", []).copy() aggregates = self._get_attr(data, "aggregates", []).copy() fields = columns + aggregates @@ -226,6 +256,17 @@ def validate(self, data): data["issue_query_error"] = {"conditions": [f"Invalid conditions: {err}"]} try: + batch_features = self.get_metrics_features( + self.context.get("organization"), self.context.get("user") + ) + use_metrics = bool( + ( + batch_features.get("organizations:mep-rollout-flag", False) + and batch_features.get("organizations:dynamic-sampling", False) + ) + or batch_features.get("organizations:performance-use-metrics", False) + or batch_features.get("organizations:dashboards-mep", False) + ) # When using the eps/epm functions, they require an interval argument # or to provide the start/end so that the interval can be computed. # This uses a hard coded start/end to ensure the validation succeeds @@ -239,6 +280,7 @@ def validate(self, data): "aggregates_only": not is_table, }, use_aggregate_conditions=True, + has_metrics=use_metrics, ), ) @@ -312,7 +354,24 @@ def validate_display_type(self, display_type): return DashboardWidgetDisplayTypes.get_id_for_type_name(display_type) def validate_widget_type(self, widget_type): - return DashboardWidgetTypes.get_id_for_type_name(widget_type) + widget_type = DashboardWidgetTypes.get_id_for_type_name(widget_type) + if widget_type == DashboardWidgetTypes.DISCOVER or widget_type is None: + sentry_sdk.set_context( + "dashboard", + { + "org_slug": self.context["organization"].slug, + }, + ) + sentry_sdk.capture_message("Created or updated widget with discover dataset.") + if features.has( + "organizations:deprecate-discover-widget-type", + self.context["organization"], + actor=self.context["request"].user, + ): + raise serializers.ValidationError( + "Attribute value `discover` is deprecated. 
Please use `error-events` or `transaction-like`" + ) + return widget_type validate_id = validate_id @@ -321,6 +380,20 @@ def validate_interval(self, interval): raise serializers.ValidationError("Invalid interval") return interval + def to_internal_value(self, data): + # Update the context for the queries serializer because the display type is + # required for validation of the queries + queries_serializer = self.fields["queries"] + additional_context = {} + + if data.get("display_type"): + additional_context["display_type"] = data.get("display_type") + if self.context.get("request") and self.context["request"].user: + additional_context["user"] = self.context["request"].user + + queries_serializer.context.update(additional_context) + return super().to_internal_value(data) + def validate(self, data): query_errors = [] all_columns: set[str] = set() @@ -567,7 +640,9 @@ def validate(self, data): permissions = data.get("permissions") if permissions and self.instance: currentUser = self.context["request"].user - if self.instance.created_by_id != currentUser.id: + # managers and owners + has_write_access = self.context["request"].access.has_scope("org:write") + if self.instance.created_by_id != currentUser.id and not has_write_access: raise serializers.ValidationError( "Only the Dashboard Creator may modify Dashboard Edit Access" ) diff --git a/src/sentry/api/serializers/rest_framework/notification_action.py b/src/sentry/api/serializers/rest_framework/notification_action.py index 98f2048de063e6..a0df4108607086 100644 --- a/src/sentry/api/serializers/rest_framework/notification_action.py +++ b/src/sentry/api/serializers/rest_framework/notification_action.py @@ -1,3 +1,4 @@ +from collections.abc import Sequence from typing import TypedDict from django.db import router, transaction @@ -9,13 +10,17 @@ from sentry.constants import SentryAppInstallationStatus from sentry.integrations.services.integration import integration_service from sentry.integrations.slack.utils.channel import get_channel_id, validate_channel_id -from sentry.models.notificationaction import ActionService, ActionTarget, NotificationAction from sentry.models.project import Project +from sentry.notifications.models.notificationaction import ( + ActionService, + ActionTarget, + NotificationAction, +) from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation from sentry.utils.strings import oxfordize_list -def format_choices_text(choices: list[tuple[int, str]]): +def format_choices_text(choices: Sequence[tuple[int, str]]): choices_as_display_text = [f"'{display_text}'" for (_, display_text) in choices] return oxfordize_list(choices_as_display_text) @@ -29,7 +34,7 @@ def format_choices_text(choices: list[tuple[int, str]]): # Note the ordering of fields affects the Spike Protection API Documentation -class NotificationActionInputData(TypedDict): +class NotificationActionInputData(TypedDict, total=False): trigger_type: int service_type: int integration_id: int @@ -156,6 +161,11 @@ def validate_integration_and_service(self, data: NotificationActionInputData): } ) integration = integration_service.get_integration(integration_id=data.get("integration_id")) + if integration is None: + raise serializers.ValidationError( + f"Service type of '{service_provider}' requires having an active integration" + ) + if integration and service_provider != integration.provider: raise serializers.ValidationError( { @@ -247,7 +257,7 @@ def validate_discord_channel( ) -> NotificationActionInputData: """ Validates that SPECIFIC targets 
for DISCORD service have the following target data: - target_display: Discord channel id + target_display: Discord channel name target_identifier: Discord channel id NOTE: Reaches out to via discord integration to verify channel """ @@ -259,12 +269,12 @@ def validate_discord_channel( ): return data - channel_name = data.get("target_display") - channel_id = data.get("target_identifier") + channel_name = data.get("target_display", None) + channel_id = data.get("target_identifier", None) - if not channel_id and channel_name: + if channel_id is None or channel_name is None: raise serializers.ValidationError( - {"target_identifier": "Did not receive a discord channel id."} + {"target_identifier": "Did not receive a discord channel id or name."} ) try: @@ -276,7 +286,6 @@ def validate_discord_channel( except Exception as e: raise serializers.ValidationError({"target_identifier": str(e)}) - data["target_identifier"] = channel_id return data def validate_pagerduty_service( diff --git a/src/sentry/api/serializers/snuba.py b/src/sentry/api/serializers/snuba.py index c42d6d76d4baa8..3a5f09c8b24d18 100644 --- a/src/sentry/api/serializers/snuba.py +++ b/src/sentry/api/serializers/snuba.py @@ -114,6 +114,23 @@ def serialize( ) } + confidence_values = [] + if "confidence" in result.data: + for key, group in itertools.groupby(result.data["confidence"], key=lambda r: r["time"]): + result_row = [] + for confidence_row in group: + item = {"count": confidence_row.get(column, None)} + if extra_columns is not None: + for extra_column in extra_columns: + item[extra_column] = confidence_row.get(extra_column, 0) + if self.lookup: + value = value_from_row(confidence_row, self.lookup.columns) + item[self.lookup.name] = (attrs.get(value),) + result_row.append(item) + confidence_values.append((key, result_row)) + # confidence only comes from the RPC which already helps us zerofill by returning all buckets + res["confidence"] = confidence_values + if result.data.get("totals"): res["totals"] = {"count": result.data["totals"][column]} # If order is passed let that overwrite whats in data since its order for multi-axis diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py index 86a808a5587777..ab5ea0cba6d58a 100644 --- a/src/sentry/api/urls.py +++ b/src/sentry/api/urls.py @@ -83,6 +83,10 @@ OrganizationFlagLogDetailsEndpoint, OrganizationFlagLogIndexEndpoint, ) +from sentry.flags.endpoints.secrets import ( + OrganizationFlagsWebHookSigningSecretEndpoint, + OrganizationFlagsWebHookSigningSecretsEndpoint, +) from sentry.incidents.endpoints.organization_alert_rule_activations import ( OrganizationAlertRuleActivationsEndpoint, ) @@ -99,23 +103,10 @@ OrganizationAlertRuleIndexEndpoint, OrganizationCombinedRuleIndexEndpoint, ) -from sentry.incidents.endpoints.organization_incident_activity_index import ( - OrganizationIncidentActivityIndexEndpoint, -) -from sentry.incidents.endpoints.organization_incident_comment_details import ( - OrganizationIncidentCommentDetailsEndpoint, -) -from sentry.incidents.endpoints.organization_incident_comment_index import ( - OrganizationIncidentCommentIndexEndpoint, -) from sentry.incidents.endpoints.organization_incident_details import ( OrganizationIncidentDetailsEndpoint, ) from sentry.incidents.endpoints.organization_incident_index import OrganizationIncidentIndexEndpoint -from sentry.incidents.endpoints.organization_incident_seen import OrganizationIncidentSeenEndpoint -from sentry.incidents.endpoints.organization_incident_subscription_index import ( - 
OrganizationIncidentSubscriptionIndexEndpoint, -) from sentry.incidents.endpoints.project_alert_rule_details import ProjectAlertRuleDetailsEndpoint from sentry.incidents.endpoints.project_alert_rule_index import ProjectAlertRuleIndexEndpoint from sentry.incidents.endpoints.project_alert_rule_task_details import ( @@ -349,6 +340,7 @@ from sentry.users.api.endpoints.user_roles import UserUserRolesEndpoint from sentry.users.api.endpoints.userroles_details import UserRoleDetailsEndpoint from sentry.users.api.endpoints.userroles_index import UserRolesEndpoint +from sentry.workflow_engine.endpoints import urls as workflow_urls from .endpoints.accept_organization_invite import AcceptOrganizationInvite from .endpoints.accept_project_transfer import AcceptProjectTransferEndpoint @@ -384,7 +376,6 @@ SourceMapsEndpoint, UnknownDebugFilesEndpoint, ) -from .endpoints.event_ai_suggested_fix import EventAiSuggestedFixEndpoint from .endpoints.event_apple_crash_report import EventAppleCrashReportEndpoint from .endpoints.event_attachment_details import EventAttachmentDetailsEndpoint from .endpoints.event_attachments import EventAttachmentsEndpoint @@ -442,6 +433,7 @@ from .endpoints.organization_config_repositories import OrganizationConfigRepositoriesEndpoint from .endpoints.organization_dashboard_details import ( OrganizationDashboardDetailsEndpoint, + OrganizationDashboardFavoriteEndpoint, OrganizationDashboardVisitEndpoint, ) from .endpoints.organization_dashboard_widget_details import ( @@ -500,9 +492,6 @@ OrganizationMemberIndexEndpoint, ) from .endpoints.organization_member.team_details import OrganizationMemberTeamDetailsEndpoint -from .endpoints.organization_member_unreleased_commits import ( - OrganizationMemberUnreleasedCommitsEndpoint, -) from .endpoints.organization_metrics_code_locations import OrganizationMetricsCodeLocationsEndpoint from .endpoints.organization_metrics_details import OrganizationMetricsDetailsEndpoint from .endpoints.organization_metrics_meta import ( @@ -510,7 +499,6 @@ OrganizationMetricsCompatibilitySums, ) from .endpoints.organization_metrics_query import OrganizationMetricsQueryEndpoint -from .endpoints.organization_metrics_samples import OrganizationMetricsSamplesEndpoint from .endpoints.organization_metrics_tag_details import OrganizationMetricsTagDetailsEndpoint from .endpoints.organization_metrics_tags import OrganizationMetricsTagsEndpoint from .endpoints.organization_on_demand_metrics_estimation_stats import ( @@ -524,7 +512,6 @@ from .endpoints.organization_profiling_functions import OrganizationProfilingFunctionTrendsEndpoint from .endpoints.organization_profiling_profiles import ( OrganizationProfilingChunksEndpoint, - OrganizationProfilingChunksFlamegraphEndpoint, OrganizationProfilingFlamegraphEndpoint, OrganizationProfilingHasChunksEndpoint, ) @@ -573,9 +560,6 @@ OrganizationTraceSpansEndpoint, OrganizationTracesStatsEndpoint, ) -from .endpoints.organization_transaction_anomaly_detection import ( - OrganizationTransactionAnomalyDetectionEndpoint, -) from .endpoints.organization_user_details import OrganizationUserDetailsEndpoint from .endpoints.organization_user_reports import OrganizationUserReportsEndpoint from .endpoints.organization_user_teams import OrganizationUserTeamsEndpoint @@ -583,12 +567,6 @@ from .endpoints.project_agnostic_rule_conditions import ProjectAgnosticRuleConditionsEndpoint from .endpoints.project_artifact_bundle_file_details import ProjectArtifactBundleFileDetailsEndpoint from .endpoints.project_artifact_bundle_files import 
ProjectArtifactBundleFilesEndpoint -from .endpoints.project_autofix_codebase_index_status import ( - ProjectAutofixCodebaseIndexStatusEndpoint, -) -from .endpoints.project_autofix_create_codebase_index import ( - ProjectAutofixCreateCodebaseIndexEndpoint, -) from .endpoints.project_commits import ProjectCommitsEndpoint from .endpoints.project_create_sample import ProjectCreateSampleEndpoint from .endpoints.project_create_sample_transaction import ProjectCreateSampleTransactionEndpoint @@ -1211,21 +1189,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-data-secrecy", ), # Incidents - re_path( - r"^(?P[^\/]+)/incidents/(?P[^\/]+)/activity/$", - OrganizationIncidentActivityIndexEndpoint.as_view(), - name="sentry-api-0-organization-incident-activity", - ), - re_path( - r"^(?P[^\/]+)/incidents/(?P[^\/]+)/comments/$", - OrganizationIncidentCommentIndexEndpoint.as_view(), - name="sentry-api-0-organization-incident-comments", - ), - re_path( - r"^(?P[^\/]+)/incidents/(?P[^\/]+)/comments/(?P[^\/]+)/$", - OrganizationIncidentCommentDetailsEndpoint.as_view(), - name="sentry-api-0-organization-incident-comment-details", - ), re_path( r"^(?P[^\/]+)/incidents/(?P[^\/]+)/$", OrganizationIncidentDetailsEndpoint.as_view(), @@ -1236,16 +1199,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationIncidentIndexEndpoint.as_view(), name="sentry-api-0-organization-incident-index", ), - re_path( - r"^(?P[^\/]+)/incidents/(?P[^\/]+)/seen/$", - OrganizationIncidentSeenEndpoint.as_view(), - name="sentry-api-0-organization-incident-seen", - ), - re_path( - r"^(?P[^\/]+)/incidents/(?P[^\/]+)/subscriptions/$", - OrganizationIncidentSubscriptionIndexEndpoint.as_view(), - name="sentry-api-0-organization-incident-subscription-index", - ), re_path( r"^(?P[^\/]+)/chunk-upload/$", ChunkUploadEndpoint.as_view(), @@ -1340,6 +1293,11 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationDashboardVisitEndpoint.as_view(), name="sentry-api-0-organization-dashboard-visit", ), + re_path( + r"^(?P[^\/]+)/dashboards/(?P[^\/]+)/favorite/$", + OrganizationDashboardFavoriteEndpoint.as_view(), + name="sentry-api-0-organization-dashboard-favorite", + ), re_path( r"^(?P[^\/]+)/shortids/(?P[^\/]+)/$", ShortIdLookupEndpoint.as_view(), @@ -1810,11 +1768,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMemberDetailsEndpoint.as_view(), name="sentry-api-0-organization-member-details", ), - re_path( - r"^(?P[^\/]+)/members/(?P[^\/]+)/unreleased-commits/$", - OrganizationMemberUnreleasedCommitsEndpoint.as_view(), - name="sentry-api-0-organization-member-unreleased-commits", - ), re_path( r"^(?P[^\/]+)/members/(?P[^\/]+)/teams/(?P[^\/]+)/$", OrganizationMemberTeamDetailsEndpoint.as_view(), @@ -2041,11 +1994,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationJoinRequestEndpoint.as_view(), name="sentry-api-0-organization-join-request", ), - re_path( - r"^(?P[^\/]+)/transaction-anomaly-detection/$", - OrganizationTransactionAnomalyDetectionEndpoint.as_view(), - name="sentry-api-0-organization-transaction-anomaly-detection", - ), # relay usage re_path( r"^(?P[^\/]+)/relay_usage/$", @@ -2064,10 +2012,20 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: name="sentry-api-0-organization-flag-log", ), re_path( - r"^(?P[^\/]+)/flags/hooks/provider/(?P[\w-]+)/token/(?P.+)/$", + r"^(?P[^\/]+)/flags/hooks/provider/(?P[\w-]+)/$", 
OrganizationFlagsHooksEndpoint.as_view(), name="sentry-api-0-organization-flag-hooks", ), + re_path( + r"^(?P[^\/]+)/flags/signing-secrets/$", + OrganizationFlagsWebHookSigningSecretsEndpoint.as_view(), + name="sentry-api-0-organization-flag-hooks-signing-secrets", + ), + re_path( + r"^(?P[^\/]+)/flags/signing-secrets/(?P\d+)/$", + OrganizationFlagsWebHookSigningSecretEndpoint.as_view(), + name="sentry-api-0-organization-flag-hooks-signing-secret", + ), # Replays re_path( r"^(?P[^\/]+)/replays/$", @@ -2151,11 +2109,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationMetricsQueryEndpoint.as_view(), name="sentry-api-0-organization-metrics-query", ), - re_path( - r"^(?P[^/]+)/metrics/samples/$", - OrganizationMetricsSamplesEndpoint.as_view(), - name="sentry-api-0-organization-metrics-samples", - ), re_path( r"^(?P[^/]+)/metrics/tags/$", OrganizationMetricsTagsEndpoint.as_view(), @@ -2175,11 +2128,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: OrganizationProfilingFlamegraphEndpoint.as_view(), name="sentry-api-0-organization-profiling-flamegraph", ), - re_path( - r"^chunks-flamegraph/$", - OrganizationProfilingChunksFlamegraphEndpoint.as_view(), - name="sentry-api-0-organization-profiling-chunks-flamegraph", - ), re_path( r"^function-trends/$", OrganizationProfilingFunctionTrendsEndpoint.as_view(), @@ -2322,11 +2270,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: EventGroupingInfoEndpoint.as_view(), name="sentry-api-0-event-grouping-info", ), - re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/ai-fix-suggest/$", - EventAiSuggestedFixEndpoint.as_view(), - name="sentry-api-0-event-ai-fix-suggest", - ), re_path( r"^(?P[^\/]+)/(?P[^\/]+)/events/(?P[\w-]+)/apple-crash-report$", EventAppleCrashReportEndpoint.as_view(), @@ -2833,16 +2776,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectMonitorStatsEndpoint.as_view(), name="sentry-api-0-project-monitor-stats", ), - re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/autofix/codebase-index/status/$", - ProjectAutofixCodebaseIndexStatusEndpoint.as_view(), - name="sentry-api-0-project-autofix-codebase-index-status", - ), - re_path( - r"^(?P[^\/]+)/(?P[^\/]+)/autofix/codebase-index/create/$", - ProjectAutofixCreateCodebaseIndexEndpoint.as_view(), - name="sentry-api-0-project-autofix-codebase-index-create", - ), # Uptime re_path( r"^(?P[^\/]+)/(?P[^\/]+)/uptime/(?P[^\/]+)/$", @@ -2854,6 +2787,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]: ProjectUptimeAlertIndexEndpoint.as_view(), name="sentry-api-0-project-uptime-alert-index", ), + *workflow_urls.urlpatterns, ] TEAM_URLS = [ diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index fb0738df6b3ed3..ebfbf0dfac2cd4 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -234,6 +234,35 @@ def get_date_range_from_stats_period( return start, end +def clamp_date_range( + range: tuple[datetime.datetime, datetime.datetime], max_timedelta: datetime.timedelta +) -> tuple[datetime.datetime, datetime.datetime]: + """ + Accepts a date range and a maximum time delta. If the date range is shorter + than the max delta, returns the range as-is. If the date range is longer than the max delta, clamps the range, anchoring to the end. + + If any of the inputs are invalid (e.g., a negative range), returns the range + without modifying it.
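(Editorial aside: a minimal usage sketch of the new clamp_date_range helper, assuming it ships as written in the hunk above; the dates are invented for illustration.)

from datetime import datetime, timedelta

from sentry.api.utils import clamp_date_range

start = datetime(2024, 1, 1)
end = datetime(2024, 1, 31)

# A 30-day range against a 14-day maximum is clamped and anchored to the end.
clamp_date_range((start, end), timedelta(days=14))
# -> (datetime(2024, 1, 17), datetime(2024, 1, 31))

# A range already within the maximum is returned unchanged.
clamp_date_range((start, end), timedelta(days=60))
# -> (datetime(2024, 1, 1), datetime(2024, 1, 31))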
+ + :param range: A tuple of two `datetime.datetime` objects + :param max_timedelta: Maximum allowed range delta + :return: A tuple of two `datetime.datetime` objects + """ + + [start, end] = range + delta = end - start + + # Ignore negative max time deltas + if max_timedelta < datetime.timedelta(0): + return (start, end) + + # Ignore if delta is within acceptable range + if delta < max_timedelta: + return (start, end) + + return (end - max_timedelta, end) + + # The wide typing allows us to move towards RpcUserOrganizationContext in the future to save RPC calls. # If you can use the wider more correct type, please do. def is_member_disabled_from_limit( diff --git a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py index 7f5ef84ac62f9d..230d3e1703f016 100644 --- a/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_ownership_allowlist_dont_modify.py @@ -42,7 +42,6 @@ "/api/0/sentry-apps/{sentry_app_id_or_slug}/features/", "/api/0/organizations/{organization_id_or_slug}/monitors/", "/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/filters/{filter_id}/", - "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/unreleased-commits/", "/api/0/sentry-apps/{sentry_app_id_or_slug}/api-tokens/", "/api/0/internal/quotas/", "/api/0/sentry-apps/{sentry_app_id_or_slug}/stats/", diff --git a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py index 6a7117f1c73097..f0ca4e6a4e1c67 100644 --- a/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py +++ b/src/sentry/apidocs/api_publish_status_allowlist_dont_modify.py @@ -406,9 +406,6 @@ "/api/0/organizations/{organization_id_or_slug}/sessions/": {"GET"}, "/api/0/organizations/{organization_id_or_slug}/releases/{version}/resolved/": {"GET"}, "/api/0/organizations/{organization_id_or_slug}/request-project-creation/": {"POST"}, - "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/unreleased-commits/": { - "GET" - }, "/api/0/organizations/{organization_id_or_slug}/members/{member_id}/teams/{team_id_or_slug}/": { "GET", "PUT", diff --git a/src/sentry/apidocs/examples/dashboard_examples.py b/src/sentry/apidocs/examples/dashboard_examples.py index 20fdeaeaf8c920..f844e04b04585c 100644 --- a/src/sentry/apidocs/examples/dashboard_examples.py +++ b/src/sentry/apidocs/examples/dashboard_examples.py @@ -72,6 +72,7 @@ "isEditableByEveryone": True, "teamsWithEditAccess": [], }, + "isFavorited": False, } DASHBOARDS_OBJECT = [ @@ -104,6 +105,8 @@ }, "widgetDisplay": [], "widgetPreview": [], + "permissions": {"isEditableByEveryone": True, "teamsWithEditAccess": []}, + "isFavorited": False, }, { "id": "2", @@ -134,6 +137,8 @@ }, "widgetDisplay": [], "widgetPreview": [], + "permissions": None, + "isFavorited": False, }, ] diff --git a/src/sentry/apidocs/examples/event_examples.py b/src/sentry/apidocs/examples/event_examples.py index 22797eafad0537..0dae9ea877e764 100644 --- a/src/sentry/apidocs/examples/event_examples.py +++ b/src/sentry/apidocs/examples/event_examples.py @@ -28,6 +28,7 @@ "location": "example.py:123", "culprit": "/books/new/", "projectID": "49271", + "metadata": None, } GROUP_EVENT: GroupEventDetailsResponse = { diff --git a/src/sentry/apidocs/examples/organization_examples.py b/src/sentry/apidocs/examples/organization_examples.py index 09f90d6ba36b71..676eee217115c6 100644 --- a/src/sentry/apidocs/examples/organization_examples.py 
+++ b/src/sentry/apidocs/examples/organization_examples.py @@ -310,7 +310,6 @@ class OrganizationExamples: "allowJoinRequests": True, "relayPiiConfig": None, "codecovAccess": False, - "aiSuggestedSolution": True, "hideAiFeatures": False, "githubPRBot": True, "githubOpenPRBot": True, diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py index d46b805b9dc825..cc609bbc07168a 100644 --- a/src/sentry/apidocs/examples/project_examples.py +++ b/src/sentry/apidocs/examples/project_examples.py @@ -526,3 +526,30 @@ class ProjectExamples: response_only=True, ), ] + + GET_PROJECT_FILTERS = [ + OpenApiExample( + "List a project's filters", + value=[ + {"id": "browser-extensions", "active": False}, + {"id": "filtered-transaction", "active": True}, + { + "id": "legacy-browsers", + "active": [ + "opera", + "edge", + "safari", + "chrome", + "ie", + "opera_mini", + "firefox", + "android", + ], + }, + {"id": "localhost", "active": False}, + {"id": "web-crawlers", "active": True}, + ], + status_codes=["200"], + response_only=True, + ), + ] diff --git a/src/sentry/auth/services/auth/model.py b/src/sentry/auth/services/auth/model.py index 86c918af04a68c..d0bbd928b60133 100644 --- a/src/sentry/auth/services/auth/model.py +++ b/src/sentry/auth/services/auth/model.py @@ -40,6 +40,7 @@ class RpcApiToken(RpcModel): expires_at: datetime.datetime | None = None allowed_origins: list[str] = Field(default_factory=list) scope_list: list[str] = Field(default_factory=list) + scoping_organization_id: int | None = None class RpcMemberSsoState(RpcModel): diff --git a/src/sentry/auth/services/auth/serial.py b/src/sentry/auth/services/auth/serial.py index 514b54ff1eb090..722649ed210584 100644 --- a/src/sentry/auth/services/auth/serial.py +++ b/src/sentry/auth/services/auth/serial.py @@ -85,6 +85,7 @@ def serialize_api_token(at: ApiToken) -> RpcApiToken: user_id=at.user_id, application_id=at.application_id, organization_id=at.organization_id, + scoping_organization_id=at.scoping_organization_id, application_is_active=at.application is None or at.application.is_active, token=at.token, hashed_token=at.hashed_token, diff --git a/src/sentry/auth/system.py b/src/sentry/auth/system.py index baa20a77f4bc04..6bc29df82b15e5 100644 --- a/src/sentry/auth/system.py +++ b/src/sentry/auth/system.py @@ -46,6 +46,7 @@ class SystemToken: token = "" application = None organization_id = None + scoping_organization_id = None @classmethod def from_request(cls, request: HttpRequest, token: str) -> SystemToken | None: diff --git a/src/sentry/autofix/utils.py b/src/sentry/autofix/utils.py index dc9b5f5fe78f41..4de15e4be3ca06 100644 --- a/src/sentry/autofix/utils.py +++ b/src/sentry/autofix/utils.py @@ -26,10 +26,10 @@ class AutofixRequest(TypedDict): class AutofixStatus(str, enum.Enum): COMPLETED = "COMPLETED" ERROR = "ERROR" - PENDING = "PENDING" PROCESSING = "PROCESSING" NEED_MORE_INFORMATION = "NEED_MORE_INFORMATION" CANCELLED = "CANCELLED" + WAITING_FOR_USER_RESPONSE = "WAITING_FOR_USER_RESPONSE" class AutofixState(BaseModel): diff --git a/src/sentry/backup/comparators.py b/src/sentry/backup/comparators.py index cb1b27fa603aa7..a2f5a9e7d1ac7b 100644 --- a/src/sentry/backup/comparators.py +++ b/src/sentry/backup/comparators.py @@ -802,6 +802,9 @@ def get_default_comparators() -> dict[str, list[JSONScrubbingComparator]]: "sentry.alertrule": [ DateUpdatedComparator("date_modified"), ], + "sentry.dashboardfavoriteuser": [ + DateUpdatedComparator("date_added", "date_updated"), + ], 
"sentry.groupsearchview": [DateUpdatedComparator("date_updated")], "sentry.incident": [UUID4Comparator("detection_uuid")], "sentry.incidentactivity": [UUID4Comparator("notification_uuid")], diff --git a/src/sentry/backup/crypto.py b/src/sentry/backup/crypto.py index 5ea2a64eac2e93..1e842ca6e2ed01 100644 --- a/src/sentry/backup/crypto.py +++ b/src/sentry/backup/crypto.py @@ -54,8 +54,6 @@ class Encryptor(ABC): A `IO[bytes]`-wrapper that contains relevant information and methods to encrypt some an in-memory JSON-ifiable dict. """ - __fp: IO[bytes] - @abstractmethod def get_public_key_pem(self) -> bytes: pass @@ -67,10 +65,10 @@ class LocalFileEncryptor(Encryptor): """ def __init__(self, fp: IO[bytes]): - self.__fp = fp + self.__key = fp.read() def get_public_key_pem(self) -> bytes: - return self.__fp.read() + return self.__key class GCPKMSEncryptor(Encryptor): @@ -82,7 +80,7 @@ class GCPKMSEncryptor(Encryptor): crypto_key_version: CryptoKeyVersion | None = None def __init__(self, fp: IO[bytes]): - self.__fp = fp + self.__key = fp.read() @classmethod def from_crypto_key_version(cls, crypto_key_version: CryptoKeyVersion) -> GCPKMSEncryptor: @@ -93,7 +91,7 @@ def from_crypto_key_version(cls, crypto_key_version: CryptoKeyVersion) -> GCPKMS def get_public_key_pem(self) -> bytes: if self.crypto_key_version is None: # Read the user supplied configuration into the proper format. - gcp_kms_config_json = orjson.loads(self.__fp.read()) + gcp_kms_config_json = orjson.loads(self.__key) try: self.crypto_key_version = CryptoKeyVersion(**gcp_kms_config_json) except TypeError: @@ -217,12 +215,6 @@ class Decryptor(ABC): tarball. """ - __fp: IO[bytes] - - @abstractmethod - def read(self) -> bytes: - pass - @abstractmethod def decrypt_data_encryption_key(self, unwrapped: UnwrappedEncryptedExportTarball) -> bytes: pass @@ -234,22 +226,19 @@ class LocalFileDecryptor(Decryptor): """ def __init__(self, fp: IO[bytes]): - self.__fp = fp + self.__key = fp.read() @classmethod def from_bytes(cls, b: bytes) -> LocalFileDecryptor: return cls(io.BytesIO(b)) - def read(self) -> bytes: - return self.__fp.read() - def decrypt_data_encryption_key(self, unwrapped: UnwrappedEncryptedExportTarball) -> bytes: """ Decrypt the encrypted data encryption key used to encrypt the actual export JSON. """ # Compare the public and private key, to ensure that they are a match. - private_key_pem = self.__fp.read() + private_key_pem = self.__key private_key = serialization.load_pem_private_key( private_key_pem, password=None, @@ -286,17 +275,14 @@ class GCPKMSDecryptor(Decryptor): """ def __init__(self, fp: IO[bytes]): - self.__fp = fp + self.__key = fp.read() @classmethod def from_bytes(cls, b: bytes) -> GCPKMSDecryptor: return cls(io.BytesIO(b)) - def read(self) -> bytes: - return self.__fp.read() - def decrypt_data_encryption_key(self, unwrapped: UnwrappedEncryptedExportTarball) -> bytes: - gcp_kms_config_bytes = self.__fp.read() + gcp_kms_config_bytes = self.__key # Read the user supplied configuration into the proper format. gcp_kms_config_json = orjson.loads(gcp_kms_config_bytes) @@ -345,3 +331,13 @@ def decrypt_encrypted_tarball(tarball: IO[bytes], decryptor: Decryptor) -> bytes decrypted_dek = decryptor.decrypt_data_encryption_key(unwrapped) fernet = Fernet(decrypted_dek) return fernet.decrypt(unwrapped.encrypted_json_blob) + + +class EncryptorDecryptorPair: + """ + An Encryptor and Decryptor that use paired public and private keys, respectively. 
+ """ + + def __init__(self, encryptor: Encryptor, decryptor: Decryptor): + self.encryptor = encryptor + self.decryptor = decryptor diff --git a/src/sentry/backup/exports.py b/src/sentry/backup/exports.py index 5fefc80354e7cc..8ff011dc4cc6c0 100644 --- a/src/sentry/backup/exports.py +++ b/src/sentry/backup/exports.py @@ -5,12 +5,15 @@ # We have to use the default JSON interface to enable pretty-printing on export. When loading JSON, # we still use the one from `sentry.utils`, imported as `sentry_json` below. import json as builtin_json # noqa: S003 +from abc import ABC, abstractmethod from typing import IO import orjson -from sentry.backup.crypto import Encryptor, create_encrypted_export_tarball +from sentry.backup.crypto import Encryptor, EncryptorDecryptorPair, create_encrypted_export_tarball from sentry.backup.dependencies import ( + ImportKind, + NormalizedModelName, PrimaryKeyMap, dependencies, get_model_name, @@ -20,6 +23,7 @@ from sentry.backup.scopes import ExportScope from sentry.backup.services.import_export.model import ( RpcExportError, + RpcExportOk, RpcExportScope, RpcFilter, RpcPrimaryKeyMap, @@ -41,6 +45,69 @@ def __init__(self, context: RpcExportError) -> None: self.context = context +class ExportCheckpointerError(Exception): + pass + + +class ExportCheckpointer(ABC): + """ + For very large exports, the exporting environment may fall over half-way through the process: + the thread running it may hit some timeout, or it may OOM, or fail for some other ephemeral + reason. To help in such situations, we'd like an API for saving "checkpoints" during the export. + + This class provides per-model checkpointing support for exports. Since there is a topologically + sorted order of models being exported, as we move through this list, we can save the exported + JSON for each kind of model in order to some stable media (disk, GCP, etc). If there is a + failure late in the export process, when it is retried, the exporter can check if that + particular model already exists in the checkpointer's cache, thereby avoiding redoing the work + of pulling the models from the database, processing them, etc. This ensures that in most retry + situations, we can quickly "re-ingest" already-exported models in memory and pick up where we + left off. + """ + + def _parse_cached_json(self, json_data: bytes) -> RpcExportOk | None: + max_pk = 0 + pk_map = PrimaryKeyMap() + models = orjson.loads(json_data) + for model in models: + model_name = model.get("model", None) + pk = model.get("pk", None) + if model_name is None or pk is None: + raise ExportCheckpointerError("Improperly formatted entry") + + pk_map.insert(model_name, pk, pk, ImportKind.Inserted) + if pk > max_pk: + max_pk = pk + + return RpcExportOk( + mapped_pks=RpcPrimaryKeyMap.into_rpc(pk_map), max_pk=max_pk, json_data=json_data + ) + + @abstractmethod + def get(self, model_name: NormalizedModelName) -> RpcExportOk | None: + pass + + @abstractmethod + def add(self, model_name: NormalizedModelName, json_data: str) -> None: + pass + + +class NoopExportCheckpointer(ExportCheckpointer): + """ + A noop checkpointer - that is, it doesn't write or read any checkpoints, always returning None. + This means that no checkpointing ever occurs. 
+ """ + + def __init__(self, crypto: EncryptorDecryptorPair | None, printer: Printer): + pass + + def get(self, model_name: NormalizedModelName) -> RpcExportOk | None: + return None + + def add(self, model_name: NormalizedModelName, json_data: str) -> None: + return None + + def _export( dest: IO[bytes], scope: ExportScope, @@ -49,6 +116,7 @@ def _export( indent: int = 2, filter_by: Filter | None = None, printer: Printer, + checkpointer: ExportCheckpointer | None = None, ): """ Exports core data for the Sentry installation. @@ -68,6 +136,7 @@ def _export( printer.echo(errText, err=True) raise RuntimeError(errText) + cache = checkpointer if checkpointer is not None else NoopExportCheckpointer(None, printer) json_export = [] pk_map = PrimaryKeyMap() allowed_relocation_scopes = scope.value @@ -119,13 +188,18 @@ def _export( dep_models = {get_model_name(d) for d in model_relations.get_dependencies_for_relocation()} export_by_model = ImportExportService.get_exporter_for_model(model) - result = export_by_model( - export_model_name=str(model_name), - scope=RpcExportScope.into_rpc(scope), - from_pk=0, - filter_by=[RpcFilter.into_rpc(f) for f in filters], - pk_map=RpcPrimaryKeyMap.into_rpc(pk_map.partition(dep_models)), - indent=indent, + cached_result = cache.get(model_name) + result = ( + cached_result + if cached_result is not None + else export_by_model( + export_model_name=str(model_name), + scope=RpcExportScope.into_rpc(scope), + from_pk=0, + filter_by=[RpcFilter.into_rpc(f) for f in filters], + pk_map=RpcPrimaryKeyMap.into_rpc(pk_map.partition(dep_models)), + indent=indent, + ) ) if isinstance(result, RpcExportError): @@ -133,11 +207,14 @@ def _export( raise ExportingError(result) pk_map.extend(result.mapped_pks.from_rpc()) + json_models = orjson.loads(result.json_data) + if cached_result is None: + cache.add(model_name, json_models) # TODO(getsentry/team-ospo#190): Since the structure of this data is very predictable (an # array of serialized model objects), we could probably avoid re-ingesting the JSON string # as a future optimization. 
- for json_model in orjson.loads(result.json_data): + for json_model in json_models: json_export.append(json_model) # If no `encryptor` argument was passed in, this is an unencrypted export, so we can just dump @@ -158,6 +235,7 @@ def export_in_user_scope( user_filter: set[str] | None = None, indent: int = 2, printer: Printer, + checkpointer: ExportCheckpointer | None = None, ): """ Perform an export in the `User` scope, meaning that only models with `RelocationScope.User` will @@ -174,6 +252,7 @@ def export_in_user_scope( filter_by=Filter(User, "username", user_filter) if user_filter is not None else None, indent=indent, printer=printer, + checkpointer=checkpointer, ) @@ -184,6 +263,7 @@ def export_in_organization_scope( org_filter: set[str] | None = None, indent: int = 2, printer: Printer, + checkpointer: ExportCheckpointer | None = None, ): """ Perform an export in the `Organization` scope, meaning that only models with @@ -201,6 +281,7 @@ def export_in_organization_scope( filter_by=Filter(Organization, "slug", org_filter) if org_filter is not None else None, indent=indent, printer=printer, + checkpointer=checkpointer, ) @@ -210,6 +291,7 @@ def export_in_config_scope( encryptor: Encryptor | None = None, indent: int = 2, printer: Printer, + checkpointer: ExportCheckpointer | None = None, ): """ Perform an export in the `Config` scope, meaning that only models directly related to the global @@ -226,6 +308,7 @@ def export_in_config_scope( filter_by=Filter(User, "pk", import_export_service.get_all_globally_privileged_users()), indent=indent, printer=printer, + checkpointer=checkpointer, ) @@ -235,6 +318,7 @@ def export_in_global_scope( encryptor: Encryptor | None = None, indent: int = 2, printer: Printer, + checkpointer: ExportCheckpointer | None = None, ): """ Perform an export in the `Global` scope, meaning that all models will be exported from the @@ -246,4 +330,5 @@ def export_in_global_scope( encryptor=encryptor, indent=indent, printer=printer, + checkpointer=checkpointer, ) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 841b87ba0c8500..a06966905e2e38 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -26,6 +26,7 @@ from sentry.conf.types.role_dict import RoleDict from sentry.conf.types.sdk_config import ServerSdkConfig from sentry.conf.types.sentry_config import SentryMode +from sentry.conf.types.service_options import ServiceOptions from sentry.utils import json # NOQA (used in getsentry config) from sentry.utils.celery import crontab_with_minute_jitter, make_split_task_queues from sentry.utils.types import Type, type_from_value @@ -322,7 +323,6 @@ def env( # This is because CommonMiddleware Sets the Content-Length header for non-streaming responses. MIDDLEWARE: tuple[str, ...] 
= ( "csp.middleware.CSPMiddleware", - "sentry.middleware.flag.FlagMiddleware", "sentry.middleware.health.HealthCheck", "sentry.middleware.security.SecurityHeadersMiddleware", "sentry.middleware.env.SentryEnvMiddleware", @@ -401,6 +401,7 @@ def env( "sentry.users", "sentry.sentry_apps", "sentry.integrations", + "sentry.notifications", "sentry.flags", "sentry.monitors", "sentry.uptime", @@ -2492,7 +2493,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SENTRY_SELF_HOSTED_ERRORS_ONLY = False # only referenced in getsentry to provide the stable beacon version # updated with scripts/bump-version.sh -SELF_HOSTED_STABLE_VERSION = "24.10.0" +SELF_HOSTED_STABLE_VERSION = "24.11.1" # Whether we should look at X-Forwarded-For header or not # when checking REMOTE_ADDR ip addresses @@ -2881,6 +2882,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "transactions-subscription-results": "default", "generic-metrics-subscription-results": "default", "metrics-subscription-results": "default", + "eap-spans-subscription-results": "default", "ingest-events": "default", "ingest-feedback-events": "default", "ingest-feedback-events-dlq": "default", @@ -3066,6 +3068,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT = None ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT = None ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT_FORCE = False +ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL = False if int(PG_VERSION.split(".", maxsplit=1)[0]) < 12: # In v0.6 of django-pg-zero-downtime-migrations this settings is deprecated for PostreSQLv12+ @@ -3159,12 +3162,12 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: SENTRY_SNOWFLAKE_EPOCH_START = datetime(2022, 8, 8, 0, 0).timestamp() SENTRY_USE_SNOWFLAKE = False -SENTRY_DEFAULT_LOCKS_BACKEND_OPTIONS = { +SENTRY_DEFAULT_LOCKS_BACKEND_OPTIONS: ServiceOptions = { "path": "sentry.utils.locking.backends.redis.RedisLockBackend", "options": {"cluster": "default"}, } -SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS = { +SENTRY_POST_PROCESS_LOCKS_BACKEND_OPTIONS: ServiceOptions = { "path": "sentry.utils.locking.backends.redis.RedisLockBackend", "options": {"cluster": "default"}, } @@ -3242,7 +3245,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # lost as a result of toggling this setting. 
SENTRY_REPLAYS_ATTEMPT_LEGACY_FILESTORE_LOOKUP = True -SENTRY_FEATURE_ADOPTION_CACHE_OPTIONS = { +SENTRY_FEATURE_ADOPTION_CACHE_OPTIONS: ServiceOptions = { "path": "sentry.models.featureadoption.FeatureAdoptionRedisBackend", "options": {"cluster": "default"}, } diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py index f486e4c3f3469f..59ae1228343494 100644 --- a/src/sentry/conf/types/kafka_definition.py +++ b/src/sentry/conf/types/kafka_definition.py @@ -26,6 +26,7 @@ class Topic(Enum): TRANSACTIONS_SUBSCRIPTIONS_RESULTS = "transactions-subscription-results" GENERIC_METRICS_SUBSCRIPTIONS_RESULTS = "generic-metrics-subscription-results" METRICS_SUBSCRIPTIONS_RESULTS = "metrics-subscription-results" + EAP_SPANS_SUBSCRIPTIONS_RESULTS = "eap-spans-subscription-results" INGEST_EVENTS = "ingest-events" INGEST_EVENTS_DLQ = "ingest-events-dlq" INGEST_FEEDBACK_EVENTS = "ingest-feedback-events" @@ -48,7 +49,6 @@ class Topic(Enum): MONITORS_CLOCK_TICK = "monitors-clock-tick" MONITORS_CLOCK_TASKS = "monitors-clock-tasks" MONITORS_INCIDENT_OCCURRENCES = "monitors-incident-occurrences" - UPTIME_CONFIG = "uptime-configs" UPTIME_RESULTS = "uptime-results" UPTIME_CONFIGS = "uptime-configs" EVENTSTREAM_GENERIC = "generic-events" diff --git a/src/sentry/conf/types/service_options.py b/src/sentry/conf/types/service_options.py new file mode 100644 index 00000000000000..5c1bccb44dbbb6 --- /dev/null +++ b/src/sentry/conf/types/service_options.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +from typing import TypedDict + + +class ServiceOptions(TypedDict, total=False): + path: str + options: dict[str, object] + executor: ServiceOptions diff --git a/src/sentry/constants.py b/src/sentry/constants.py index b6d53e557f2f4d..fc526aa8d11cce 100644 --- a/src/sentry/constants.py +++ b/src/sentry/constants.py @@ -706,7 +706,6 @@ class InsightModules(Enum): SCRAPE_JAVASCRIPT_DEFAULT = True TRUSTED_RELAYS_DEFAULT = None JOIN_REQUESTS_DEFAULT = True -AI_SUGGESTED_SOLUTION = True HIDE_AI_FEATURES_DEFAULT = False GITHUB_COMMENT_BOT_DEFAULT = True ISSUE_ALERTS_THREAD_DEFAULT = True @@ -779,17 +778,17 @@ class InsightModules(Enum): HEALTH_CHECK_GLOBS = [ "*healthcheck*", "*heartbeat*", - "*/health", - "*/healthy", - "*/healthz", - "*/_health", - r"*/\[_health\]", - "*/live", - "*/livez", - "*/ready", - "*/readyz", - "*/ping", - "*/up", + "*/health{/,}", + "*/healthy{/,}", + "*/healthz{/,}", + "*/_health{/,}", + r"*/\[_health\]{/,}", + "*/live{/,}", + "*/livez{/,}", + "*/ready{/,}", + "*/readyz{/,}", + "*/ping{/,}", + "*/up{/,}", ] diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 02c31010a58a91..bb21f85fadb759 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -301,6 +301,12 @@ def ingest_transactions_options() -> list[click.Option]: "click_options": multiprocessing_options(default_max_batch_size=100), "static_args": {"dataset": "metrics"}, }, + "eap-spans-subscription-results": { + "topic": Topic.EAP_SPANS_SUBSCRIPTIONS_RESULTS, + "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", + "click_options": multiprocessing_options(default_max_batch_size=100), + "static_args": {"dataset": "events_analytics_platform"}, + }, "ingest-events": { "topic": Topic.INGEST_EVENTS, "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", diff --git a/src/sentry/consumers/synchronized.py b/src/sentry/consumers/synchronized.py index 5c72a658f574ed..9b2efe1d128705 
100644 --- a/src/sentry/consumers/synchronized.py +++ b/src/sentry/consumers/synchronized.py @@ -300,3 +300,7 @@ def close(self, timeout: float | None = None) -> None: @property def closed(self) -> bool: return self.__consumer.closed + + @property + def member_id(self) -> str: + return self.__consumer.member_id diff --git a/src/sentry/db/postgres/schema.py b/src/sentry/db/postgres/schema.py index 65e500578163c8..385feff6591a91 100644 --- a/src/sentry/db/postgres/schema.py +++ b/src/sentry/db/postgres/schema.py @@ -17,13 +17,9 @@ ) unsafe_mapping = { - Unsafe.ADD_COLUMN_DEFAULT: ( - "Adding {}.{} as column with a default is safe, but you need to take additional steps.\n" - "Follow this guide: https://develop.sentry.dev/database-migrations/#adding-columns-with-a-default" - ), Unsafe.ADD_COLUMN_NOT_NULL: ( - "Adding {}.{} as a not null column is unsafe.\n" - "More info: https://develop.sentry.dev/database-migrations/#adding-not-null-to-columns" + "Adding {}.{} as a not null column with no default is unsafe. Provide a default using db_default. \n" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#adding-columns-with-a-default" ), Unsafe.ALTER_COLUMN_TYPE: ( "Altering the type of column {}.{} in this way is unsafe\n" @@ -92,23 +88,27 @@ def alter_db_table(self, model, old_db_table, new_db_table): "More info here: https://develop.sentry.dev/database-migrations/#renaming-tables" ) - def delete_model(self, model): + def delete_model(self, model, is_safe=False): """ It's never safe to delete a model using the standard migration process """ - raise UnsafeOperationException( - f"Deleting the {model.__name__} model is unsafe.\n" - "More info here: https://develop.sentry.dev/database-migrations/#deleting-tables" - ) + if not is_safe: + raise UnsafeOperationException( + f"Deleting the {model.__name__} model is unsafe.\n" + "More info here: https://develop.sentry.dev/database-migrations/#deleting-tables" + ) + super(DatabaseSchemaEditorMixin, self).delete_model(model) - def remove_field(self, model, field): + def remove_field(self, model, field, is_safe=False): """ It's never safe to remove a field using the standard migration process """ - raise UnsafeOperationException( - f"Removing the {model.__name__}.{field.name} field is unsafe.\n" - "More info here: https://develop.sentry.dev/database-migrations/#deleting-columns" - ) + if not is_safe: + raise UnsafeOperationException( + f"Removing the {model.__name__}.{field.name} field is unsafe.\n" + "More info here: https://develop.sentry.dev/database-migrations/#deleting-columns" + ) + super(DatabaseSchemaEditorMixin, self).remove_field(model, field) def execute(self, sql, params=()): if sql is DUMMY_SQL: @@ -121,29 +121,34 @@ def execute(self, sql, params=()): else: statements.append(sql) for statement in statements: + idempotent_condition = None if isinstance(statement, PGLock): use_timeouts = statement.use_timeouts disable_statement_timeout = statement.disable_statement_timeout + idempotent_condition = statement.idempotent_condition statement = statement.sql elif isinstance(statement, Statement) and isinstance(statement.template, PGLock): use_timeouts = statement.template.use_timeouts disable_statement_timeout = statement.template.disable_statement_timeout + if statement.template.idempotent_condition is not None: + idempotent_condition = statement.template.idempotent_condition % statement.parts statement = Statement(statement.template.sql, **statement.parts) else: use_timeouts = False disable_statement_timeout = 
False - if use_timeouts: - with self._set_operation_timeout(self.STATEMENT_TIMEOUT, self.LOCK_TIMEOUT): - PostgresDatabaseSchemaEditor.execute(self, statement, params) - elif self.LOCK_TIMEOUT_FORCE: - with self._set_operation_timeout(lock_timeout=self.LOCK_TIMEOUT): + if not self._skip_applied(idempotent_condition): + if use_timeouts: + with self._set_operation_timeout(self.STATEMENT_TIMEOUT, self.LOCK_TIMEOUT): + PostgresDatabaseSchemaEditor.execute(self, statement, params) + elif self.LOCK_TIMEOUT_FORCE: + with self._set_operation_timeout(lock_timeout=self.LOCK_TIMEOUT): + PostgresDatabaseSchemaEditor.execute(self, statement, params) + elif disable_statement_timeout and self.FLEXIBLE_STATEMENT_TIMEOUT: + with self._set_operation_timeout(self.ZERO_TIMEOUT): + PostgresDatabaseSchemaEditor.execute(self, statement, params) + else: PostgresDatabaseSchemaEditor.execute(self, statement, params) - elif disable_statement_timeout and self.FLEXIBLE_STATEMENT_TIMEOUT: - with self._set_operation_timeout(self.ZERO_TIMEOUT): - PostgresDatabaseSchemaEditor.execute(self, statement, params) - else: - PostgresDatabaseSchemaEditor.execute(self, statement, params) @contextmanager def _set_operation_timeout(self, statement_timeout=None, lock_timeout=None): @@ -152,27 +157,27 @@ def _set_operation_timeout(self, statement_timeout=None, lock_timeout=None): previous_lock_timeout = self.ZERO_TIMEOUT else: with self.connection.cursor() as cursor: - cursor.execute(self.sql_get_statement_timeout) + cursor.execute(self._sql_get_statement_timeout) (previous_statement_timeout,) = cursor.fetchone() - cursor.execute(self.sql_get_lock_timeout) + cursor.execute(self._sql_get_lock_timeout) (previous_lock_timeout,) = cursor.fetchone() if statement_timeout is not None: PostgresDatabaseSchemaEditor.execute( - self, self.sql_set_statement_timeout % {"statement_timeout": statement_timeout} + self, self._sql_set_statement_timeout % {"statement_timeout": statement_timeout} ) if lock_timeout is not None: PostgresDatabaseSchemaEditor.execute( - self, self.sql_set_lock_timeout % {"lock_timeout": lock_timeout} + self, self._sql_set_lock_timeout % {"lock_timeout": lock_timeout} ) yield if statement_timeout is not None: PostgresDatabaseSchemaEditor.execute( self, - self.sql_set_statement_timeout % {"statement_timeout": previous_statement_timeout}, + self._sql_set_statement_timeout % {"statement_timeout": previous_statement_timeout}, ) if lock_timeout is not None: PostgresDatabaseSchemaEditor.execute( - self, self.sql_set_lock_timeout % {"lock_timeout": previous_lock_timeout} + self, self._sql_set_lock_timeout % {"lock_timeout": previous_lock_timeout} ) diff --git a/src/sentry/db/router.py b/src/sentry/db/router.py index 5668c49b9ac3da..26cbd56d8d1969 100644 --- a/src/sentry/db/router.py +++ b/src/sentry/db/router.py @@ -71,6 +71,8 @@ class SiloRouter: "sentry_projectavatar": SiloMode.REGION, "sentry_pagerdutyservice": SiloMode.REGION, "sentry_notificationsetting": SiloMode.CONTROL, + "authprovider_duplicate": SiloMode.CONTROL, + "authidentity_duplicate": SiloMode.CONTROL, } """ When we remove models, we are no longer able to resolve silo assignments diff --git a/src/sentry/deletions/defaults/alert_rule_trigger_action.py b/src/sentry/deletions/defaults/alert_rule_trigger_action.py index ed7c6307839d47..dc82647bd210aa 100644 --- a/src/sentry/deletions/defaults/alert_rule_trigger_action.py +++ b/src/sentry/deletions/defaults/alert_rule_trigger_action.py @@ -6,7 +6,7 @@ class 
AlertRuleTriggerActionDeletionTask(ModelDeletionTask[AlertRuleTriggerActio manager_name = "objects_for_deletion" def get_child_relations(self, instance: AlertRuleTriggerAction) -> list[BaseRelation]: - from sentry.models.notificationmessage import NotificationMessage + from sentry.notifications.models.notificationmessage import NotificationMessage return [ ModelRelation(NotificationMessage, {"trigger_action_id": instance.id}), diff --git a/src/sentry/deletions/defaults/project.py b/src/sentry/deletions/defaults/project.py index fb06ba353c6fc5..d1d4874652e3df 100644 --- a/src/sentry/deletions/defaults/project.py +++ b/src/sentry/deletions/defaults/project.py @@ -93,7 +93,7 @@ def get_child_relations(self, instance: Project) -> list[BaseRelation]: relations.append( ModelRelation( AlertRule, - {"snuba_query__subscriptions__project": instance, "include_all_projects": False}, + {"snuba_query__subscriptions__project": instance}, ) ) diff --git a/src/sentry/deletions/defaults/rulefirehistory.py b/src/sentry/deletions/defaults/rulefirehistory.py index 67e4f54569cae4..f71a8964dedd3b 100644 --- a/src/sentry/deletions/defaults/rulefirehistory.py +++ b/src/sentry/deletions/defaults/rulefirehistory.py @@ -4,7 +4,7 @@ class RuleFireHistoryDeletionTask(ModelDeletionTask[RuleFireHistory]): def get_child_relations(self, instance: RuleFireHistory) -> list[BaseRelation]: - from sentry.models.notificationmessage import NotificationMessage + from sentry.notifications.models.notificationmessage import NotificationMessage return [ ModelRelation(NotificationMessage, {"rule_fire_history_id": instance.id}), diff --git a/src/sentry/discover/endpoints/discover_homepage_query.py b/src/sentry/discover/endpoints/discover_homepage_query.py index 44abebde7c399f..cbd3c68698b06f 100644 --- a/src/sentry/discover/endpoints/discover_homepage_query.py +++ b/src/sentry/discover/endpoints/discover_homepage_query.py @@ -71,7 +71,7 @@ def put(self, request: Request, organization) -> Response: serializer = DiscoverSavedQuerySerializer( # HACK: To ensure serializer data is valid, pass along a name temporarily data={**request.data, "name": "New Query"}, - context={"params": params}, + context={"params": params, "organization": organization, "user": request.user}, ) if not serializer.is_valid(): raise ParseError(serializer.errors) diff --git a/src/sentry/discover/endpoints/discover_saved_queries.py b/src/sentry/discover/endpoints/discover_saved_queries.py index 3f28dea8a7d166..009b919ad43862 100644 --- a/src/sentry/discover/endpoints/discover_saved_queries.py +++ b/src/sentry/discover/endpoints/discover_saved_queries.py @@ -175,7 +175,7 @@ def post(self, request: Request, organization) -> Response: serializer = DiscoverSavedQuerySerializer( data=request.data, - context={"params": params}, + context={"params": params, "organization": organization, "user": request.user}, ) if not serializer.is_valid(): diff --git a/src/sentry/discover/endpoints/discover_saved_query_detail.py b/src/sentry/discover/endpoints/discover_saved_query_detail.py index d2529fd74a1ce4..f1a40778355175 100644 --- a/src/sentry/discover/endpoints/discover_saved_query_detail.py +++ b/src/sentry/discover/endpoints/discover_saved_query_detail.py @@ -114,7 +114,7 @@ def put(self, request: Request, organization, query) -> Response: serializer = DiscoverSavedQuerySerializer( data=request.data, - context={"params": params}, + context={"params": params, "organization": organization, "user": request.user}, ) if not serializer.is_valid(): return Response(serializer.errors, 
status=400) diff --git a/src/sentry/discover/endpoints/serializers.py b/src/sentry/discover/endpoints/serializers.py index 52bb22efdfe6ff..3f1ca91c671b44 100644 --- a/src/sentry/discover/endpoints/serializers.py +++ b/src/sentry/discover/endpoints/serializers.py @@ -1,11 +1,13 @@ import re from collections.abc import Sequence +import sentry_sdk from django.db.models import Count, Max, QuerySet from drf_spectacular.utils import extend_schema_serializer from rest_framework import serializers from rest_framework.serializers import ListField +from sentry import features from sentry.api.fields.empty_integer import EmptyIntegerField from sentry.api.utils import get_date_range_from_params from sentry.constants import ALL_ACCESS_PROJECTS @@ -16,9 +18,12 @@ TeamKeyTransaction, ) from sentry.exceptions import InvalidParams, InvalidSearchQuery +from sentry.models.organization import Organization from sentry.models.team import Team from sentry.search.events.builder.discover import DiscoverQueryBuilder +from sentry.search.events.types import QueryBuilderConfig from sentry.snuba.dataset import Dataset +from sentry.users.models import User from sentry.utils.dates import parse_stats_period, validate_interval from sentry.utils.snuba import SENTRY_SNUBA_MAP @@ -159,8 +164,11 @@ class DiscoverSavedQuerySerializer(serializers.Serializer): ) queryDataset = serializers.ChoiceField( choices=DiscoverSavedQueryTypes.as_text_choices(), - default=DiscoverSavedQueryTypes.get_type_name(DiscoverSavedQueryTypes.DISCOVER), - help_text="The dataset you would like to query.", + default=DiscoverSavedQueryTypes.get_type_name(DiscoverSavedQueryTypes.ERROR_EVENTS), + help_text="""The dataset you would like to query. Allowed values are: +- error-events +- transaction-like +""", ) start = serializers.DateTimeField( required=False, allow_null=True, help_text="The saved start time for this saved query." @@ -253,11 +261,58 @@ class DiscoverSavedQuerySerializer(serializers.Serializer): 2: {"groupby", "rollup", "aggregations", "conditions", "limit"}, } + def get_metrics_features( + self, organization: Organization | None, user: User | None + ) -> dict[str, bool | None]: + if organization is None or user is None: + return {} + + feature_names = [ + "organizations:mep-rollout-flag", + "organizations:dynamic-sampling", + "organizations:performance-use-metrics", + "organizations:dashboards-mep", + ] + batch_features = features.batch_has( + feature_names, + organization=organization, + actor=user, + ) + + return ( + batch_features.get(f"organization:{organization.id}", {}) + if batch_features is not None + else { + feature_name: features.has(feature_name, organization=organization, actor=user) + for feature_name in feature_names + } + ) + def validate_projects(self, projects): from sentry.api.validators import validate_project_ids return validate_project_ids(projects, self.context["params"]["project_id"]) + def validate_queryDataset(self, value): + dataset = DiscoverSavedQueryTypes.get_id_for_type_name(value) + if dataset == DiscoverSavedQueryTypes.DISCOVER or dataset is None: + sentry_sdk.set_context( + "discover", + { + "org_slug": self.context["organization"].slug, + }, + ) + sentry_sdk.capture_message("Created or updated saved query with discover dataset.") + if features.has( + "organizations:deprecate-discover-widget-type", + self.context["organization"], + actor=self.context["user"], + ): + raise serializers.ValidationError( + "Attribute value `discover` is deprecated. 
Please use `error-events` or `transaction-like`" + ) + return dataset + def validate(self, data): query = {} query_keys = [ @@ -305,6 +360,18 @@ def validate(self, data): 0, ) try: + batch_features = self.get_metrics_features( + self.context.get("organization"), self.context.get("user") + ) + use_metrics = bool( + ( + batch_features.get("organizations:mep-rollout-flag", False) + and batch_features.get("organizations:dynamic-sampling", False) + ) + or batch_features.get("organizations:performance-use-metrics", False) + or batch_features.get("organizations:dashboards-mep", False) + ) + equations, columns = categorize_columns(query["fields"]) builder = DiscoverQueryBuilder( dataset=Dataset.Discover, @@ -313,19 +380,18 @@ def validate(self, data): selected_columns=columns, equations=equations, orderby=query.get("orderby"), + config=QueryBuilderConfig(has_metrics=use_metrics), ) builder.get_snql_query().validate() except (InvalidSearchQuery, ArithmeticError) as err: raise serializers.ValidationError(f"Cannot save invalid query: {err}") - dataset = DiscoverSavedQueryTypes.get_id_for_type_name(data["queryDataset"]) - return { "name": data["name"], "project_ids": data["projects"], "query": query, "version": version, - "query_dataset": dataset, + "query_dataset": data["queryDataset"], } def validate_version_fields(self, version, query): diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py index ab6a4c5c060466..a1f5115f57cc8c 100644 --- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py +++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py @@ -401,6 +401,10 @@ def adjust_sample_rates_of_projects( projects_with_counts = { project_id: count_per_root for project_id, count_per_root, _, _ in projects_with_tx_count } + # The rebalancing will not work (or would make sense) when we have only projects with zero-counts. + if not any(projects_with_counts.values()): + return + # Since we don't mind about strong consistency, we query a replica of the main database with the possibility of # having out of date information. This is a trade-off we accept, since we work under the assumption that eventually # the projects of an org will be replicated consistently across replicas, because no org should continue to create diff --git a/src/sentry/dynamic_sampling/tasks/helpers/recalibrate_orgs.py b/src/sentry/dynamic_sampling/tasks/helpers/recalibrate_orgs.py index e1a60995eeaebc..ff4444c013743e 100644 --- a/src/sentry/dynamic_sampling/tasks/helpers/recalibrate_orgs.py +++ b/src/sentry/dynamic_sampling/tasks/helpers/recalibrate_orgs.py @@ -93,7 +93,8 @@ def compute_adjusted_factor( Calculates an adjustment factor in order to bring the effective sample rate close to the target sample rate. """ # If the factor is outside the range, we can't do much besides bailing. - if prev_factor <= 0.0: + # We also bail, when we don't have a valid effective sample rate. 
+ if prev_factor <= 0.0 or effective_sample_rate == 0.0: return None # This formula aims at scaling the factor proportionally to the ratio of the sample rate we are targeting compared diff --git a/src/sentry/dynamic_sampling/tasks/sliding_window_org.py b/src/sentry/dynamic_sampling/tasks/sliding_window_org.py index ca7f5d202f42ec..3e5babb8c398cc 100644 --- a/src/sentry/dynamic_sampling/tasks/sliding_window_org.py +++ b/src/sentry/dynamic_sampling/tasks/sliding_window_org.py @@ -29,8 +29,8 @@ queue="dynamicsampling", default_retry_delay=5, max_retries=5, - soft_time_limit=3 * 60, # 3 minutes - time_limit=3 * 60 + 5, + soft_time_limit=15 * 60, # 15 minutes + time_limit=15 * 60 + 5, silo_mode=SiloMode.REGION, ) @dynamic_sampling_task_with_context(max_task_execution=MAX_TASK_SECONDS) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index f0cdbda79b8f12..bbb6d1dd9b3a07 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -47,6 +47,7 @@ from sentry.eventtypes import EventType from sentry.eventtypes.transaction import TransactionEvent from sentry.exceptions import HashDiscarded +from sentry.features.rollout import in_rollout_group from sentry.grouping.api import ( NULL_GROUPHASH_INFO, GroupHashInfo, @@ -70,6 +71,9 @@ ) from sentry.grouping.variants import BaseVariant from sentry.ingest.inbound_filters import FilterStatKeys +from sentry.ingest.transaction_clusterer.datasource.redis import ( + record_transaction_name as record_transaction_name_for_clustering, +) from sentry.integrations.tasks.kick_off_status_syncs import kick_off_status_syncs from sentry.issues.grouptype import ErrorGroupType from sentry.issues.issue_occurrence import IssueOccurrence @@ -99,6 +103,8 @@ from sentry.net.http import connection_from_url from sentry.plugins.base import plugins from sentry.quotas.base import index_data_category +from sentry.receivers.features import record_event_processed +from sentry.receivers.onboarding import record_release_received, record_user_context_received from sentry.reprocessing2 import is_reprocessed_event from sentry.seer.signed_seer_api import make_signed_seer_api_request from sentry.signals import ( @@ -132,6 +138,8 @@ from sentry.utils.sdk import set_measurement from sentry.utils.tag_normalization import normalized_sdk_tag_from_event +from .utils.event_tracker import TransactionStageStatus, track_sampled_event + if TYPE_CHECKING: from sentry.eventstore.models import BaseEvent, Event @@ -2111,14 +2119,17 @@ def _get_severity_score(event: Event) -> tuple[float, str]: reason = "microservice_max_retry" update_severity_error_count() metrics.incr("issues.severity.error", tags={"reason": "max_retries"}) + logger.exception("Seer severity microservice max retries exceeded") except TimeoutError: reason = "microservice_timeout" update_severity_error_count() metrics.incr("issues.severity.error", tags={"reason": "timeout"}) + logger.exception("Seer severity microservice timeout") except Exception: reason = "microservice_error" update_severity_error_count() metrics.incr("issues.severity.error", tags={"reason": "unknown"}) + logger.exception("Seer severity microservice error") sentry_sdk.capture_exception() else: update_severity_error_count(reset=True) @@ -2512,6 +2523,34 @@ def _detect_performance_problems( ) +@sentry_sdk.tracing.trace +def _record_transaction_info(jobs: Sequence[Job], projects: ProjectsMapping) -> None: + """ + this function does what we do in post_process for transactions. 
if this option is + turned on, we do the actions here instead of in post_process, with the goal + eventually being to not run transactions through post_process + """ + for job in jobs: + try: + event = job["event"] + if not in_rollout_group("transactions.do_post_process_in_save", event.event_id): + continue + + project = event.project + with sentry_sdk.start_span(op="event_manager.record_transaction_name_for_clustering"): + record_transaction_name_for_clustering(project, event.data) + + # these are what the "transaction_processed" signal hooked into + # we should not use signals here, so call the recievers directly + # instead of sending a signal. we should consider potentially + # deleting these + record_event_processed(project, event) + record_user_context_received(project, event) + record_release_received(project, event) + except Exception: + sentry_sdk.capture_exception() + + class PerformanceJob(TypedDict, total=False): performance_problems: Sequence[PerformanceProblem] event: Event @@ -2577,6 +2616,8 @@ def _send_occurrence_to_platform(jobs: Sequence[Job], projects: ProjectsMapping) @sentry_sdk.tracing.trace def save_transaction_events(jobs: Sequence[Job], projects: ProjectsMapping) -> Sequence[Job]: + from .ingest.types import ConsumerType + organization_ids = {project.organization_id for project in projects.values()} organizations = {o.id: o for o in Organization.objects.get_many_from_cache(organization_ids)} @@ -2628,6 +2669,13 @@ def save_transaction_events(jobs: Sequence[Job], projects: ProjectsMapping) -> S with metrics.timer("save_transaction_events.eventstream_insert_many"): _eventstream_insert_many(jobs) + for job in jobs: + track_sampled_event( + job["event"].event_id, + ConsumerType.Transactions, + TransactionStageStatus.SNUBA_TOPIC_PUT, + ) + with metrics.timer("save_transaction_events.track_outcome_accepted_many"): _track_outcome_accepted_many(jobs) @@ -2637,6 +2685,9 @@ def save_transaction_events(jobs: Sequence[Job], projects: ProjectsMapping) -> S with metrics.timer("save_transaction_events.send_occurrence_to_platform"): _send_occurrence_to_platform(jobs, projects) + with metrics.timer("save_transaction_events.record_transaction_info"): + _record_transaction_info(jobs, projects) + return jobs diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py index bca5783584f736..67a49708ce2a03 100644 --- a/src/sentry/eventstore/models.py +++ b/src/sentry/eventstore/models.py @@ -572,6 +572,11 @@ def __getstate__(self) -> Mapping[str, Any]: state.pop("_groups_cache", None) return state + def __repr__(self): + return "".format( + id(self), self.event_id + ) + @property def data(self) -> NodeData: return self._data diff --git a/src/sentry/eventstore/processing/__init__.py b/src/sentry/eventstore/processing/__init__.py index 8666683109849c..04034c01126772 100644 --- a/src/sentry/eventstore/processing/__init__.py +++ b/src/sentry/eventstore/processing/__init__.py @@ -1,4 +1,3 @@ -import sentry_sdk from django.conf import settings from sentry.eventstore.processing.base import EventProcessingStore @@ -15,20 +14,11 @@ settings.SENTRY_TRANSACTION_PROCESSING_STORE and settings.SENTRY_TRANSACTION_PROCESSING_STORE_OPTIONS ): - try: - transaction_processing_store = LazyServiceWrapper( - EventProcessingStore, - settings.SENTRY_TRANSACTION_PROCESSING_STORE, - settings.SENTRY_TRANSACTION_PROCESSING_STORE_OPTIONS, - ) - except BaseException as e: - sentry_sdk.capture_exception(e) - transaction_processing_store = LazyServiceWrapper( - EventProcessingStore, - 
settings.SENTRY_EVENT_PROCESSING_STORE, - settings.SENTRY_EVENT_PROCESSING_STORE_OPTIONS, - ) - + transaction_processing_store = LazyServiceWrapper( + EventProcessingStore, + settings.SENTRY_TRANSACTION_PROCESSING_STORE, + settings.SENTRY_TRANSACTION_PROCESSING_STORE_OPTIONS, + ) else: transaction_processing_store = LazyServiceWrapper( EventProcessingStore, @@ -36,5 +26,4 @@ settings.SENTRY_EVENT_PROCESSING_STORE_OPTIONS, ) - __all__ = ["event_processing_store", "transaction_processing_store"] diff --git a/src/sentry/features/manager.py b/src/sentry/features/manager.py index 98c3eb8b72d589..accbc814a146f0 100644 --- a/src/sentry/features/manager.py +++ b/src/sentry/features/manager.py @@ -14,8 +14,8 @@ from sentry import options from sentry.users.services.user.model import RpcUser -from sentry.utils import flag as flag_manager from sentry.utils import metrics +from sentry.utils.flag import flag_pole_hook from sentry.utils.types import Dict from .base import Feature, FeatureHandlerStrategy @@ -285,7 +285,7 @@ def has(self, name: str, *args: Any, skip_entity: bool | None = False, **kwargs: tags={"feature": name, "result": rv}, sample_rate=sample_rate, ) - flag_manager.process_flag_result(name, rv) + flag_pole_hook(name, rv) return rv if self._entity_handler and not skip_entity: @@ -296,7 +296,7 @@ def has(self, name: str, *args: Any, skip_entity: bool | None = False, **kwargs: tags={"feature": name, "result": rv}, sample_rate=sample_rate, ) - flag_manager.process_flag_result(name, rv) + flag_pole_hook(name, rv) return rv rv = settings.SENTRY_FEATURES.get(feature.name, False) @@ -306,7 +306,7 @@ def has(self, name: str, *args: Any, skip_entity: bool | None = False, **kwargs: tags={"feature": name, "result": rv}, sample_rate=sample_rate, ) - flag_manager.process_flag_result(name, rv) + flag_pole_hook(name, rv) return rv # Features are by default disabled if no plugin or default enables them @@ -315,7 +315,7 @@ def has(self, name: str, *args: Any, skip_entity: bool | None = False, **kwargs: tags={"feature": name, "result": False}, sample_rate=sample_rate, ) - flag_manager.process_flag_result(name, False) + flag_pole_hook(name, False) return False except Exception as e: if in_random_rollout("features.error.capture_rate"): diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py index da1fc645becd7e..8f688d52cd6f68 100644 --- a/src/sentry/features/temporary.py +++ b/src/sentry/features/temporary.py @@ -80,12 +80,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:continuous-profiling-beta", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable stopping the ingestion of continuous profile for non-beta orgs manager.add("organizations:continuous-profiling-beta-ingest", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Enable continuous profiling ui - manager.add("organizations:continuous-profiling-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Display profile durations on the stats page manager.add("organizations:continuous-profiling-stats", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True) - # Enable the continuous profiling compatible redesign - manager.add("organizations:continuous-profiling-compat", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Delightful Developer Metrics (DDM): # Enables experimental WIP custom metrics related features manager.add("organizations:custom-metrics-experimental", 
OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) @@ -102,12 +98,16 @@ def register_temporary_features(manager: FeatureManager): # Enable metrics enhanced performance for AM2+ customers as they transition from AM2 to AM3 manager.add("organizations:dashboards-metrics-transition", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) manager.add("organizations:dashboards-span-metrics", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) - # Enable releases overlay on dashboard chart widgets - manager.add("organizations:dashboards-releases-on-charts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable table view on dashboards landing page + manager.add("organizations:dashboards-table-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable access protected editing of dashboards manager.add("organizations:dashboards-edit-access", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable share links for dashboards for sharing outside the org manager.add("organizations:dashboards-share", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable favouriting dashboards + manager.add("organizations:dashboards-favourite", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable the dashboard widget builder redesign UI + manager.add("organizations:dashboards-widget-builder-redesign", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable the dev toolbar PoC code for employees # Data Secrecy manager.add("organizations:data-secrecy", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) @@ -137,6 +137,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:escalating-metrics-backend", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable logging for failure rate subscription processor manager.add("organizations:failure-rate-metric-alert-logging", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Enable GenAI features such as Autofix and Issue Summary + manager.add("organizations:gen-ai-features", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable disabling gitlab integrations when broken is detected manager.add("organizations:gitlab-disable-on-broken", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Allow creating `GroupHashMetadata` records @@ -152,6 +154,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:integrations-feature-flag-integration", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Allow tenant type installations through issue alert actions manager.add("organizations:integrations-msteams-tenant", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Enable inviting billing members to organizations at the member limit. + manager.add("organizations:invite-billing", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=False, api_expose=False) # Enable inviting members to organizations. manager.add("organizations:invite-members", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=True) # Enable new invite members modal. 
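# Illustrative sketch (not part of this change): how a flag registered above, e.g.
# "organizations:gen-ai-features", is consumed at runtime. It mirrors the
# features.has / features.batch_has calls that appear elsewhere in this diff
# (Discover serializer, flags endpoints); the helper below and its arguments are
# hypothetical, only the flag name and the features API usage come from the diff.
def _gen_ai_enabled(organization, user) -> bool:
    from sentry import features

    # Single-flag check, identical in shape to the endpoint checks in this diff.
    return features.has("organizations:gen-ai-features", organization, actor=user)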
@@ -168,8 +172,10 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:issue-platform-deletion-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enables a toggle for entering the new issue details UI manager.add("organizations:issue-details-new-experience-toggle", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Enables access to the streamlined issue details UI + # Enables opt-in access to the streamlined issue details UI for all users of an organization manager.add("organizations:issue-details-streamline", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enables streamlined issue details UI for all users of an organization without opt-out + manager.add("organizations:issue-details-streamline-enforce", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Whether to allow issue only search on the issue list manager.add("organizations:issue-search-allow-postgres-only-search", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Whether to make a side/parallel query against events -> group_attributes when searching issues @@ -197,8 +203,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:messaging-integration-onboarding-project-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable threshold period in metric alert rule builder manager.add("organizations:metric-alert-threshold-period", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Enables the search bar for metrics samples list - manager.add("organizations:metrics-samples-list-search", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Migrate Orgs to new Azure DevOps Integration manager.add("organizations:migrate-azure-devops-integration", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable Session Stats down to a minute resolution @@ -238,8 +242,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:ownership-size-limit-large", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable xlarge ownership rule file size limit manager.add("organizations:ownership-size-limit-xlarge", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) - # Enable views for anomaly detection - manager.add("organizations:performance-anomaly-detection-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable mobile performance score calculation for transactions in relay manager.add("organizations:performance-calculate-mobile-perf-score-relay", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable performance change explorer panel on trends page @@ -255,6 +257,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:performance-db-main-thread-detector", OrganizationFeature, api_expose=False) # Enable Discover Saved Query dataset selector manager.add("organizations:performance-discover-dataset-selector", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable deprecate discover widget type + manager.add("organizations:deprecate-discover-widget-type", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable backend overriding and always making a fresh split decision manager.add("organizations:performance-discover-widget-split-override-save", 
OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable UI sending a discover split for widget @@ -344,6 +348,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:project-templates", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable the new quick start guide manager.add("organizations:quick-start-updates", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable new small design changes for the quick start guide GA + manager.add("organizations:quick-start-updates-ga", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable the new Related Events feature manager.add("organizations:related-events", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable related issues feature @@ -352,8 +358,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:relay-cardinality-limiter", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable the release details performance section manager.add("organizations:release-comparison-performance", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Fixes the next release resolution for semver releases - manager.add("organizations:releases-resolve-next-release-semver-fix", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # enable new release set_commits functionality manager.add("organizations:set-commits-updated", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable new release UI @@ -366,6 +370,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:reprocessing-v2", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False) # Enable Sentry's 2024 Rollback feature manager.add("organizations:sentry-rollback-2024", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable Sentry's 2024 Rollback toggle within organization settings + manager.add("organizations:sentry-rollback-settings", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable resolve in upcoming release # TODO(steve): Remove when we remove the feature from the UI manager.add("organizations:resolve-in-upcoming-release", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=True) @@ -446,8 +452,6 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:insights-use-eap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable access to insights metrics alerts manager.add("organizations:insights-alerts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) - # Enable domain view in Insights modules - manager.add("organizations:insights-domain-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable Related Issues table in Insights modules manager.add("organizations:insights-related-issues-table", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable access to Mobile Screens insights module @@ -525,6 +529,8 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:user-feedback-trace-section", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable view hierarchies options manager.add("organizations:view-hierarchies-options-dev", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Enable admin features on 
the new explore page + manager.add("organizations:visibility-explore-admin", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable the new explore page manager.add("organizations:visibility-explore-view", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable the dataset toggle on the new explore page @@ -533,12 +539,22 @@ def register_temporary_features(manager: FeatureManager): manager.add("organizations:widget-viewer-modal-minimap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enabled unresolved issue webhook for organization manager.add("organizations:webhooks-unresolved", OrganizationFeature, FeatureHandlerStrategy.OPTIONS, api_expose=True) + # Enable dual writing for metric alert issues (see: alerts create issues) + manager.add("organizations:workflow-engine-m3-dual-write", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Enable reading from new ACI tables for metric alert issues (see: alerts create issues) + manager.add("organizations:workflow-engine-m3-read", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Enable new workflow_engine UI (see: alerts create issues) + manager.add("organizations:workflow-engine-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Enable EventUniqueUserFrequencyConditionWithConditions special alert condition manager.add("organizations:event-unique-user-frequency-condition-with-conditions", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # Use spans instead of transactions for dynamic sampling calculations. This will become the new default. manager.add("organizations:dynamic-sampling-spans", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) # Enable tagging span with whether or not we should ingest it in the EAP manager.add("organizations:ingest-spans-in-eap", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) + # Enables EAP alerts UI use RPC + manager.add("organizations:eap-alerts-ui-uses-rpc", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) + # Test flag for flagpole region checking + manager.add("organizations:validate-region-test-flag", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True) # NOTE: Don't add features down here! Add them to their specific group and sort # them alphabetically! The order features are registered is not important. @@ -560,6 +576,8 @@ def register_temporary_features(manager: FeatureManager): # Enable alternative version of group creation that is supposed to be less racy. 
manager.add("projects:race-free-group-creation", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=False) # Enable similarity embeddings API call + # This feature is only available on the frontend using project details since the handler gets + # project options and this is slow in the project index endpoint feature flag serialization manager.add("projects:similarity-embeddings", ProjectFeature, FeatureHandlerStrategy.INTERNAL, default=False, api_expose=True) manager.add("projects:similarity-embeddings-backfill", ProjectFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) manager.add("projects:similarity-embeddings-delete-by-hash", ProjectFeature, FeatureHandlerStrategy.OPTIONS, api_expose=False) @@ -572,9 +590,6 @@ def register_temporary_features(manager: FeatureManager): # EAP: extremely experimental flag that makes DDM page use EAP tables manager.add("projects:use-eap-spans-for-metrics-explorer", ProjectFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False) - # Ecosystem: Enable verbose alert reporting when triggering test alerts - manager.add("projects:verbose-test-alert-reporting", ProjectFeature, FeatureHandlerStrategy.FLAGPOLE, - api_expose=False) # Project plugin features manager.add("projects:plugins", ProjectPluginFeature, FeatureHandlerStrategy.INTERNAL, default=True, api_expose=True) @@ -594,6 +609,12 @@ def register_temporary_features(manager: FeatureManager): FeatureHandlerStrategy.FLAGPOLE, api_expose=True, ) + manager.add( + "organizations:feature-flag-cta", + OrganizationFeature, + FeatureHandlerStrategy.FLAGPOLE, + api_expose=True, + ) # Partner oauth manager.add( diff --git a/src/sentry/flags/docs/api.md b/src/sentry/flags/docs/api.md index 2fee96927f558f..5e63588b40142c 100644 --- a/src/sentry/flags/docs/api.md +++ b/src/sentry/flags/docs/api.md @@ -8,7 +8,7 @@ Host: https://sentry.io/api/0 **How to read this document.** -This document is structured by resource with each resource having actions that can be performed against it. Every action that either accepts a request or returns a response WILL document the full interchange format. Clients may opt to restrict response data or provide a subset of the request data. +This document is structured by resource with each resource having actions that can be performed against it. Every action that either accepts a request or returns a response **must** document the full interchange format. ## Flag Logs [/organizations//flags/logs/] @@ -17,7 +17,7 @@ This document is structured by resource with each resource having actions that c - start (optional, string) - ISO 8601 format (`YYYY-MM-DDTHH:mm:ss.sssZ`) - end (optional, string) - ISO 8601 format. Required if `start` is set. - statsPeriod (optional, string) - A positive integer suffixed with a unit type. - - cursor (optional, string)` + - cursor (optional, string) - per_page (optional, number) Default: 10 - offset (optional, number) @@ -83,32 +83,78 @@ Retrieve a single flag log instance. } ``` -## Webhooks [/organizations//flags/hooks/provider//token//] +## Signing Secrets [/organizations//flags/signing-secrets/] -### Create Flag Log [POST] +- Parameters + - cursor (optional, string) + - per_page (optional, number) + Default: 10 + - offset (optional, number) + Default: 0 -The shape of the request object varies by provider. The `` URI parameter informs the server of the shape of the request and it is on the server to handle the provider. The following providers are supported: Unleash, Split, Statsig, and LaunchDarkly. 
+### Browse Signing Secrets [GET] -**Flag Pole Example:** +Browse a list of signing secrets. Secrets are unique per provider. Secrets only show the first six characters; the remainder are redacted. -Flag pole is Sentry owned. It matches our audit-log resource because it is designed for that purpose. +**Attributes** -- Request (application/json) +| Column | Type | Description | +| --------- | ------ | ---------------------------------------------------------------------- | +| createdAt | string | ISO-8601 timestamp of when the secret was added. | +| createdBy | string | The user responsible for adding the secret. | +| id | number | A unique identifier for the secret entry. | +| provider | string | The provider this secret applies to. | +| secret | string | A secret value which allows us to verify the signature of the request. | + +- Response 200 ```json { "data": [ { - "action": "updated", - "createdAt": "2024-11-19T19:12:55", - "createdBy": "colton.allen@sentry.io", - "flag": "flag-name", - "tags": { - "commit_sha": "1f33a107d7cd060ab9c98e11c9e5a62dc1347861" - } + "createdAt": "2024-12-12T00:00:00+00:00", + "createdBy": 12345, + "id": 123, + "provider": "launchdarkly", + "secret": "abc123**********" } ] } ``` +### Create Signing Secret [POST] + +Requests from web hook providers can be signed. We use the signing secret to verify the webhook's origin is authentic. + +- Request (application/json) + + ```json + { + "provider": "launchdarkly", + "secret": "d41d7d1adced450d9e2eb7f76dde6a04" + } + ``` + +- Response 201 + +## Signing Secret [/organizations//flags/signing-secrets//] + +### Delete Signing Secret [DELETE] + +Delete a signing secret. + +- Response 204 + +## Webhooks [/organizations//flags/hooks/provider//] + +### Create Flag Log [POST] + +The shape of the request object varies by provider. The `` URI parameter informs the server of the shape of the request and it is on the server to handle the provider. The following providers are supported: LaunchDarkly. + +Webhooks are signed by their provider. The provider handler must use the secret stored in Sentry to verify the signature of the payload. Failure to do so could lead to unauthorized access. + +Any request content-type is acceptable (JSON, XML, binary-formats) so long as the server is capable of decoding the request and mapping it to our object model. 
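As a non-normative sketch of the verification step described above (the digest scheme and the per-provider signature header, e.g. `X-LD-Signature` for LaunchDarkly, match the validation added in `src/sentry/flags/providers.py`; the function itself is illustrative):

```python
import hashlib
import hmac


def signature_is_valid(secret: str, request_body: bytes, signature_header: str | None) -> bool:
    """Return True if the provider's signature matches the HMAC-SHA256 hex digest of the body."""
    if signature_header is None:
        return False
    expected = hmac.new(secret.encode(), request_body, hashlib.sha256).hexdigest()
    # compare_digest avoids timing side channels; the secret is the one stored per provider.
    return hmac.compare_digest(expected, signature_header)
```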
+ +- Request + - Response 201 diff --git a/src/sentry/flags/endpoints/__init__.py b/src/sentry/flags/endpoints/__init__.py index e69de29bb2d1d6..be4cef59b09284 100644 --- a/src/sentry/flags/endpoints/__init__.py +++ b/src/sentry/flags/endpoints/__init__.py @@ -0,0 +1,12 @@ +from sentry.api.bases.organization import OrganizationEndpoint +from sentry.api.exceptions import ResourceDoesNotExist + +VALID_PROVIDERS = {"launchdarkly"} + + +class OrganizationFlagsEndpoint(OrganizationEndpoint): + + def convert_args(self, *args, **kwargs): + if kwargs.get("provider", "") not in VALID_PROVIDERS: + raise ResourceDoesNotExist + return super().convert_args(*args, **kwargs) diff --git a/src/sentry/flags/endpoints/hooks.py b/src/sentry/flags/endpoints/hooks.py index 6db6bc7d7e9bf4..e223f602df2363 100644 --- a/src/sentry/flags/endpoints/hooks.py +++ b/src/sentry/flags/endpoints/hooks.py @@ -1,70 +1,29 @@ -import logging -from urllib.parse import unquote - import sentry_sdk -from rest_framework.exceptions import AuthenticationFailed from rest_framework.request import Request from rest_framework.response import Response from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import Endpoint, region_silo_endpoint +from sentry.api.base import region_silo_endpoint from sentry.api.exceptions import ResourceDoesNotExist +from sentry.flags.endpoints import OrganizationFlagsEndpoint from sentry.flags.providers import ( DeserializationError, InvalidProvider, handle_provider_event, + validate_provider_event, write, ) -from sentry.hybridcloud.models.orgauthtokenreplica import OrgAuthTokenReplica from sentry.models.organization import Organization -from sentry.models.orgauthtoken import OrgAuthToken -from sentry.silo.base import SiloMode -from sentry.utils.security.orgauthtoken_token import hash_token - -"""HTTP endpoint. - -This endpoint accepts only organization authorization tokens. I've made the conscious -decision to exclude all other forms of authentication. We don't want users accidentally -writing logs or leaked DSNs generating invalid log entries. An organization token is -secret and reasonably restricted and so makes sense for this use case where we have -inter-provider communication. 
-""" - -logger = logging.getLogger() @region_silo_endpoint -class OrganizationFlagsHooksEndpoint(Endpoint): +class OrganizationFlagsHooksEndpoint(OrganizationFlagsEndpoint): authentication_classes = () owner = ApiOwner.REPLAY permission_classes = () - publish_status = { - "POST": ApiPublishStatus.PRIVATE, - } - - def convert_args( - self, - request: Request, - organization_id_or_slug: str, - token: str, - *args, - **kwargs, - ): - try: - if str(organization_id_or_slug).isdigit(): - organization = Organization.objects.get_from_cache(id=organization_id_or_slug) - else: - organization = Organization.objects.get_from_cache(slug=organization_id_or_slug) - except Organization.DoesNotExist: - raise ResourceDoesNotExist - - if not is_valid_token(organization.id, token): - raise AuthenticationFailed("Invalid token specified.") - - kwargs["organization"] = organization - return args, kwargs + publish_status = {"POST": ApiPublishStatus.PRIVATE} def post(self, request: Request, organization: Organization, provider: str) -> Response: if not features.has( @@ -73,6 +32,14 @@ def post(self, request: Request, organization: Organization, provider: str) -> R return Response("Not enabled.", status=404) try: + if not validate_provider_event( + provider, + request.body, + request.headers, + organization.id, + ): + return Response("Not authorized.", status=401) + write(handle_provider_event(provider, request.data, organization.id)) return Response(status=200) except InvalidProvider: @@ -80,28 +47,3 @@ def post(self, request: Request, organization: Organization, provider: str) -> R except DeserializationError as exc: sentry_sdk.capture_exception() return Response(exc.errors, status=200) - - -def is_valid_token(organization_id: int, token: str) -> bool: - token_hashed = hash_token(unquote(token)) - - if SiloMode.get_current_mode() == SiloMode.REGION: - try: - OrgAuthTokenReplica.objects.get( - token_hashed=token_hashed, - date_deactivated__isnull=True, - organization_id=organization_id, - ) - return True - except OrgAuthTokenReplica.DoesNotExist: - return False - else: - try: - OrgAuthToken.objects.get( - token_hashed=token_hashed, - date_deactivated__isnull=True, - organization_id=organization_id, - ) - return True - except OrgAuthToken.DoesNotExist: - return False diff --git a/src/sentry/flags/endpoints/logs.py b/src/sentry/flags/endpoints/logs.py index 8378569e14951c..196227873dbdd0 100644 --- a/src/sentry/flags/endpoints/logs.py +++ b/src/sentry/flags/endpoints/logs.py @@ -5,7 +5,6 @@ from rest_framework.request import Request from rest_framework.response import Response -# from sentry import features from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint @@ -48,9 +47,6 @@ class OrganizationFlagLogIndexEndpoint(OrganizationEndpoint): publish_status = {"GET": ApiPublishStatus.PRIVATE} def get(self, request: Request, organization: Organization) -> Response: - # if not features.has("organizations:feature-flag-ui", organization, actor=request.user): - # raise ResourceDoesNotExist - start, end = get_date_range_from_params(request.GET) if start is None or end is None: raise ParseError(detail="Invalid date range") @@ -81,9 +77,6 @@ class OrganizationFlagLogDetailsEndpoint(OrganizationEndpoint): publish_status = {"GET": ApiPublishStatus.PRIVATE} def get(self, request: Request, organization: Organization, flag_log_id: int) -> Response: - # if not features.has("organizations:feature-flag-ui", organization, 
actor=request.user): - # raise ResourceDoesNotExist - try: model = FlagAuditLogModel.objects.filter( id=flag_log_id, diff --git a/src/sentry/flags/endpoints/secrets.py b/src/sentry/flags/endpoints/secrets.py new file mode 100644 index 00000000000000..17a03d98b3a045 --- /dev/null +++ b/src/sentry/flags/endpoints/secrets.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import TypedDict + +from rest_framework import serializers +from rest_framework.request import Request +from rest_framework.response import Response + +from sentry import features +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import region_silo_endpoint +from sentry.api.bases.organization import OrganizationEndpoint, OrgAuthTokenPermission +from sentry.api.paginator import OffsetPaginator +from sentry.api.serializers import Serializer, register, serialize +from sentry.flags.models import FlagWebHookSigningSecretModel +from sentry.models.organization import Organization + + +class FlagWebhookSigningSecretResponse(TypedDict): + createdAt: str + createdBy: int + id: int + provider: str + secret: str + + +@register(FlagWebHookSigningSecretModel) +class FlagWebhookSigningSecretSerializer(Serializer): + def serialize(self, obj, attrs, user, **kwargs) -> FlagWebhookSigningSecretResponse: + return { + "createdAt": obj.date_added.isoformat(), + "createdBy": obj.created_by, + "id": obj.id, + "provider": obj.provider, + "secret": obj.secret[0:6] + "*" * (len(obj.secret) - 6), + } + + +class FlagWebhookSigningSecretValidator(serializers.Serializer): + provider = serializers.ChoiceField(choices=[("launchdarkly", "launchdarkly")], required=True) + secret = serializers.CharField(required=True, max_length=32, min_length=32) + + +@region_silo_endpoint +class OrganizationFlagsWebHookSigningSecretsEndpoint(OrganizationEndpoint): + owner = ApiOwner.REPLAY + permission_classes = (OrgAuthTokenPermission,) + publish_status = { + "GET": ApiPublishStatus.PRIVATE, + "POST": ApiPublishStatus.PRIVATE, + } + + def get(self, request: Request, organization: Organization) -> Response: + if not features.has( + "organizations:feature-flag-audit-log", organization, actor=request.user + ): + return Response("Not enabled.", status=404) + + return self.paginate( + request=request, + queryset=FlagWebHookSigningSecretModel.objects.filter(organization_id=organization.id), + order_by="-date_added", + on_results=lambda x: { + "data": serialize(x, request.user, FlagWebhookSigningSecretSerializer()) + }, + paginator_cls=OffsetPaginator, + ) + + def post(self, request: Request, organization: Organization) -> Response: + if not features.has( + "organizations:feature-flag-audit-log", organization, actor=request.user + ): + return Response("Not enabled.", status=404) + + validator = FlagWebhookSigningSecretValidator(data=request.data) + if not validator.is_valid(): + return self.respond(validator.errors, status=400) + + FlagWebHookSigningSecretModel.objects.create_or_update( + organization=organization, + provider=validator.validated_data["provider"], + values={ + "created_by": request.user.id, + "date_added": datetime.now(tz=timezone.utc), + "provider": validator.validated_data["provider"], + "secret": validator.validated_data["secret"], + }, + ) + + return Response(status=201) + + +@region_silo_endpoint +class OrganizationFlagsWebHookSigningSecretEndpoint(OrganizationEndpoint): + owner = ApiOwner.REPLAY + permission_classes = 
(OrgAuthTokenPermission,) + publish_status = {"DELETE": ApiPublishStatus.PRIVATE} + + def delete( + self, request: Request, organization: Organization, signing_secret_id: str + ) -> Response: + if not features.has( + "organizations:feature-flag-audit-log", organization, actor=request.user + ): + return Response("Not enabled.", status=404) + + try: + model = FlagWebHookSigningSecretModel.objects.filter( + organization_id=organization.id + ).get(id=int(signing_secret_id)) + model.delete() + except FlagWebHookSigningSecretModel.DoesNotExist: + return Response(status=404) + else: + return Response(status=204) diff --git a/src/sentry/flags/migrations/0002_add_flags_webhooksigningsecret.py b/src/sentry/flags/migrations/0002_add_flags_webhooksigningsecret.py new file mode 100644 index 00000000000000..22b42652c5b387 --- /dev/null +++ b/src/sentry/flags/migrations/0002_add_flags_webhooksigningsecret.py @@ -0,0 +1,64 @@ +# Generated by Django 5.1.1 on 2024-11-13 15:32 + +import django.db.models.deletion +import django.utils.timezone +from django.db import migrations, models + +import sentry.db.models.fields.bounded +import sentry.db.models.fields.foreignkey +import sentry.db.models.fields.hybrid_cloud_foreign_key +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("flags", "0001_add_flag_audit_log"), + ("sentry", "0787_make_dashboard_perms_col_nullable"), + ] + + operations = [ + migrations.CreateModel( + name="FlagWebHookSigningSecretModel", + fields=[ + ( + "id", + sentry.db.models.fields.bounded.BoundedBigAutoField( + primary_key=True, serialize=False + ), + ), + ( + "created_by", + sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + ("date_added", models.DateTimeField(default=django.utils.timezone.now)), + ("provider", models.CharField(db_index=True)), + ("secret", models.CharField()), + ( + "organization", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="sentry.organization" + ), + ), + ], + options={ + "db_table": "flags_webhooksigningsecret", + "unique_together": {("organization", "provider", "secret")}, + }, + ), + ] diff --git a/src/sentry/flags/models.py b/src/sentry/flags/models.py index 80852d7cc89e5d..5e12788c61696f 100644 --- a/src/sentry/flags/models.py +++ b/src/sentry/flags/models.py @@ -1,10 +1,11 @@ from enum import Enum +from django.conf import settings from django.db import models from django.utils import timezone from sentry.backup.scopes import RelocationScope -from sentry.db.models import Model, region_silo_model, sane_repr +from sentry.db.models import FlexibleForeignKey, Model, region_silo_model, sane_repr from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey @@ -83,3 +84,19 @@ class Meta: indexes = (models.Index(fields=("flag",)),) __repr__ = sane_repr("organization_id", "flag") + + +@region_silo_model +class FlagWebHookSigningSecretModel(Model): + __relocation_scope__ = RelocationScope.Excluded + + created_by = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") + date_added = models.DateTimeField(default=timezone.now) + organization = FlexibleForeignKey("sentry.Organization") + provider = models.CharField(db_index=True) + secret = models.CharField() + + class Meta: + app_label = "flags" + db_table = "flags_webhooksigningsecret" + unique_together = (("organization", "provider", "secret"),) diff --git a/src/sentry/flags/providers.py b/src/sentry/flags/providers.py index eac1c8a5a5e82b..8232a6cf00be58 100644 --- a/src/sentry/flags/providers.py +++ b/src/sentry/flags/providers.py @@ -1,9 +1,17 @@ import datetime +import hashlib +import hmac from typing import Any, TypedDict +from django.http.request import HttpHeaders from rest_framework import serializers -from sentry.flags.models import ACTION_MAP, CREATED_BY_TYPE_MAP, FlagAuditLogModel +from sentry.flags.models import ( + ACTION_MAP, + CREATED_BY_TYPE_MAP, + FlagAuditLogModel, + FlagWebHookSigningSecretModel, +) from sentry.silo.base import SiloLimit @@ -62,6 +70,19 @@ def handle_provider_event( raise InvalidProvider(provider) +def validate_provider_event( + provider: str, + request_data: bytes, + request_headers: HttpHeaders, + organization_id: int, +) -> bool: + match provider: + case "launchdarkly": + return validate_launchdarkly_event(request_data, request_headers, organization_id) + case _: + raise InvalidProvider(provider) + + """LaunchDarkly provider.""" @@ -70,7 +91,7 @@ class LaunchDarklyItemSerializer(serializers.Serializer): date = serializers.IntegerField(required=True) member = 
serializers.DictField(required=True) name = serializers.CharField(max_length=100, required=True) - description = serializers.CharField(required=True) + description = serializers.CharField(allow_blank=True, required=True) SUPPORTED_LAUNCHDARKLY_ACTIONS = { @@ -112,6 +133,10 @@ def handle_launchdarkly_event( result = serializer.validated_data + access = result["accesses"][0] + if access["action"] not in SUPPORTED_LAUNCHDARKLY_ACTIONS: + return [] + return [ { "action": handle_launchdarkly_actions(access["action"]), @@ -122,11 +147,33 @@ def handle_launchdarkly_event( "organization_id": organization_id, "tags": {"description": result["description"]}, } - for access in result["accesses"] - if access["action"] in SUPPORTED_LAUNCHDARKLY_ACTIONS ] +def validate_launchdarkly_event( + request_data: bytes, + request_headers: HttpHeaders, + organization_id: int, +) -> bool: + """Return "true" if the launchdarkly payload is valid.""" + signature = request_headers.get("X-LD-Signature") + if signature is None: + return False + + models = FlagWebHookSigningSecretModel.objects.filter( + organization_id=organization_id, + provider="launchdarkly", + ).all() + for model in models: + if hmac_sha256_hex_digest(model.secret, request_data) == signature: + return True + return False + + +def hmac_sha256_hex_digest(key: str, message: bytes): + return hmac.new(key.encode(), message, hashlib.sha256).hexdigest() + + """Internal flag-pole provider. Allows us to skip the HTTP endpoint. diff --git a/src/sentry/grouping/__init__.py b/src/sentry/grouping/__init__.py index c9a6eac32bb86b..d7ba3b7c147978 100644 --- a/src/sentry/grouping/__init__.py +++ b/src/sentry/grouping/__init__.py @@ -87,7 +87,7 @@ Here an example of how components can be used:: function_name = 'lambda$1234' - threads = GroupingComponent( + threads = BaseGroupingComponent( id="function", values=[function_name], contributes=False, diff --git a/src/sentry/grouping/api.py b/src/sentry/grouping/api.py index 5c85088cc06fde..1e2ac52f4efe0d 100644 --- a/src/sentry/grouping/api.py +++ b/src/sentry/grouping/api.py @@ -1,14 +1,19 @@ from __future__ import annotations import re -from collections.abc import Sequence +from collections.abc import MutableMapping, Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, TypedDict +from typing import TYPE_CHECKING, Any, NotRequired, TypedDict import sentry_sdk from sentry import options -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import ( + AppGroupingComponent, + BaseGroupingComponent, + DefaultGroupingComponent, + SystemGroupingComponent, +) from sentry.grouping.enhancer import LATEST_VERSION, Enhancements from sentry.grouping.enhancer.exceptions import InvalidEnhancerConfig from sentry.grouping.strategies.base import DEFAULT_GROUPING_ENHANCEMENTS_BASE, GroupingContext @@ -33,13 +38,18 @@ if TYPE_CHECKING: from sentry.eventstore.models import Event - from sentry.grouping.fingerprinting import FingerprintingRules + from sentry.grouping.fingerprinting import FingerprintingRules, FingerprintRuleJSON from sentry.grouping.strategies.base import StrategyConfiguration from sentry.models.project import Project HASH_RE = re.compile(r"^[0-9a-f]{32}$") +class FingerprintInfo(TypedDict): + client_fingerprint: NotRequired[list[str]] + matched_rule: NotRequired[FingerprintRuleJSON] + + @dataclass class GroupHashInfo: config: GroupingConfig @@ -234,7 +244,9 @@ def get_fingerprinting_config_for_project( return rv -def apply_server_fingerprinting(event, config, 
allow_custom_title=True): +def apply_server_fingerprinting( + event: MutableMapping[str, Any], config: FingerprintingRules, allow_custom_title: bool = True +) -> None: fingerprint_info = {} client_fingerprint = event.get("fingerprint", []) @@ -264,10 +276,10 @@ def apply_server_fingerprinting(event, config, allow_custom_title=True): def _get_calculated_grouping_variants_for_event( event: Event, context: GroupingContext -) -> dict[str, GroupingComponent]: +) -> dict[str, AppGroupingComponent | SystemGroupingComponent | DefaultGroupingComponent]: winning_strategy: str | None = None precedence_hint: str | None = None - per_variant_components: dict[str, list[GroupingComponent]] = {} + per_variant_components: dict[str, list[BaseGroupingComponent]] = {} for strategy in context.config.iter_strategies(): # Defined in src/sentry/grouping/strategies/base.py @@ -292,7 +304,12 @@ def _get_calculated_grouping_variants_for_event( rv = {} for variant, components in per_variant_components.items(): - component = GroupingComponent(id=variant, values=components) + component_class_by_variant = { + "app": AppGroupingComponent, + "default": DefaultGroupingComponent, + "system": SystemGroupingComponent, + } + component = component_class_by_variant[variant](values=components) if not component.contributes and precedence_hint: component.update(hint=precedence_hint) rv[variant] = component @@ -336,7 +353,7 @@ def get_grouping_variants_for_event( # a materialized fingerprint info from server side fingerprinting we forward it to the # variants which can export additional information about them. fingerprint = event.data.get("fingerprint") or ["{{ default }}"] - fingerprint_info = event.data.get("_fingerprint_info") + fingerprint_info = event.data.get("_fingerprint_info", {}) defaults_referenced = sum(1 if is_default_fingerprint_var(d) else 0 for d in fingerprint) if config is None: @@ -359,7 +376,7 @@ def get_grouping_variants_for_event( rv[key] = ComponentVariant(component, context.config) fingerprint = resolve_fingerprint_values(fingerprint, event.data) - if (fingerprint_info or {}).get("matched_rule", {}).get("is_builtin") is True: + if fingerprint_info.get("matched_rule", {}).get("is_builtin") is True: rv["built_in_fingerprint"] = BuiltInFingerprintVariant(fingerprint, fingerprint_info) else: rv["custom_fingerprint"] = CustomFingerprintVariant(fingerprint, fingerprint_info) diff --git a/src/sentry/grouping/component.py b/src/sentry/grouping/component.py index be4e1bffdef7b1..cd1a83b820af1b 100644 --- a/src/sentry/grouping/component.py +++ b/src/sentry/grouping/component.py @@ -1,12 +1,12 @@ from __future__ import annotations +from abc import ABC, abstractmethod +from collections import Counter from collections.abc import Generator, Iterator, Sequence -from typing import Any +from typing import Any, Self from sentry.grouping.utils import hash_from_values -DEFAULT_HINTS = {"salt": "a static salt"} - # When a component ID appears here it has a human readable name which also # makes it a major component. A major component is described as such for # the UI. 
@@ -23,40 +23,41 @@ } -def _calculate_contributes(values: Sequence[str | GroupingComponent]) -> bool: +def _calculate_contributes[ValuesType](values: Sequence[ValuesType]) -> bool: for value in values or (): - if not isinstance(value, GroupingComponent) or value.contributes: + if not isinstance(value, BaseGroupingComponent) or value.contributes: return True return False -class GroupingComponent: +class BaseGroupingComponent[ValuesType: str | int | BaseGroupingComponent[Any]](ABC): """A grouping component is a recursive structure that is flattened into components to make a hash for grouping purposes. """ + hint: str | None = None + contributes: bool = False + values: Sequence[ValuesType] + def __init__( self, - id: str, hint: str | None = None, contributes: bool | None = None, - values: Sequence[str | GroupingComponent] | None = None, + values: Sequence[ValuesType] | None = None, variant_provider: bool = False, ): - self.id = id - - # Default values - self.hint = DEFAULT_HINTS.get(id) - self.contributes = contributes self.variant_provider = variant_provider - self.values: Sequence[str | GroupingComponent] = [] self.update( hint=hint, contributes=contributes, - values=values, + values=values or [], ) + @property + @abstractmethod + def id(self) -> str: ... + @property def name(self) -> str | None: return KNOWN_MAJOR_COMPONENT_NAMES.get(self.id) @@ -72,13 +73,15 @@ def description(self) -> str: # Keep track of the paths we walk so later we can pick the longest one paths = [] - def _walk_components(component: GroupingComponent, current_path: list[str | None]) -> None: + def _walk_components( + component: BaseGroupingComponent[Any], current_path: list[str | None] + ) -> None: # Keep track of the names of the nodes from the root of the component tree to here current_path.append(component.name) # Walk the tree, looking for contributing components. 
for value in component.values: - if isinstance(value, GroupingComponent) and value.contributes: + if isinstance(value, BaseGroupingComponent) and value.contributes: _walk_components(value, current_path) # Filter out the `None`s (which come from components not in `KNOWN_MAJOR_COMPONENT_NAMES`) @@ -99,16 +102,16 @@ def _walk_components(component: GroupingComponent, current_path: list[str | None def get_subcomponent( self, id: str, only_contributing: bool = False - ) -> str | GroupingComponent | None: + ) -> str | int | BaseGroupingComponent[Any] | None: """Looks up a subcomponent by the id and returns the first or `None`.""" return next(self.iter_subcomponents(id=id, only_contributing=only_contributing), None) def iter_subcomponents( self, id: str, recursive: bool = False, only_contributing: bool = False - ) -> Iterator[str | GroupingComponent | None]: + ) -> Iterator[str | int | BaseGroupingComponent[Any] | None]: """Finds all subcomponents matching an id, optionally recursively.""" for value in self.values: - if isinstance(value, GroupingComponent): + if isinstance(value, BaseGroupingComponent): if only_contributing and not value.contributes: continue if value.id == id: @@ -123,7 +126,7 @@ def update( self, hint: str | None = None, contributes: bool | None = None, - values: Sequence[str | GroupingComponent] | None = None, + values: Sequence[ValuesType] | None = None, ) -> None: """Updates an already existing component with new values.""" if hint is not None: @@ -135,20 +138,20 @@ def update( if contributes is not None: self.contributes = contributes - def shallow_copy(self) -> GroupingComponent: + def shallow_copy(self) -> Self: """Creates a shallow copy.""" rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.values = list(self.values) return rv - def iter_values(self) -> Generator[str | GroupingComponent]: + def iter_values(self) -> Generator[str | int]: """Recursively walks the component and flattens it into a list of values. 
""" if self.contributes: for value in self.values: - if isinstance(value, GroupingComponent): + if isinstance(value, BaseGroupingComponent): yield from value.iter_values() else: yield value @@ -170,7 +173,7 @@ def as_dict(self) -> dict[str, Any]: "values": [], } for value in self.values: - if isinstance(value, GroupingComponent): + if isinstance(value, BaseGroupingComponent): rv["values"].append(value.as_dict()) else: # This basically assumes that a value is only a primitive @@ -180,4 +183,224 @@ def as_dict(self) -> dict[str, Any]: return rv def __repr__(self) -> str: - return f"GroupingComponent({self.id!r}, hint={self.hint!r}, contributes={self.contributes!r}, values={self.values!r})" + return f"{self.__class__.__name__}({self.id!r}, hint={self.hint!r}, contributes={self.contributes!r}, values={self.values!r})" + + +# NOTE: In all of the classes below, the type(s) passed to `BaseGroupingComponent` represent +# the type(s) which can appear in the `values` attribute + + +# Error-related inner components + + +class ContextLineGroupingComponent(BaseGroupingComponent[str]): + id: str = "context-line" + + +class ErrorTypeGroupingComponent(BaseGroupingComponent[str]): + id: str = "type" + + +class ErrorValueGroupingComponent(BaseGroupingComponent[str]): + id: str = "value" + + +class FilenameGroupingComponent(BaseGroupingComponent[str]): + id: str = "filename" + + +class FunctionGroupingComponent(BaseGroupingComponent[str]): + id: str = "function" + + +class LineNumberGroupingComponent(BaseGroupingComponent[int]): + id: str = "lineno" + + +class ModuleGroupingComponent(BaseGroupingComponent[str]): + id: str = "module" + + +class NSErrorGroupingComponent(BaseGroupingComponent[str | int]): + id: str = "ns-error" + + +class SymbolGroupingComponent(BaseGroupingComponent[str]): + id: str = "symbol" + + +FrameGroupingComponentChildren = ( + ContextLineGroupingComponent + | FilenameGroupingComponent + | FunctionGroupingComponent + | LineNumberGroupingComponent # only in legacy config + | ModuleGroupingComponent + | SymbolGroupingComponent # only in legacy config +) + + +class FrameGroupingComponent(BaseGroupingComponent[FrameGroupingComponentChildren]): + id: str = "frame" + in_app: bool + + def __init__( + self, + values: Sequence[FrameGroupingComponentChildren], + in_app: bool, + hint: str | None = None, # only passed in legacy + contributes: bool | None = None, # only passed in legacy + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.in_app = in_app + + +# Security-related inner components + + +class HostnameGroupingComponent(BaseGroupingComponent[str]): + id: str = "hostname" + + +class SaltGroupingComponent(BaseGroupingComponent[str]): + id: str = "salt" + hint: str = "a static salt" + + +class ViolationGroupingComponent(BaseGroupingComponent[str]): + id: str = "violation" + + +class URIGroupingComponent(BaseGroupingComponent[str]): + id: str = "uri" + + +# Top-level components + + +class MessageGroupingComponent(BaseGroupingComponent[str]): + id: str = "message" + + +class StacktraceGroupingComponent(BaseGroupingComponent[FrameGroupingComponent]): + id: str = "stacktrace" + frame_counts: Counter[str] + + def __init__( + self, + values: Sequence[FrameGroupingComponent] | None = None, + hint: str | None = None, + contributes: bool | None = None, + frame_counts: Counter[str] | None = None, + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.frame_counts = frame_counts or Counter() + + +ExceptionGroupingComponentChildren = ( + 
ErrorTypeGroupingComponent + | ErrorValueGroupingComponent + | NSErrorGroupingComponent + | StacktraceGroupingComponent +) + + +class ExceptionGroupingComponent(BaseGroupingComponent[ExceptionGroupingComponentChildren]): + id: str = "exception" + frame_counts: Counter[str] + + def __init__( + self, + values: Sequence[ExceptionGroupingComponentChildren] | None = None, + hint: str | None = None, + contributes: bool | None = None, + frame_counts: Counter[str] | None = None, + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.frame_counts = frame_counts or Counter() + + +class ChainedExceptionGroupingComponent(BaseGroupingComponent[ExceptionGroupingComponent]): + id: str = "chained-exception" + frame_counts: Counter[str] + + def __init__( + self, + values: Sequence[ExceptionGroupingComponent] | None = None, + hint: str | None = None, + contributes: bool | None = None, + frame_counts: Counter[str] | None = None, + ): + super().__init__(hint=hint, contributes=contributes, values=values) + self.frame_counts = frame_counts or Counter() + + +class ThreadsGroupingComponent(BaseGroupingComponent[StacktraceGroupingComponent]): + id: str = "threads" + + +class CSPGroupingComponent( + BaseGroupingComponent[SaltGroupingComponent | ViolationGroupingComponent | URIGroupingComponent] +): + id: str = "csp" + + +class ExpectCTGroupingComponent( + BaseGroupingComponent[HostnameGroupingComponent | SaltGroupingComponent] +): + id: str = "expect-ct" + + +class ExpectStapleGroupingComponent( + BaseGroupingComponent[HostnameGroupingComponent | SaltGroupingComponent] +): + id: str = "expect-staple" + + +class HPKPGroupingComponent( + BaseGroupingComponent[HostnameGroupingComponent | SaltGroupingComponent] +): + id: str = "hpkp" + + +class TemplateGroupingComponent( + BaseGroupingComponent[ContextLineGroupingComponent | FilenameGroupingComponent] +): + id: str = "template" + + +# Wrapper components used to link component trees to variants + + +class DefaultGroupingComponent( + BaseGroupingComponent[ + CSPGroupingComponent + | ExpectCTGroupingComponent + | ExpectStapleGroupingComponent + | HPKPGroupingComponent + | MessageGroupingComponent + | TemplateGroupingComponent + ] +): + id: str = "default" + + +class AppGroupingComponent( + BaseGroupingComponent[ + ChainedExceptionGroupingComponent + | ExceptionGroupingComponent + | StacktraceGroupingComponent + | ThreadsGroupingComponent + ] +): + id: str = "app" + + +class SystemGroupingComponent( + BaseGroupingComponent[ + ChainedExceptionGroupingComponent + | ExceptionGroupingComponent + | StacktraceGroupingComponent + | ThreadsGroupingComponent + ] +): + id: str = "system" diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py index 5b8ac2d2694d42..0499870a57e860 100644 --- a/src/sentry/grouping/enhancer/__init__.py +++ b/src/sentry/grouping/enhancer/__init__.py @@ -4,6 +4,7 @@ import logging import os import zlib +from collections import Counter from collections.abc import Sequence from typing import Any, Literal @@ -15,14 +16,14 @@ from sentry_ophio.enhancers import Enhancements as RustEnhancements from sentry import projectoptions -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import FrameGroupingComponent, StacktraceGroupingComponent from sentry.stacktraces.functions import set_in_app from sentry.utils.safe import get_path, set_path from .exceptions import InvalidEnhancerConfig from .matchers import create_match_frame from .parser import 
parse_enhancements -from .rules import Rule +from .rules import EnhancementRule logger = logging.getLogger(__name__) @@ -35,9 +36,13 @@ VALID_PROFILING_MATCHER_PREFIXES = ( "stack.abs_path", + "path", # stack.abs_path alias "stack.module", + "module", # stack.module alias "stack.function", + "function", # stack.function alias "stack.package", + "package", # stack.package ) VALID_PROFILING_ACTIONS_SET = frozenset(["+app", "-app"]) @@ -81,7 +86,7 @@ def parse_rust_enhancements( def make_rust_exception_data( - exception_data: dict[str, Any], + exception_data: dict[str, Any] | None, ) -> RustExceptionData: e = exception_data or {} e = { @@ -162,7 +167,13 @@ def apply_modifications_to_frame( if category is not None: set_path(frame, "data", "category", value=category) - def assemble_stacktrace_component(self, components, frames, platform, exception_data=None): + def assemble_stacktrace_component( + self, + components: list[FrameGroupingComponent], + frames: list[dict[str, Any]], + platform: str | None, + exception_data: dict[str, Any] | None = None, + ) -> tuple[StacktraceGroupingComponent, bool]: """ This assembles a `stacktrace` grouping component out of the given `frame` components and source frames. @@ -177,14 +188,21 @@ def assemble_stacktrace_component(self, components, frames, platform, exception_ match_frames, make_rust_exception_data(exception_data), rust_components ) + # Tally the number of each type of frame in the stacktrace. Later on, this will allow us to + # both collect metrics and use the information in decisions about whether to send the event + # to Seer + frame_counts: Counter[str] = Counter() + for py_component, rust_component in zip(components, rust_components): py_component.update(contributes=rust_component.contributes, hint=rust_component.hint) + key = f"{"in_app" if py_component.in_app else "system"}_{"contributing" if py_component.contributes else "non_contributing"}_frames" + frame_counts[key] += 1 - component = GroupingComponent( - id="stacktrace", + component = StacktraceGroupingComponent( values=components, hint=rust_results.hint, contributes=rust_results.contributes, + frame_counts=frame_counts, ) return component, rust_results.invert_stacktrace @@ -220,7 +238,7 @@ def _from_config_structure(cls, data, rust_enhancements: RustEnhancements) -> En if version not in VERSIONS: raise ValueError("Unknown version") return cls( - rules=[Rule._from_config_structure(x, version=version) for x in rules], + rules=[EnhancementRule._from_config_structure(x, version=version) for x in rules], rust_enhancements=rust_enhancements, version=version, bases=bases, diff --git a/src/sentry/grouping/enhancer/actions.py b/src/sentry/grouping/enhancer/actions.py index 9fbba4d59f52e4..10911971a0b23f 100644 --- a/src/sentry/grouping/enhancer/actions.py +++ b/src/sentry/grouping/enhancer/actions.py @@ -21,7 +21,7 @@ REVERSE_ACTION_FLAGS = {v: k for k, v in ACTION_FLAGS.items()} -class Action: +class EnhancementAction: _is_modifier: bool _is_updater: bool @@ -60,7 +60,7 @@ def _from_config_structure(cls, val, version: int): return FlagAction(ACTIONS[val & 0xF], flag, range) -class FlagAction(Action): +class FlagAction(EnhancementAction): def __init__(self, key: str, flag: bool, range: str | None) -> None: self.key = key self._is_updater = key in {"group", "app"} @@ -135,7 +135,7 @@ def update_frame_components_contributions( ) -class VarAction(Action): +class VarAction(EnhancementAction): range = None _VALUE_PARSERS: dict[str, Callable[[Any], Any]] = { diff --git 
a/src/sentry/grouping/enhancer/matchers.py b/src/sentry/grouping/enhancer/matchers.py index 50cd316216f5b3..4b60fadb9d5a79 100644 --- a/src/sentry/grouping/enhancer/matchers.py +++ b/src/sentry/grouping/enhancer/matchers.py @@ -104,7 +104,7 @@ def create_match_frame(frame_data: dict, platform: str | None) -> dict: return match_frame -class Match: +class EnhancementMatch: def matches_frame(self, frames, idx, exception_data, cache): raise NotImplementedError() @@ -115,9 +115,9 @@ def _to_config_structure(self, version): def _from_config_structure(obj, version): val = obj if val.startswith("|[") and val.endswith("]"): - return CalleeMatch(Match._from_config_structure(val[2:-1], version)) + return CalleeMatch(EnhancementMatch._from_config_structure(val[2:-1], version)) if val.startswith("[") and val.endswith("]|"): - return CallerMatch(Match._from_config_structure(val[1:-2], version)) + return CallerMatch(EnhancementMatch._from_config_structure(val[1:-2], version)) if val.startswith("!"): negated = True @@ -136,13 +136,13 @@ def _from_config_structure(obj, version): InstanceKey = tuple[str, str, bool] -class FrameMatch(Match): +class FrameMatch(EnhancementMatch): # Global registry of matchers - instances: dict[InstanceKey, Match] = {} + instances: dict[InstanceKey, EnhancementMatch] = {} field: Any = None @classmethod - def from_key(cls, key: str, pattern: str, negated: bool) -> Match: + def from_key(cls, key: str, pattern: str, negated: bool) -> EnhancementMatch: instance_key = (key, pattern, negated) if instance_key in cls.instances: instance = cls.instances[instance_key] @@ -153,7 +153,7 @@ def from_key(cls, key: str, pattern: str, negated: bool) -> Match: return instance @classmethod - def _from_key(cls, key: str, pattern: str, negated: bool) -> Match: + def _from_key(cls, key: str, pattern: str, negated: bool) -> EnhancementMatch: subclass = { "package": PackageMatch, "path": PathMatch, @@ -315,7 +315,7 @@ class ExceptionMechanismMatch(ExceptionFieldMatch): field_path = ["mechanism", "type"] -class CallerMatch(Match): +class CallerMatch(EnhancementMatch): def __init__(self, inner: FrameMatch): self.inner = inner @@ -330,7 +330,7 @@ def matches_frame(self, frames, idx, exception_data, cache): return idx > 0 and self.inner.matches_frame(frames, idx - 1, exception_data, cache) -class CalleeMatch(Match): +class CalleeMatch(EnhancementMatch): def __init__(self, inner: FrameMatch): self.inner = inner diff --git a/src/sentry/grouping/enhancer/parser.py b/src/sentry/grouping/enhancer/parser.py index 803d0d1379726f..433e8f9f918834 100644 --- a/src/sentry/grouping/enhancer/parser.py +++ b/src/sentry/grouping/enhancer/parser.py @@ -7,7 +7,7 @@ from .actions import FlagAction, VarAction from .exceptions import InvalidEnhancerConfig from .matchers import CalleeMatch, CallerMatch, FrameMatch -from .rules import Rule +from .rules import EnhancementRule # Grammar is defined in EBNF syntax. 
enhancements_grammar = Grammar( @@ -59,7 +59,7 @@ class EnhancementsVisitor(NodeVisitor): visit_comment = visit_empty = lambda *a: None unwrapped_exceptions = (InvalidEnhancerConfig,) - def visit_enhancements(self, node, children) -> list[Rule]: + def visit_enhancements(self, node, children) -> list[EnhancementRule]: rules = [] for child in children: if not isinstance(child, str) and child is not None: @@ -75,7 +75,7 @@ def visit_line(self, node, children): def visit_rule(self, node, children): _, matcher, actions = children - return Rule(matcher, actions) + return EnhancementRule(matcher, actions) def visit_matchers(self, node, children): caller_matcher, frame_matchers, callee_matcher = children @@ -141,7 +141,7 @@ def visit_quoted_ident(self, node, children): return node.match.groups()[0].lstrip("!") -def parse_enhancements(s: str) -> list[Rule]: +def parse_enhancements(s: str) -> list[EnhancementRule]: try: tree = enhancements_grammar.parse(s) return EnhancementsVisitor().visit(tree) diff --git a/src/sentry/grouping/enhancer/rules.py b/src/sentry/grouping/enhancer/rules.py index 477577bff9fd79..5bfae1d5b2b534 100644 --- a/src/sentry/grouping/enhancer/rules.py +++ b/src/sentry/grouping/enhancer/rules.py @@ -2,11 +2,11 @@ from typing import Any -from .actions import Action -from .matchers import ExceptionFieldMatch, Match +from .actions import EnhancementAction +from .matchers import EnhancementMatch, ExceptionFieldMatch -class Rule: +class EnhancementRule: def __init__(self, matchers, actions): self.matchers = matchers @@ -29,17 +29,17 @@ def matcher_description(self): rv = f"{rv} {action}" return rv - def _as_modifier_rule(self) -> Rule | None: + def _as_modifier_rule(self) -> EnhancementRule | None: actions = [action for action in self.actions if action.is_modifier] if actions: - return Rule(self.matchers, actions) + return EnhancementRule(self.matchers, actions) else: return None - def _as_updater_rule(self) -> Rule | None: + def _as_updater_rule(self) -> EnhancementRule | None: actions = [action for action in self.actions if action.is_updater] if actions: - return Rule(self.matchers, actions) + return EnhancementRule(self.matchers, actions) else: return None @@ -54,7 +54,7 @@ def get_matching_frame_actions( match_frames: list[dict[str, Any]], exception_data: dict[str, Any], in_memory_cache: dict[str, str], - ) -> list[tuple[int, Action]]: + ) -> list[tuple[int, EnhancementAction]]: """Given a frame returns all the matching actions based on this rule. If the rule does not match `None` is returned. 
""" @@ -87,7 +87,7 @@ def _to_config_structure(self, version): @classmethod def _from_config_structure(cls, tuple, version): - return Rule( - [Match._from_config_structure(x, version) for x in tuple[0]], - [Action._from_config_structure(x, version) for x in tuple[1]], + return EnhancementRule( + [EnhancementMatch._from_config_structure(x, version) for x in tuple[0]], + [EnhancementAction._from_config_structure(x, version) for x in tuple[1]], ) diff --git a/src/sentry/grouping/fingerprinting/__init__.py b/src/sentry/grouping/fingerprinting/__init__.py index 2b0490637a5c61..2da4cac8517c13 100644 --- a/src/sentry/grouping/fingerprinting/__init__.py +++ b/src/sentry/grouping/fingerprinting/__init__.py @@ -4,7 +4,7 @@ import logging from collections.abc import Generator, Mapping, Sequence from pathlib import Path -from typing import TYPE_CHECKING, Any, NotRequired, Self, TypedDict, TypeVar +from typing import TYPE_CHECKING, Any, NamedTuple, NotRequired, Self, TypedDict, TypeVar from django.conf import settings from parsimonious.exceptions import ParseError @@ -108,6 +108,41 @@ class _ReleaseInfo(TypedDict): release: str | None +class FingerprintRuleAttributes(TypedDict): + title: NotRequired[str] + + +class FingerprintWithAttributes(NamedTuple): + fingerprint: list[str] + attributes: FingerprintRuleAttributes + + +class FingerprintRuleConfig(TypedDict): + # Each matcher is a list of [, ] + matchers: list[list[str]] + fingerprint: list[str] + attributes: NotRequired[FingerprintRuleAttributes] + is_builtin: NotRequired[bool] + + +# This is just `FingerprintRuleConfig` with an extra `text` entry and with `attributes` required +# rather than optional. (Unfortunately, you can't overwrite lack of required-ness when subclassing a +# TypedDict, so we have to create the full type independently.) 
+class FingerprintRuleJSON(TypedDict): + text: str + # Each matcher is a list of [, ] + matchers: list[list[str]] + fingerprint: list[str] + attributes: FingerprintRuleAttributes + is_builtin: NotRequired[bool] + + +class FingerprintRuleMatch(NamedTuple): + matched_rule: FingerprintRule + fingerprint: list[str] + attributes: FingerprintRuleAttributes + + class EventAccess: def __init__(self, event: Mapping[str, Any]) -> None: self.event = event @@ -211,7 +246,7 @@ def get_values(self, match_group: str) -> list[dict[str, Any]]: class FingerprintingRules: def __init__( self, - rules: Sequence[Rule], + rules: Sequence[FingerprintRule], changelog: Sequence[object] | None = None, version: int | None = None, bases: Sequence[str] | None = None, @@ -223,7 +258,7 @@ def __init__( self.changelog = changelog self.bases = bases or [] - def iter_rules(self, include_builtin: bool = True) -> Generator[Rule]: + def iter_rules(self, include_builtin: bool = True) -> Generator[FingerprintRule]: if self.rules: yield from self.rules if include_builtin: @@ -231,14 +266,16 @@ def iter_rules(self, include_builtin: bool = True) -> Generator[Rule]: base_rules = FINGERPRINTING_BASES.get(base, []) yield from base_rules - def get_fingerprint_values_for_event(self, event: dict[str, object]) -> None | object: + def get_fingerprint_values_for_event( + self, event: Mapping[str, object] + ) -> None | FingerprintRuleMatch: if not (self.bases or self.rules): return None access = EventAccess(event) for rule in self.iter_rules(): new_values = rule.get_fingerprint_values_for_event_access(access) if new_values is not None: - return (rule,) + new_values + return FingerprintRuleMatch(rule, new_values.fingerprint, new_values.attributes) return None @classmethod @@ -249,7 +286,7 @@ def _from_config_structure( if version != VERSION: raise ValueError("Unknown version") return cls( - rules=[Rule._from_config_structure(x) for x in data["rules"]], + rules=[FingerprintRule._from_config_structure(x) for x in data["rules"]], version=version, bases=bases, ) @@ -337,7 +374,7 @@ def _from_config_structure( } -class Match: +class FingerprintMatch: def __init__(self, key: str, pattern: str, negated: bool = False) -> None: if key.startswith("tags."): self.key = key @@ -436,7 +473,7 @@ def _to_config_structure(self) -> list[str]: return [key, self.pattern] @classmethod - def _from_config_structure(cls, obj: Sequence[str]) -> Self: + def _from_config_structure(cls, obj: list[str]) -> Self: key = obj[0] if key.startswith("!"): key = key[1:] @@ -454,12 +491,12 @@ def text(self) -> str: ) -class Rule: +class FingerprintRule: def __init__( self, - matchers: Sequence[Match], + matchers: Sequence[FingerprintMatch], fingerprint: list[str], - attributes: dict[str, Any], + attributes: FingerprintRuleAttributes, is_builtin: bool = False, ) -> None: self.matchers = matchers @@ -469,8 +506,8 @@ def __init__( def get_fingerprint_values_for_event_access( self, event_access: EventAccess - ) -> None | tuple[list[str], dict[str, Any]]: - by_match_group: dict[str, list[Match]] = {} + ) -> None | FingerprintWithAttributes: + by_match_group: dict[str, list[FingerprintMatch]] = {} for matcher in self.matchers: by_match_group.setdefault(matcher.match_group, []).append(matcher) @@ -481,10 +518,10 @@ def get_fingerprint_values_for_event_access( else: return None - return self.fingerprint, self.attributes + return FingerprintWithAttributes(self.fingerprint, self.attributes) - def _to_config_structure(self) -> dict[str, Any]: - config_structure: dict[str, Any] = { + def 
_to_config_structure(self) -> FingerprintRuleJSON: + config_structure: FingerprintRuleJSON = { "text": self.text, "matchers": [x._to_config_structure() for x in self.matchers], "fingerprint": self.fingerprint, @@ -497,19 +534,19 @@ def _to_config_structure(self) -> dict[str, Any]: return config_structure @classmethod - def _from_config_structure(cls, obj: dict[str, Any]) -> Self: + def _from_config_structure(cls, obj: FingerprintRuleConfig | FingerprintRuleJSON) -> Self: return cls( - [Match._from_config_structure(x) for x in obj["matchers"]], + [FingerprintMatch._from_config_structure(x) for x in obj["matchers"]], obj["fingerprint"], obj.get("attributes") or {}, obj.get("is_builtin") or False, ) - def to_json(self) -> dict[str, Any]: + def to_json(self) -> FingerprintRuleJSON: return self._to_config_structure() @classmethod - def from_json(cls, json: dict[str, object]) -> Self: + def from_json(cls, json: FingerprintRuleConfig | FingerprintRuleJSON) -> Self: return cls._from_config_structure(json) @property @@ -539,7 +576,7 @@ def visit_comment(self, node: Node, _: object) -> str: return node.text def visit_fingerprinting_rules( - self, _: object, children: list[str | Rule | None] + self, _: object, children: list[str | FingerprintRule | None] ) -> FingerprintingRules: changelog = [] rules = [] @@ -560,8 +597,8 @@ def visit_fingerprinting_rules( ) def visit_line( - self, _: object, children: tuple[object, list[Rule | str | None], object] - ) -> Rule | str | None: + self, _: object, children: tuple[object, list[FingerprintRule | str | None], object] + ) -> FingerprintRule | str | None: _, line, _ = children comment_or_rule_or_empty = line[0] if comment_or_rule_or_empty: @@ -572,17 +609,17 @@ def visit_rule( self, _: object, children: tuple[ - object, list[Match], object, object, object, tuple[list[str], dict[str, Any]] + object, list[FingerprintMatch], object, object, object, FingerprintWithAttributes ], - ) -> Rule: + ) -> FingerprintRule: _, matcher, _, _, _, (fingerprint, attributes) = children - return Rule(matcher, fingerprint, attributes) + return FingerprintRule(matcher, fingerprint, attributes) def visit_matcher( self, _: object, children: tuple[object, list[str], str, object, str] - ) -> Match: + ) -> FingerprintMatch: _, negation, ty, _, argument = children - return Match(ty, argument, bool(negation)) + return FingerprintMatch(ty, argument, bool(negation)) def visit_matcher_type(self, _: object, children: list[str]) -> str: return children[0] @@ -594,16 +631,18 @@ def visit_argument(self, _: object, children: list[str]) -> str: def visit_fingerprint( self, _: object, children: list[str | tuple[str, str]] - ) -> tuple[list[str], dict[str, str]]: + ) -> FingerprintWithAttributes: fingerprint = [] - attributes = {} + attributes: FingerprintRuleAttributes = {} for item in children: if isinstance(item, tuple): - key, value = item - attributes[key] = value + # This should always be true, because otherwise an error would have been raised when + # we visited the child node in `visit_fp_attribute` + if item[0] == "title": + attributes["title"] = item[1] else: fingerprint.append(item) - return fingerprint, attributes + return FingerprintWithAttributes(fingerprint, attributes) def visit_fp_value(self, _: object, children: tuple[object, str, object, object]) -> str: _, argument, _, _ = children @@ -634,7 +673,7 @@ def visit_quoted_key(self, node: RegexNode, _: object) -> str: return node.match.groups()[0].lstrip("!") -def _load_configs() -> dict[str, list[Rule]]: +def _load_configs() -> 
dict[str, list[FingerprintRule]]: if not CONFIGS_DIR.exists(): logger.error( "Failed to load Fingerprinting Configs, invalid _config_dir: %s", @@ -645,7 +684,7 @@ def _load_configs() -> dict[str, list[Rule]]: f"Failed to load Fingerprinting Configs, invalid _config_dir: '{CONFIGS_DIR}'" ) - configs: dict[str, list[Rule]] = {} + configs: dict[str, list[FingerprintRule]] = {} for config_file_path in sorted(CONFIGS_DIR.glob("**/*.txt")): config_name = config_file_path.parent.name diff --git a/src/sentry/grouping/grouping_info.py b/src/sentry/grouping/grouping_info.py index 92e4d60e6bc8ac..5e7ff6e9f695da 100644 --- a/src/sentry/grouping/grouping_info.py +++ b/src/sentry/grouping/grouping_info.py @@ -1,4 +1,5 @@ import logging +from collections.abc import Mapping from typing import Any from sentry.api.exceptions import ResourceDoesNotExist @@ -88,7 +89,7 @@ def _check_for_mismatched_hashes( def get_grouping_info_from_variants( - variants: dict[str, BaseVariant], + variants: Mapping[str, BaseVariant], ) -> dict[str, dict[str, Any]]: """ Given a dictionary of variant objects, create and return a copy of the dictionary in which each diff --git a/src/sentry/grouping/ingest/config.py b/src/sentry/grouping/ingest/config.py index c620c8f559e63f..eaaf2d79a9ab0c 100644 --- a/src/sentry/grouping/ingest/config.py +++ b/src/sentry/grouping/ingest/config.py @@ -20,7 +20,9 @@ Job = MutableMapping[str, Any] # Used to migrate projects that have no activity via getsentry scripts -CONFIGS_TO_DEPRECATE = () +CONFIGS_TO_DEPRECATE = set(CONFIGURATIONS.keys()) - { + DEFAULT_GROUPING_CONFIG, +} def update_grouping_config_if_needed(project: Project, source: str) -> None: diff --git a/src/sentry/grouping/ingest/grouphash_metadata.py b/src/sentry/grouping/ingest/grouphash_metadata.py index 13a989a2ff216d..e30006d11a846d 100644 --- a/src/sentry/grouping/ingest/grouphash_metadata.py +++ b/src/sentry/grouping/ingest/grouphash_metadata.py @@ -1,15 +1,51 @@ from __future__ import annotations import logging +from typing import Any, cast + +from typing_extensions import TypeIs from sentry.eventstore.models import Event -from sentry.grouping.variants import BaseVariant +from sentry.grouping.component import ( + ChainedExceptionGroupingComponent, + CSPGroupingComponent, + ExceptionGroupingComponent, + ExpectCTGroupingComponent, + ExpectStapleGroupingComponent, + HPKPGroupingComponent, + MessageGroupingComponent, + StacktraceGroupingComponent, + TemplateGroupingComponent, + ThreadsGroupingComponent, +) +from sentry.grouping.variants import ( + BaseVariant, + ChecksumVariant, + ComponentVariant, + CustomFingerprintVariant, + HashedChecksumVariant, + SaltedComponentVariant, + VariantsByDescriptor, +) from sentry.models.grouphash import GroupHash from sentry.models.grouphashmetadata import GroupHashMetadata, HashBasis from sentry.models.project import Project +from sentry.types.grouphash_metadata import ( + ChecksumHashingMetadata, + FallbackHashingMetadata, + FingerprintHashingMetadata, + HashingMetadata, + MessageHashingMetadata, + SecurityHashingMetadata, + StacktraceHashingMetadata, + TemplateHashingMetadata, +) +from sentry.utils import metrics +from sentry.utils.metrics import MutableTags logger = logging.getLogger(__name__) + GROUPING_METHODS_BY_DESCRIPTION = { # All frames from a stacktrace at the top level of the event, in `exception`, or in # `threads` (top-level stacktraces come, for example, from using `attach_stacktrace` @@ -35,6 +71,8 @@ # Security reports (CSP, expect-ct, and the like) "URL": 
HashBasis.SECURITY_VIOLATION, "hostname": HashBasis.SECURITY_VIOLATION, + # CSP reports of `unsafe-inline` and `unsafe-eval` violations + "violation": HashBasis.SECURITY_VIOLATION, # Django template errors, which don't report a full stacktrace "template": HashBasis.TEMPLATE, # Hash set directly on the event by the client, under the key `checksum` @@ -44,8 +82,21 @@ "fallback": HashBasis.FALLBACK, } +# TODO: For now not including `csp_directive` and `csp_script_violation` - let's see if we end up +# wanting them +METRICS_TAGS_BY_HASH_BASIS = { + HashBasis.STACKTRACE: ["stacktrace_type", "stacktrace_location"], + HashBasis.MESSAGE: ["message_source", "message_parameterized"], + HashBasis.FINGERPRINT: ["fingerprint_source"], + HashBasis.SECURITY_VIOLATION: ["security_report_type"], + HashBasis.TEMPLATE: [], + HashBasis.CHECKSUM: [], + HashBasis.FALLBACK: ["fallback_reason"], + HashBasis.UNKNOWN: [], +} + -def create_or_update_grouphash_metadata( +def create_or_update_grouphash_metadata_if_needed( event: Event, project: Project, grouphash: GroupHash, @@ -57,12 +108,18 @@ def create_or_update_grouphash_metadata( # we'll have to override the metadata creation date for them. if created: - hash_basis = _get_hash_basis(event, project, variants) + with metrics.timer( + "grouping.grouphashmetadata.get_hash_basis_and_metadata" + ) as metrics_timer_tags: + hash_basis, hashing_metadata = get_hash_basis_and_metadata( + event, project, variants, metrics_timer_tags + ) GroupHashMetadata.objects.create( grouphash=grouphash, latest_grouping_config=grouping_config, hash_basis=hash_basis, + hashing_metadata=hashing_metadata, ) elif grouphash.metadata and grouphash.metadata.latest_grouping_config != grouping_config: # Keep track of the most recent config which computed this hash, so that once a @@ -71,8 +128,19 @@ def create_or_update_grouphash_metadata( grouphash.metadata.update(latest_grouping_config=grouping_config) -def _get_hash_basis(event: Event, project: Project, variants: dict[str, BaseVariant]) -> HashBasis: - main_variant = ( +def get_hash_basis_and_metadata( + event: Event, + project: Project, + variants: dict[str, BaseVariant], + metrics_timer_tags: MutableTags, +) -> tuple[HashBasis, HashingMetadata]: + hashing_metadata: HashingMetadata = {} + + # TODO: This (and `contributing_variant` below) are typed as `Any` so that we don't have to cast + # them to whatever specific subtypes of `BaseVariant` and `GroupingComponent` (respectively) + # each of the helper calls below requires. Casting once, to a type retrieved from a look-up, + # doesn't work, but maybe there's a better way? + contributing_variant: Any = ( variants["app"] # TODO: We won't need this 'if' once we stop returning both app and system contributing # variants @@ -87,20 +155,293 @@ def _get_hash_basis(event: Event, project: Project, variants: dict[str, BaseVari else [variant for variant in variants.values() if variant.contributes][0] ) ) + contributing_component: Any = ( + # There should only ever be a single contributing component here at the top level + [value for value in contributing_variant.component.values if value.contributes][0] + if hasattr(contributing_variant, "component") + else None + ) + + # Hybrid fingerprinting adds 'modified' to the beginning of the description of whatever method + # was used before the extra fingerprint was added. We classify events with hybrid fingerprints + # by the `{{ default }}` portion of their grouping, so strip the prefix before doing the + # look-up. 
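# For example (illustrative description string): a contributing variant described as
# "modified in-app exception stacktrace" counts as a hybrid fingerprint, and the
# dictionary look-up below uses the stripped key "in-app exception stacktrace", so it
# still resolves to a stacktrace-based hash basis.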
+ is_hybrid_fingerprint = contributing_variant.description.startswith("modified") + method_description = contributing_variant.description.replace("modified ", "") try: - hash_basis = GROUPING_METHODS_BY_DESCRIPTION[ - # Hybrid fingerprinting adds 'modified' to the beginning of the description of whatever - # method was used beore the extra fingerprint was added, so strip that off before - # looking it up - main_variant.description.replace("modified ", "") - ] + hash_basis = GROUPING_METHODS_BY_DESCRIPTION[method_description] except KeyError: logger.exception( "Encountered unknown grouping method '%s'.", - main_variant.description, - extra={"project": project.id, "event": event.event_id}, + contributing_variant.description, + extra={"project": project.id, "event_id": event.event_id}, + ) + return (HashBasis.UNKNOWN, {}) + + metrics_timer_tags["hash_basis"] = hash_basis + + # Gather different metadata depending on the grouping method + + if hash_basis == HashBasis.STACKTRACE: + hashing_metadata = _get_stacktrace_hashing_metadata( + contributing_variant, contributing_component + ) + + elif hash_basis == HashBasis.MESSAGE: + hashing_metadata = _get_message_hashing_metadata(contributing_component) + + elif hash_basis == HashBasis.FINGERPRINT: + hashing_metadata = _get_fingerprint_hashing_metadata(contributing_variant) + + elif hash_basis == HashBasis.SECURITY_VIOLATION: + hashing_metadata = _get_security_hashing_metadata(contributing_component) + + elif hash_basis == HashBasis.TEMPLATE: + hashing_metadata = _get_template_hashing_metadata(contributing_component) + + elif hash_basis == HashBasis.CHECKSUM: + hashing_metadata = _get_checksum_hashing_metadata(contributing_variant) + + elif hash_basis == HashBasis.FALLBACK: + hashing_metadata = _get_fallback_hashing_metadata( + # TODO: Once https://peps.python.org/pep-0728 is a thing (still in draft but + # theoretically on track for 3.14), we can mark `VariantsByDescriptor` as closed and + # annotate `variants` as a `VariantsByDescriptor` instance in the spot where it's created + # and in all of the spots where it gets passed function to function. (Without the + # closed-ness, the return values of `.items()` and `.values()` don't get typed as + # `BaseVariant`, so for now we need to keep `variants` typed as `dict[str, BaseVariant]` + # until we get here.) 
+ cast(VariantsByDescriptor, variants) + ) + + if is_hybrid_fingerprint: + hashing_metadata.update( + _get_fingerprint_hashing_metadata(contributing_variant, is_hybrid=True) + ) + + return hash_basis, hashing_metadata + + +def record_grouphash_metadata_metrics(grouphash_metadata: GroupHashMetadata) -> None: + # TODO: Once https://peps.python.org/pep-0728 is a thing (still in draft but theoretically on + # track for 3.14), we can mark the various hashing metadata types as closed and that should + # narrow the types for the tag values such that we can stop stringifying everything + + # TODO: For now, until we backfill data for pre-existing hashes, these metrics are going + # to be somewhat skewed + + # Define a helper for this check so that it can double as a type guard + def is_stacktrace_hashing( + _hashing_metadata: HashingMetadata, + hash_basis: str, + ) -> TypeIs[StacktraceHashingMetadata]: + return hash_basis == HashBasis.STACKTRACE + + hash_basis = grouphash_metadata.hash_basis + hashing_metadata = grouphash_metadata.hashing_metadata + + if hash_basis: + hash_basis_tags: dict[str, str] = {"hash_basis": hash_basis} + if hashing_metadata: + hash_basis_tags["is_hybrid_fingerprint"] = str( + hashing_metadata.get("is_hybrid_fingerprint", False) + ) + metrics.incr( + "grouping.grouphashmetadata.event_hash_basis", sample_rate=1.0, tags=hash_basis_tags ) - hash_basis = HashBasis.UNKNOWN - return hash_basis + if hashing_metadata: + hashing_metadata_tags: dict[str, str | bool] = { + tag: str(hashing_metadata.get(tag)) + for tag in METRICS_TAGS_BY_HASH_BASIS[hash_basis] + } + if is_stacktrace_hashing(hashing_metadata, hash_basis): + hashing_metadata_tags["chained_exception"] = str( + int(hashing_metadata.get("num_stacktraces", 1)) > 1 + ) + if hashing_metadata_tags: + metrics.incr( + f"grouping.grouphashmetadata.event_hashing_metadata.{hash_basis}", + sample_rate=1.0, + tags=hashing_metadata_tags, + ) + + +def _get_stacktrace_hashing_metadata( + contributing_variant: ComponentVariant, + contributing_component: ( + StacktraceGroupingComponent + | ExceptionGroupingComponent + | ChainedExceptionGroupingComponent + | ThreadsGroupingComponent + ), +) -> StacktraceHashingMetadata: + return { + "stacktrace_type": "in_app" if "in-app" in contributing_variant.description else "system", + "stacktrace_location": ( + "exception" + if "exception" in contributing_variant.description + else "thread" if "thread" in contributing_variant.description else "top-level" + ), + "num_stacktraces": ( + len(contributing_component.values) + if contributing_component.id == "chained-exception" + else 1 + ), + } + + +def _get_message_hashing_metadata( + contributing_component: ( + MessageGroupingComponent | ExceptionGroupingComponent | ChainedExceptionGroupingComponent + ), +) -> MessageHashingMetadata: + # In the simplest case, we already have the component we need to check + if isinstance(contributing_component, MessageGroupingComponent): + return { + "message_source": "message", + "message_parameterized": ( + contributing_component.hint == "stripped event-specific values" + ), + } + + # Otherwise, we have to look in the nested structure to figure out if the message was + # parameterized. If it's a single exception, we can just check its subcomponents directly, but + # if it's a chained exception we have to dig in an extra level, and look at the subcomponents of + # all of its children. (The subcomponents are things like stacktrace, error type, error value, + # etc.) 
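# Illustratively (using the component classes from this diff, with values elided), a
# chained exception component looks roughly like:
#
#     ChainedExceptionGroupingComponent(values=[
#         ExceptionGroupingComponent(values=[ErrorTypeGroupingComponent(...),
#                                            ErrorValueGroupingComponent(...),
#                                            StacktraceGroupingComponent(...)]),
#         ExceptionGroupingComponent(values=[...]),
#     ])
#
# so we gather every contributing child exception and scan its subcomponents for the
# "stripped event-specific values" hint.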
+ exceptions_to_check: list[ExceptionGroupingComponent] = [] + if isinstance(contributing_component, ChainedExceptionGroupingComponent): + exceptions = contributing_component.values + exceptions_to_check = [exception for exception in exceptions if exception.contributes] + else: + exception = contributing_component + exceptions_to_check = [exception] + + for exception in exceptions_to_check: + for subcomponent in exception.values: + if subcomponent.contributes and subcomponent.hint == "stripped event-specific values": + return {"message_source": "exception", "message_parameterized": True} + + return {"message_source": "exception", "message_parameterized": False} + + +def _get_fingerprint_hashing_metadata( + contributing_variant: CustomFingerprintVariant | SaltedComponentVariant, is_hybrid: bool = False +) -> FingerprintHashingMetadata: + client_fingerprint = contributing_variant.info.get("client_fingerprint") + matched_rule = contributing_variant.info.get("matched_rule") + + metadata: FingerprintHashingMetadata = { + # For simplicity, we stringify fingerprint values (which are always lists) to keep + # `hashing_metadata` a flat structure + "fingerprint": str(contributing_variant.values), + "fingerprint_source": ( + "client" + if not matched_rule + else ( + "server_builtin_rule" + if contributing_variant.type == "built_in_fingerprint" + else "server_custom_rule" + ) + ), + "is_hybrid_fingerprint": is_hybrid, + } + + # Note that these two conditions are not mutually exclusive - you can set a fingerprint in the + # SDK and have your event match a server-based rule (in which case the latter will take + # precedence) + if matched_rule: + metadata["matched_fingerprinting_rule"] = matched_rule["text"] + if client_fingerprint: + metadata["client_fingerprint"] = str(client_fingerprint) + + return metadata + + +def _get_security_hashing_metadata( + contributing_component: ( + CSPGroupingComponent + | ExpectCTGroupingComponent + | ExpectStapleGroupingComponent + | HPKPGroupingComponent + ), +) -> SecurityHashingMetadata: + subcomponents_by_id = { + subcomponent.id: subcomponent for subcomponent in contributing_component.values + } + blocked_host_key = "uri" if contributing_component.id == "csp" else "hostname" + + metadata: SecurityHashingMetadata = { + "security_report_type": contributing_component.id, + # Having a string which includes the "this is a string" quotes is a *real* footgun in terms + # of querying, so strip those off before storing the value + "blocked_host": subcomponents_by_id[blocked_host_key].values[0].strip("'"), + } + + if contributing_component.id == "csp": + metadata["csp_directive"] = subcomponents_by_id["salt"].values[0] + if subcomponents_by_id["violation"].contributes: + metadata["csp_script_violation"] = subcomponents_by_id["violation"].values[0].strip("'") + + return metadata + + +def _get_template_hashing_metadata( + contributing_component: TemplateGroupingComponent, +) -> TemplateHashingMetadata: + metadata: TemplateHashingMetadata = {} + + subcomponents_by_id = { + subcomponent.id: subcomponent for subcomponent in contributing_component.values + } + + if subcomponents_by_id["filename"].values: + metadata["template_name"] = subcomponents_by_id["filename"].values[0] + if subcomponents_by_id["context-line"].values: + metadata["template_context_line"] = subcomponents_by_id["context-line"].values[0] + + return metadata + + +def _get_checksum_hashing_metadata( + contributing_variant: ChecksumVariant | HashedChecksumVariant, +) -> ChecksumHashingMetadata: + metadata: 
ChecksumHashingMetadata = {"checksum": contributing_variant.checksum} + + if isinstance(contributing_variant, HashedChecksumVariant): + metadata["raw_checksum"] = contributing_variant.raw_checksum + + return metadata + + +def _get_fallback_hashing_metadata( + variants: VariantsByDescriptor, +) -> FallbackHashingMetadata: + # TODO: All of the specific cases handled below relate to stacktrace frames. Let's how often we + # land in the `other` category and then we can decide how much further it's worthwhile to break + # it down. + + if ( + "app" in variants + and variants["app"].component.values[0].hint == "ignored because it contains no frames" + ): + reason = "no_frames" + + elif ( + "system" in variants + and variants["system"].component.values[0].hint + == "ignored because it contains no contributing frames" + ): + reason = "no_contributing_frames" + + elif "system" in variants and "min-frames" in ( + variants["system"].component.values[0].hint or "" + ): + reason = "insufficient_contributing_frames" + + else: + reason = "other" + + return {"fallback_reason": reason} diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py index 73620569b04bb3..93b0c6e4865f61 100644 --- a/src/sentry/grouping/ingest/hashing.py +++ b/src/sentry/grouping/ingest/hashing.py @@ -21,7 +21,10 @@ load_grouping_config, ) from sentry.grouping.ingest.config import is_in_transition -from sentry.grouping.ingest.grouphash_metadata import create_or_update_grouphash_metadata +from sentry.grouping.ingest.grouphash_metadata import ( + create_or_update_grouphash_metadata_if_needed, + record_grouphash_metadata_metrics, +) from sentry.grouping.variants import BaseVariant from sentry.models.grouphash import GroupHash from sentry.models.project import Project @@ -230,9 +233,23 @@ def get_or_create_grouphashes( if options.get("grouping.grouphash_metadata.ingestion_writes_enabled") and features.has( "organizations:grouphash-metadata-creation", project.organization ): - create_or_update_grouphash_metadata( - event, project, grouphash, created, grouping_config, variants - ) + try: + # We don't expect this to throw any errors, but collecting this metadata + # shouldn't ever derail ingestion, so better to be safe + create_or_update_grouphash_metadata_if_needed( + event, project, grouphash, created, grouping_config, variants + ) + except Exception as exc: + sentry_sdk.capture_exception(exc) + + if grouphash.metadata: + record_grouphash_metadata_metrics(grouphash.metadata) + else: + # Collect a temporary metric to get a sense of how often we would be adding metadata to an + # existing hash. (Yes, this is an overestimate, because this will fire every time we see a given + # non-backfilled grouphash, not the once per non-backfilled grouphash we'd actually be doing a + # backfill, but it will give us a ceiling from which we can work down.) 
+ metrics.incr("grouping.grouphashmetadata.backfill_needed") grouphashes.append(grouphash) diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py index e8b1b15bb7db06..234759e73ddd8a 100644 --- a/src/sentry/grouping/ingest/seer.py +++ b/src/sentry/grouping/ingest/seer.py @@ -1,4 +1,5 @@ import logging +from collections.abc import Mapping from dataclasses import asdict from typing import Any @@ -16,9 +17,10 @@ from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer from sentry.seer.similarity.types import SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( + ReferrerOptions, event_content_is_seer_eligible, filter_null_from_string, - get_stacktrace_string, + get_stacktrace_string_with_metrics, killswitch_enabled, ) from sentry.utils import metrics @@ -28,7 +30,7 @@ logger = logging.getLogger("sentry.events.grouping") -def should_call_seer_for_grouping(event: Event, variants: dict[str, BaseVariant]) -> bool: +def should_call_seer_for_grouping(event: Event, variants: Mapping[str, BaseVariant]) -> bool: """ Use event content, feature flags, rate limits, killswitches, seer health, etc. to determine whether a call to Seer should be made. @@ -46,6 +48,10 @@ def should_call_seer_for_grouping(event: Event, variants: dict[str, BaseVariant] _has_customized_fingerprint(event, variants) or killswitch_enabled(project.id, event) or _circuit_breaker_broken(event, project) + # The rate limit check has to be last (see below) but rate-limiting aside, call this after other checks + # because it calculates the stacktrace string, which we only want to spend the time to do if we already + # know the other checks have passed. + or _has_empty_stacktrace_string(event, variants) # **Do not add any new checks after this.** The rate limit check MUST remain the last of all # the checks. # @@ -80,7 +86,7 @@ def _project_has_similarity_grouping_enabled(project: Project) -> bool: # combined with some other value). To the extent to which we're then using this function to decide # whether or not to call Seer, this means that the calculations giving rise to the default part of # the value never involve Seer input. In the long run, we probably want to change that. -def _has_customized_fingerprint(event: Event, variants: dict[str, BaseVariant]) -> bool: +def _has_customized_fingerprint(event: Event, variants: Mapping[str, BaseVariant]) -> bool: fingerprint = event.data.get("fingerprint", []) if "{{ default }}" in fingerprint: @@ -176,9 +182,30 @@ def _circuit_breaker_broken(event: Event, project: Project) -> bool: return circuit_broken +def _has_empty_stacktrace_string(event: Event, variants: Mapping[str, BaseVariant]) -> bool: + stacktrace_string = get_stacktrace_string_with_metrics( + get_grouping_info_from_variants(variants), event.platform, ReferrerOptions.INGEST + ) + if not stacktrace_string: + if stacktrace_string == "": + metrics.incr( + "grouping.similarity.did_call_seer", + sample_rate=options.get("seer.similarity.metrics_sample_rate"), + tags={ + "call_made": False, + "blocker": "empty-stacktrace-string", + }, + ) + return True + # Store the stacktrace string in the event so we only calculate it once. We need to pop it + # later so it isn't stored in the database. 
+ event.data["stacktrace_string"] = stacktrace_string + return False + + def get_seer_similar_issues( event: Event, - variants: dict[str, BaseVariant], + variants: Mapping[str, BaseVariant], num_neighbors: int = 1, ) -> tuple[dict[str, Any], GroupHash | None]: """ @@ -187,9 +214,31 @@ def get_seer_similar_issues( should go in (if any), or None if no neighbor was near enough. """ event_hash = event.get_primary_hash() - stacktrace_string = get_stacktrace_string(get_grouping_info_from_variants(variants)) exception_type = get_path(event.data, "exception", "values", -1, "type") + stacktrace_string = event.data.get( + "stacktrace_string", + get_stacktrace_string_with_metrics( + get_grouping_info_from_variants(variants), event.platform, ReferrerOptions.INGEST + ), + ) + + if not stacktrace_string: + # TODO: remove this log once we've confirmed it isn't happening + logger.info( + "get_seer_similar_issues.empty_stacktrace", + extra={ + "event_id": event.event_id, + "project_id": event.project.id, + "stacktrace_string": stacktrace_string, + }, + ) + similar_issues_metadata_empty = { + "results": [], + "similarity_model_version": SEER_SIMILARITY_MODEL_VERSION, + } + return (similar_issues_metadata_empty, None) + request_data: SimilarIssuesEmbeddingsRequest = { "event_id": event.event_id, "hash": event_hash, @@ -200,6 +249,7 @@ def get_seer_similar_issues( "referrer": "ingest", "use_reranking": options.get("seer.similarity.ingest.use_reranking"), } + event.data.pop("stacktrace_string", None) # Similar issues are returned with the closest match first seer_results = get_similarity_data_from_seer(request_data) @@ -231,7 +281,7 @@ def get_seer_similar_issues( def maybe_check_seer_for_matching_grouphash( - event: Event, variants: dict[str, BaseVariant], all_grouphashes: list[GroupHash] + event: Event, variants: Mapping[str, BaseVariant], all_grouphashes: list[GroupHash] ) -> GroupHash | None: seer_matched_grouphash = None @@ -262,6 +312,7 @@ def maybe_check_seer_for_matching_grouphash( # Once those two problems are fixed, there will only be one hash passed to this function # and we won't have to do this search to find the right one to update. primary_hash = event.get_primary_hash() + grouphash_sent = list( filter(lambda grouphash: grouphash.hash == primary_hash, all_grouphashes) )[0] diff --git a/src/sentry/grouping/strategies/base.py b/src/sentry/grouping/strategies/base.py index ab3e99949f8e4e..e7ed88a961ca4f 100644 --- a/src/sentry/grouping/strategies/base.py +++ b/src/sentry/grouping/strategies/base.py @@ -1,12 +1,19 @@ import inspect from collections.abc import Callable, Iterator, Sequence -from typing import Any, Generic, Protocol, TypeVar +from typing import Any, Generic, Protocol, TypeVar, overload from sentry import projectoptions from sentry.eventstore.models import Event -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import ( + BaseGroupingComponent, + ExceptionGroupingComponent, + FrameGroupingComponent, + StacktraceGroupingComponent, +) from sentry.grouping.enhancer import Enhancements from sentry.interfaces.base import Interface +from sentry.interfaces.exception import SingleException +from sentry.interfaces.stacktrace import Frame, Stacktrace STRATEGIES: dict[str, "Strategy[Any]"] = {} @@ -24,6 +31,9 @@ DEFAULT_GROUPING_ENHANCEMENTS_BASE = "common:2019-03-23" DEFAULT_GROUPING_FINGERPRINTING_BASES: list[str] = [] +# TODO: Hack to make `ReturnedVariants` (no pun intended) covariant. 
At some point we should +# probably turn `ReturnedVariants` into a Mapping (immutable), since in practice it's read-only. +GroupingComponent = TypeVar("GroupingComponent", bound=BaseGroupingComponent[Any]) ReturnedVariants = dict[str, GroupingComponent] ConcreteInterface = TypeVar("ConcreteInterface", bound=Interface, contravariant=True) @@ -115,9 +125,24 @@ def get_grouping_component( """ return self._get_strategy_dict(interface, event=event, **kwargs) + @overload + def get_single_grouping_component( + self, interface: Frame, *, event: Event, **kwargs: Any + ) -> FrameGroupingComponent: ... + + @overload + def get_single_grouping_component( + self, interface: SingleException, *, event: Event, **kwargs: Any + ) -> ExceptionGroupingComponent: ... + + @overload + def get_single_grouping_component( + self, interface: Stacktrace, *, event: Event, **kwargs: Any + ) -> StacktraceGroupingComponent: ... + def get_single_grouping_component( self, interface: Interface, *, event: Event, **kwargs: Any - ) -> GroupingComponent: + ) -> BaseGroupingComponent: """Invokes a delegate grouping strategy. If no such delegate is configured a fallback grouping component is returned. """ @@ -193,7 +218,7 @@ def variant_processor(self, func: VariantProcessor) -> VariantProcessor: def get_grouping_component( self, event: Event, context: GroupingContext, variant: str | None = None - ) -> None | GroupingComponent | ReturnedVariants: + ) -> None | BaseGroupingComponent | ReturnedVariants: """Given a specific variant this calculates the grouping component.""" args = [] iface = event.interfaces.get(self.interface) diff --git a/src/sentry/grouping/strategies/legacy.py b/src/sentry/grouping/strategies/legacy.py index 455deea1b95185..4e6dc1581f8323 100644 --- a/src/sentry/grouping/strategies/legacy.py +++ b/src/sentry/grouping/strategies/legacy.py @@ -3,7 +3,21 @@ from typing import Any from sentry.eventstore.models import Event -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import ( + ChainedExceptionGroupingComponent, + ContextLineGroupingComponent, + ErrorTypeGroupingComponent, + ErrorValueGroupingComponent, + ExceptionGroupingComponent, + FilenameGroupingComponent, + FrameGroupingComponent, + FunctionGroupingComponent, + LineNumberGroupingComponent, + ModuleGroupingComponent, + StacktraceGroupingComponent, + SymbolGroupingComponent, + ThreadsGroupingComponent, +) from sentry.grouping.strategies.base import ( GroupingContext, ReturnedVariants, @@ -164,17 +178,15 @@ def single_exception_legacy( interface: SingleException, event: Event, context: GroupingContext, **meta: Any ) -> ReturnedVariants: - type_component = GroupingComponent( - id="type", + type_component = ErrorTypeGroupingComponent( values=[interface.type] if interface.type else [], contributes=False, ) - value_component = GroupingComponent( - id="value", + value_component = ErrorValueGroupingComponent( values=[interface.value] if interface.value else [], contributes=False, ) - stacktrace_component = GroupingComponent(id="stacktrace") + stacktrace_component = StacktraceGroupingComponent() if interface.stacktrace is not None: stacktrace_component = context.get_single_grouping_component( @@ -195,8 +207,8 @@ def single_exception_legacy( value_component.update(contributes=True) return { - context["variant"]: GroupingComponent( - id="exception", values=[stacktrace_component, type_component, value_component] + context["variant"]: ExceptionGroupingComponent( + values=[stacktrace_component, type_component, value_component] ) } 
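To make the pattern in this hunk concrete: the component id now comes from the class rather than from an `id=` argument, and (as before) `contributes` is derived from the children when it isn't passed explicitly. A minimal sketch with made-up literal values:

from sentry.grouping.component import (
    ErrorTypeGroupingComponent,
    ErrorValueGroupingComponent,
    ExceptionGroupingComponent,
)

# Old style: GroupingComponent(id="type", values=["ValueError"])
# New style: the id is a class attribute, so only the payload is passed.
type_component = ErrorTypeGroupingComponent(values=["ValueError"], contributes=True)
value_component = ErrorValueGroupingComponent(values=["invalid literal"], contributes=True)
exception_component = ExceptionGroupingComponent(values=[type_component, value_component])

assert exception_component.id == "exception"  # the id is baked into the class
assert exception_component.contributes  # derived from the contributing children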
@@ -231,7 +243,7 @@ def chained_exception_legacy( if stacktrace_component is None or not stacktrace_component.contributes: value.update(contributes=False, hint="exception has no stacktrace") - return {context["variant"]: GroupingComponent(id="chained-exception", values=values)} + return {context["variant"]: ChainedExceptionGroupingComponent(values=values)} @chained_exception_legacy.variant_processor @@ -262,7 +274,7 @@ def frame_legacy( # Safari throws [native code] frames in for calls like ``forEach`` # whereas Chrome ignores these. Let's remove it from the hashing algo # so that they're more likely to group together - filename_component = GroupingComponent(id="filename") + filename_component = FilenameGroupingComponent() if interface.filename == "": filename_component.update( contributes=False, values=[interface.filename], hint="anonymous filename discarded" @@ -293,17 +305,10 @@ def frame_legacy( # if we have a module we use that for grouping. This will always # take precedence over the filename, even if the module is # considered unhashable. - module_component = GroupingComponent(id="module") + module_component = ModuleGroupingComponent() if interface.module: if is_unhashable_module_legacy(interface, platform): - module_component.update( - values=[ - GroupingComponent( - id="salt", values=[""], hint="normalized generated module name" - ) - ], - hint="ignored module", - ) + module_component.update(values=[""], hint="normalized generated module name") else: module_name, module_hint = remove_module_outliers_legacy(interface.module, platform) module_component.update(values=[module_name], hint=module_hint) @@ -313,7 +318,7 @@ def frame_legacy( ) # Context line when available is the primary contributor - context_line_component = GroupingComponent(id="context-line") + context_line_component = ContextLineGroupingComponent() if interface.context_line is not None: if len(interface.context_line) > 120: context_line_component.update(hint="discarded because line too long") @@ -322,9 +327,9 @@ def frame_legacy( else: context_line_component.update(values=[interface.context_line]) - symbol_component = GroupingComponent(id="symbol") - function_component = GroupingComponent(id="function") - lineno_component = GroupingComponent(id="lineno") + symbol_component = SymbolGroupingComponent() + function_component = FunctionGroupingComponent() + lineno_component = LineNumberGroupingComponent() # The context line grouping information is the most reliable one. 
# If we did not manage to find some information there, we want to @@ -346,11 +351,7 @@ def frame_legacy( elif func: if is_unhashable_function_legacy(func): function_component.update( - values=[ - GroupingComponent( - id="salt", values=[""], hint="normalized lambda function name" - ) - ] + values=[""], hint="normalized lambda function name" ) else: function, function_hint = remove_function_outliers_legacy(func) @@ -380,8 +381,7 @@ def frame_legacy( ) return { - context["variant"]: GroupingComponent( - id="frame", + context["variant"]: FrameGroupingComponent( values=[ module_component, filename_component, @@ -392,6 +392,7 @@ def frame_legacy( ], contributes=contributes, hint=hint, + in_app=interface.in_app, ) } @@ -461,24 +462,22 @@ def threads_legacy( thread_count = len(interface.values) if thread_count != 1: return { - context["variant"]: GroupingComponent( - id="threads", + context["variant"]: ThreadsGroupingComponent( contributes=False, hint="ignored because contains %d threads" % thread_count, ) } - stacktrace = interface.values[0].get("stacktrace") + stacktrace: Stacktrace = interface.values[0].get("stacktrace") if not stacktrace: return { - context["variant"]: GroupingComponent( - id="threads", contributes=False, hint="thread has no stacktrace" + context["variant"]: ThreadsGroupingComponent( + contributes=False, hint="thread has no stacktrace" ) } return { - context["variant"]: GroupingComponent( - id="threads", + context["variant"]: ThreadsGroupingComponent( values=[context.get_single_grouping_component(stacktrace, event=event, **meta)], ) } diff --git a/src/sentry/grouping/strategies/message.py b/src/sentry/grouping/strategies/message.py index 4c71541a6d3122..7615e98c23ca4b 100644 --- a/src/sentry/grouping/strategies/message.py +++ b/src/sentry/grouping/strategies/message.py @@ -4,7 +4,7 @@ from sentry import analytics from sentry.eventstore.models import Event from sentry.features.rollout import in_rollout_group -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import MessageGroupingComponent from sentry.grouping.parameterization import Parameterizer, UniqueIdExperiment from sentry.grouping.strategies.base import ( GroupingContext, @@ -108,17 +108,10 @@ def message_v1( raw = interface.message or interface.formatted or "" normalized = normalize_message_for_grouping(raw, event) hint = "stripped event-specific values" if raw != normalized else None - return { - context["variant"]: GroupingComponent( - id="message", - values=[normalized], - hint=hint, - ) - } + return {context["variant"]: MessageGroupingComponent(values=[normalized], hint=hint)} else: return { - context["variant"]: GroupingComponent( - id="message", + context["variant"]: MessageGroupingComponent( values=[interface.message or interface.formatted or ""], ) } diff --git a/src/sentry/grouping/strategies/newstyle.py b/src/sentry/grouping/strategies/newstyle.py index e6a6d0e1c92078..15daac601845bc 100644 --- a/src/sentry/grouping/strategies/newstyle.py +++ b/src/sentry/grouping/strategies/newstyle.py @@ -3,11 +3,25 @@ import itertools import logging import re +from collections import Counter from collections.abc import Generator from typing import Any from sentry.eventstore.models import Event -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import ( + ChainedExceptionGroupingComponent, + ContextLineGroupingComponent, + ErrorTypeGroupingComponent, + ErrorValueGroupingComponent, + ExceptionGroupingComponent, + FilenameGroupingComponent, + 
FrameGroupingComponent, + FunctionGroupingComponent, + ModuleGroupingComponent, + NSErrorGroupingComponent, + StacktraceGroupingComponent, + ThreadsGroupingComponent, +) from sentry.grouping.strategies.base import ( GroupingContext, ReturnedVariants, @@ -116,20 +130,17 @@ def get_filename_component( filename: str | None, platform: str | None, allow_file_origin: bool = False, -) -> GroupingComponent: +) -> FilenameGroupingComponent: """Attempt to normalize filenames by detecting special filenames and by using the basename only. """ if filename is None: - return GroupingComponent(id="filename") + return FilenameGroupingComponent() # Only use the platform independent basename for grouping and # lowercase it filename = _basename_re.split(filename)[-1].lower() - filename_component = GroupingComponent( - id="filename", - values=[filename], - ) + filename_component = FilenameGroupingComponent(values=[filename]) if has_url_origin(abs_path, allow_file_origin=allow_file_origin): filename_component.update(contributes=False, hint="ignored because frame points to a URL") @@ -151,17 +162,14 @@ def get_module_component( module: str | None, platform: str | None, context: GroupingContext, -) -> GroupingComponent: +) -> ModuleGroupingComponent: """Given an absolute path, module and platform returns the module component with some necessary cleaning performed. """ if module is None: - return GroupingComponent(id="module") + return ModuleGroupingComponent() - module_component = GroupingComponent( - id="module", - values=[module], - ) + module_component = ModuleGroupingComponent(values=[module]) if platform == "javascript" and "/" in module and abs_path and abs_path.endswith(module): module_component.update(contributes=False, hint="ignored bad javascript module") @@ -200,7 +208,7 @@ def get_function_component( platform: str | None, sourcemap_used: bool = False, context_line_available: bool = False, -) -> GroupingComponent: +) -> FunctionGroupingComponent: """ Attempt to normalize functions by removing common platform outliers. @@ -230,12 +238,9 @@ def get_function_component( func = trim_function_name(func, platform) if not func: - return GroupingComponent(id="function") + return FunctionGroupingComponent() - function_component = GroupingComponent( - id="function", - values=[func], - ) + function_component = FunctionGroupingComponent(values=[func]) if platform == "ruby": if func.startswith("block "): @@ -328,11 +333,16 @@ def frame( context_line_available=context_line_available, ) - values = [module_component, filename_component, function_component] + values: list[ + ContextLineGroupingComponent + | FilenameGroupingComponent + | FunctionGroupingComponent + | ModuleGroupingComponent + ] = [module_component, filename_component, function_component] if context_line_component is not None: values.append(context_line_component) - rv = GroupingComponent(id="frame", values=values) + rv = FrameGroupingComponent(values=values, in_app=frame.in_app) # if we are in javascript fuzzing mode we want to disregard some # frames consistently. These force common bad stacktraces together @@ -368,7 +378,7 @@ def frame( def get_contextline_component( frame: Frame, platform: str | None, function: str, context: GroupingContext -) -> GroupingComponent: +) -> ContextLineGroupingComponent: """Returns a contextline component. The caller's responsibility is to make sure context lines are only used for platforms where we trust the quality of the sourcecode. 
It does however protect against some bad @@ -376,12 +386,9 @@ def get_contextline_component( """ line = " ".join((frame.context_line or "").expandtabs(2).split()) if not line: - return GroupingComponent(id="context-line") + return ContextLineGroupingComponent() - component = GroupingComponent( - id="context-line", - values=[line], - ) + component = ContextLineGroupingComponent(values=[line]) if line: if len(frame.context_line) > 120: component.update(hint="discarded because line too long", contributes=False) @@ -498,8 +505,7 @@ def stacktrace_variant_processor( def single_exception( interface: SingleException, event: Event, context: GroupingContext, **meta: Any ) -> ReturnedVariants: - type_component = GroupingComponent( - id="type", + type_component = ErrorTypeGroupingComponent( values=[interface.type] if interface.type else [], ) system_type_component = type_component.shallow_copy() @@ -522,8 +528,7 @@ def single_exception( contributes=False, hint="ignored because exception is synthetic" ) if interface.mechanism.meta and "ns_error" in interface.mechanism.meta: - ns_error_component = GroupingComponent( - id="ns-error", + ns_error_component = NSErrorGroupingComponent( values=[ interface.mechanism.meta["ns_error"].get("domain"), interface.mechanism.meta["ns_error"].get("code"), @@ -533,18 +538,23 @@ def single_exception( if interface.stacktrace is not None: with context: context["exception_data"] = interface.to_json() - stacktrace_variants = context.get_grouping_component( - interface.stacktrace, event=event, **meta + stacktrace_variants: dict[str, StacktraceGroupingComponent] = ( + context.get_grouping_component(interface.stacktrace, event=event, **meta) ) else: stacktrace_variants = { - "app": GroupingComponent(id="stacktrace"), + "app": StacktraceGroupingComponent(), } rv = {} for variant, stacktrace_component in stacktrace_variants.items(): - values = [ + values: list[ + ErrorTypeGroupingComponent + | ErrorValueGroupingComponent + | NSErrorGroupingComponent + | StacktraceGroupingComponent + ] = [ stacktrace_component, system_type_component if variant == "system" else type_component, ] @@ -553,9 +563,7 @@ def single_exception( values.append(ns_error_component) if context["with_exception_value_fallback"]: - value_component = GroupingComponent( - id="value", - ) + value_component = ErrorValueGroupingComponent() raw = interface.value if raw is not None: @@ -587,7 +595,9 @@ def single_exception( values.append(value_component) - rv[variant] = GroupingComponent(id="exception", values=values) + rv[variant] = ExceptionGroupingComponent( + values=values, frame_counts=stacktrace_component.frame_counts + ) return rv @@ -628,7 +638,7 @@ def chained_exception( return exception_components[id(exceptions[0])] # Case 2: produce a component for each chained exception - by_name: dict[str, list[GroupingComponent]] = {} + by_name: dict[str, list[ExceptionGroupingComponent]] = {} for exception in exceptions: for name, component in exception_components[id(exception)].items(): @@ -637,9 +647,15 @@ def chained_exception( rv = {} for name, component_list in by_name.items(): - rv[name] = GroupingComponent( - id="chained-exception", + # Calculate an aggregate tally of the different types of frames (in-app vs system, + # contributing or not) across all of the exceptions in the chain + total_frame_counts: Counter[str] = Counter() + for exception_component in component_list: + total_frame_counts += exception_component.frame_counts + + rv[name] = ChainedExceptionGroupingComponent( values=component_list, + 
frame_counts=total_frame_counts, ) return rv @@ -777,8 +793,7 @@ def threads( return thread_variants return { - "app": GroupingComponent( - id="threads", + "app": ThreadsGroupingComponent( contributes=False, hint=( "ignored because does not contain exactly one crashing, " @@ -797,18 +812,14 @@ def _filtered_threads( stacktrace = threads[0].get("stacktrace") if not stacktrace: - return { - "app": GroupingComponent( - id="threads", contributes=False, hint="thread has no stacktrace" - ) - } + return {"app": ThreadsGroupingComponent(contributes=False, hint="thread has no stacktrace")} rv = {} for name, stacktrace_component in context.get_grouping_component( stacktrace, event=event, **meta ).items(): - rv[name] = GroupingComponent(id="threads", values=[stacktrace_component]) + rv[name] = ThreadsGroupingComponent(values=[stacktrace_component]) return rv diff --git a/src/sentry/grouping/strategies/security.py b/src/sentry/grouping/strategies/security.py index b1fe7aaceb2bc2..6e992081d646b3 100644 --- a/src/sentry/grouping/strategies/security.py +++ b/src/sentry/grouping/strategies/security.py @@ -1,44 +1,53 @@ from typing import Any from sentry.eventstore.models import Event -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import ( + CSPGroupingComponent, + ExpectCTGroupingComponent, + ExpectStapleGroupingComponent, + HostnameGroupingComponent, + HPKPGroupingComponent, + SaltGroupingComponent, + URIGroupingComponent, + ViolationGroupingComponent, +) from sentry.grouping.strategies.base import ( GroupingContext, ReturnedVariants, produces_variants, strategy, ) -from sentry.interfaces.security import Csp, ExpectCT, ExpectStaple, Hpkp, SecurityReport +from sentry.interfaces.security import Csp, ExpectCT, ExpectStaple, Hpkp -def _security_v1( - reported_id: str, obj: SecurityReport, context: GroupingContext, **meta: Any +@strategy(ids=["expect-ct:v1"], interface=ExpectCT, score=1000) +@produces_variants(["default"]) +def expect_ct_v1( + interface: ExpectCT, event: Event, context: GroupingContext, **meta: Any ) -> ReturnedVariants: return { - context["variant"]: GroupingComponent( - id=reported_id, + context["variant"]: ExpectCTGroupingComponent( values=[ - GroupingComponent(id="salt", values=[reported_id]), - GroupingComponent(id="hostname", values=[obj.hostname]), + SaltGroupingComponent(values=["expect-ct"]), + HostnameGroupingComponent(values=[interface.hostname]), ], ) } -@strategy(ids=["expect-ct:v1"], interface=ExpectCT, score=1000) -@produces_variants(["default"]) -def expect_ct_v1( - interface: ExpectCT, event: Event, context: GroupingContext, **meta: Any -) -> ReturnedVariants: - return _security_v1("expect-ct", interface, context=context, **meta) - - @strategy(ids=["expect-staple:v1"], interface=ExpectStaple, score=1001) @produces_variants(["default"]) def expect_staple_v1( interface: ExpectStaple, event: Event, context: GroupingContext, **meta: Any ) -> ReturnedVariants: - return _security_v1("expect-staple", interface, context=context, **meta) + return { + context["variant"]: ExpectStapleGroupingComponent( + values=[ + SaltGroupingComponent(values=["expect-staple"]), + HostnameGroupingComponent(values=[interface.hostname]), + ], + ) + } @strategy(ids=["hpkp:v1"], interface=Hpkp, score=1002) @@ -46,14 +55,21 @@ def expect_staple_v1( def hpkp_v1( interface: Hpkp, event: Event, context: GroupingContext, **meta: Any ) -> ReturnedVariants: - return _security_v1("hpkp", interface, context=context, **meta) + return { + context["variant"]: 
HPKPGroupingComponent( + values=[ + SaltGroupingComponent(values=["hpkp"]), + HostnameGroupingComponent(values=[interface.hostname]), + ], + ) + } @strategy(ids=["csp:v1"], interface=Csp, score=1003) @produces_variants(["default"]) def csp_v1(interface: Csp, event: Event, context: GroupingContext, **meta: Any) -> ReturnedVariants: - violation_component = GroupingComponent(id="violation") - uri_component = GroupingComponent(id="uri") + violation_component = ViolationGroupingComponent() + uri_component = URIGroupingComponent() if interface.local_script_violation_type: violation_component.update(values=["'%s'" % interface.local_script_violation_type]) @@ -67,10 +83,9 @@ def csp_v1(interface: Csp, event: Event, context: GroupingContext, **meta: Any) uri_component.update(values=[interface.normalized_blocked_uri]) return { - context["variant"]: GroupingComponent( - id="csp", + context["variant"]: CSPGroupingComponent( values=[ - GroupingComponent(id="salt", values=[interface.effective_directive]), + SaltGroupingComponent(values=[interface.effective_directive]), violation_component, uri_component, ], diff --git a/src/sentry/grouping/strategies/template.py b/src/sentry/grouping/strategies/template.py index 3c4e9584cd5c82..d116d7d2cd4b66 100644 --- a/src/sentry/grouping/strategies/template.py +++ b/src/sentry/grouping/strategies/template.py @@ -1,7 +1,11 @@ from typing import Any from sentry.eventstore.models import Event -from sentry.grouping.component import GroupingComponent +from sentry.grouping.component import ( + ContextLineGroupingComponent, + FilenameGroupingComponent, + TemplateGroupingComponent, +) from sentry.grouping.strategies.base import ( GroupingContext, ReturnedVariants, @@ -16,16 +20,16 @@ def template_v1( interface: Template, event: Event, context: GroupingContext, **meta: Any ) -> ReturnedVariants: - filename_component = GroupingComponent(id="filename") + filename_component = FilenameGroupingComponent() if interface.filename is not None: filename_component.update(values=[interface.filename]) - context_line_component = GroupingComponent(id="context-line") + context_line_component = ContextLineGroupingComponent() if interface.context_line is not None: context_line_component.update(values=[interface.context_line]) return { - context["variant"]: GroupingComponent( - id="template", values=[filename_component, context_line_component] + context["variant"]: TemplateGroupingComponent( + values=[filename_component, context_line_component] ) } diff --git a/src/sentry/grouping/variants.py b/src/sentry/grouping/variants.py index 4264d71f52ad2e..eafac8c45fc03e 100644 --- a/src/sentry/grouping/variants.py +++ b/src/sentry/grouping/variants.py @@ -1,16 +1,36 @@ from __future__ import annotations +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, NotRequired, TypedDict + +from sentry.grouping.component import ( + AppGroupingComponent, + DefaultGroupingComponent, + SystemGroupingComponent, +) +from sentry.grouping.fingerprinting import FingerprintRule from sentry.grouping.utils import hash_from_values, is_default_fingerprint_var from sentry.types.misc import KeyedList +if TYPE_CHECKING: + from sentry.grouping.api import FingerprintInfo + from sentry.grouping.strategies.base import StrategyConfiguration -class BaseVariant: - # The type of the variant that is reported to the UI. 
- type: str | None = None +class FingerprintVariantMetadata(TypedDict): + values: list[str] + client_values: NotRequired[list[str]] + matched_rule: NotRequired[str] + + +class BaseVariant(ABC): # This is true if `get_hash` does not return `None`. contributes = True + @property + @abstractmethod + def type(self) -> str: ... + def get_hash(self) -> str | None: return None @@ -105,12 +125,16 @@ def _get_metadata_as_dict(self): class ComponentVariant(BaseVariant): """A component variant is a variant that produces a hash from the - `GroupingComponent` it encloses. + `BaseGroupingComponent` it encloses. """ type = "component" - def __init__(self, component, config): + def __init__( + self, + component: AppGroupingComponent | SystemGroupingComponent | DefaultGroupingComponent, + config: StrategyConfiguration, + ): self.component = component self.config = config @@ -132,24 +156,24 @@ def __repr__(self): return super().__repr__() + f" contributes={self.contributes} ({self.description})" -def expose_fingerprint_dict(values, info=None): - rv = { +def expose_fingerprint_dict(values: list[str], info: FingerprintInfo) -> FingerprintVariantMetadata: + rv: FingerprintVariantMetadata = { "values": values, } - if not info: - return rv - - from sentry.grouping.fingerprinting import Rule client_values = info.get("client_fingerprint") if client_values and ( len(client_values) != 1 or not is_default_fingerprint_var(client_values[0]) ): rv["client_values"] = client_values + matched_rule = info.get("matched_rule") if matched_rule: - rule = Rule.from_json(matched_rule) - rv["matched_rule"] = rule.text + # TODO: Before late October 2024, we didn't store the rule text along with the matched rule, + # meaning there are still events out there whose `_fingerprint_info` entry doesn't have it. + # Once those events have aged out (in February or so), we can remove the default value here + # and the `test_old_event_with_no_fingerprint_rule_text` test in `test_variants.py`. 
+ rv["matched_rule"] = matched_rule.get("text", FingerprintRule.from_json(matched_rule).text) return rv @@ -159,7 +183,7 @@ class CustomFingerprintVariant(BaseVariant): type = "custom_fingerprint" - def __init__(self, values, fingerprint_info=None): + def __init__(self, values: list[str], fingerprint_info: FingerprintInfo): self.values = values self.info = fingerprint_info @@ -170,7 +194,7 @@ def description(self): def get_hash(self) -> str | None: return hash_from_values(self.values) - def _get_metadata_as_dict(self): + def _get_metadata_as_dict(self) -> FingerprintVariantMetadata: return expose_fingerprint_dict(self.values, self.info) @@ -189,7 +213,13 @@ class SaltedComponentVariant(ComponentVariant): type = "salted_component" - def __init__(self, values, component, config, fingerprint_info=None): + def __init__( + self, + values: list[str], + component: AppGroupingComponent | SystemGroupingComponent | DefaultGroupingComponent, + config: StrategyConfiguration, + fingerprint_info: FingerprintInfo, + ): ComponentVariant.__init__(self, component, config) self.values = values self.info = fingerprint_info @@ -201,7 +231,7 @@ def description(self): def get_hash(self) -> str | None: if not self.component.contributes: return None - final_values = [] + final_values: list[str | int] = [] for value in self.values: if is_default_fingerprint_var(value): final_values.extend(self.component.iter_values()) @@ -213,3 +243,14 @@ def _get_metadata_as_dict(self): rv = ComponentVariant._get_metadata_as_dict(self) rv.update(expose_fingerprint_dict(self.values, self.info)) return rv + + +class VariantsByDescriptor(TypedDict, total=False): + system: ComponentVariant + app: ComponentVariant + custom_fingerprint: CustomFingerprintVariant + built_in_fingerprint: BuiltInFingerprintVariant + checksum: ChecksumVariant + hashed_checksum: HashedChecksumVariant + default: ComponentVariant + fallback: FallbackVariant diff --git a/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py b/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py index 320d680a87853f..092ed71bbc5cc9 100644 --- a/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py +++ b/src/sentry/hybridcloud/migrations/0003_add_scopes_to_api_key_replica.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("hybridcloud", "0002_add_slug_reservation_replica_model"), ] diff --git a/src/sentry/hybridcloud/migrations/0017_add_scoping_organization_apitokenreplica.py b/src/sentry/hybridcloud/migrations/0017_add_scoping_organization_apitokenreplica.py new file mode 100644 index 00000000000000..e70659d4095006 --- /dev/null +++ b/src/sentry/hybridcloud/migrations/0017_add_scoping_organization_apitokenreplica.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-11-22 22:03 + +from django.db import migrations + +import sentry.db.models.fields.hybrid_cloud_foreign_key +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. 
Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("hybridcloud", "0016_add_control_cacheversion"), + ] + + operations = [ + migrations.AddField( + model_name="apitokenreplica", + name="scoping_organization_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.Organization", db_index=True, null=True, on_delete="CASCADE" + ), + ), + ] diff --git a/src/sentry/hybridcloud/models/apitokenreplica.py b/src/sentry/hybridcloud/models/apitokenreplica.py index 21e3982dc808c5..d9ee2be13040dc 100644 --- a/src/sentry/hybridcloud/models/apitokenreplica.py +++ b/src/sentry/hybridcloud/models/apitokenreplica.py @@ -24,6 +24,9 @@ class ApiTokenReplica(Model, HasApiScopes): expires_at = models.DateTimeField(null=True) allowed_origins = models.TextField(blank=True, null=True) date_added = models.DateTimeField(default=timezone.now) + scoping_organization_id = HybridCloudForeignKey( + "sentry.Organization", null=True, on_delete="CASCADE" + ) class Meta: app_label = "hybridcloud" diff --git a/src/sentry/hybridcloud/rpc/caching/__init__.py b/src/sentry/hybridcloud/rpc/caching/__init__.py index 11122ac4f444fa..9774c31c977145 100644 --- a/src/sentry/hybridcloud/rpc/caching/__init__.py +++ b/src/sentry/hybridcloud/rpc/caching/__init__.py @@ -1,13 +1,15 @@ from .service import ( back_with_silo_cache, + back_with_silo_cache_list, back_with_silo_cache_many, control_caching_service, region_caching_service, ) __all__ = ( - "back_with_silo_cache_many", "back_with_silo_cache", + "back_with_silo_cache_list", + "back_with_silo_cache_many", "region_caching_service", "control_caching_service", ) diff --git a/src/sentry/hybridcloud/rpc/caching/service.py b/src/sentry/hybridcloud/rpc/caching/service.py index acf347c848856d..1638cc1f381953 100644 --- a/src/sentry/hybridcloud/rpc/caching/service.py +++ b/src/sentry/hybridcloud/rpc/caching/service.py @@ -43,6 +43,8 @@ class SiloCacheBackedCallable(Generic[_R]): When cache read returns no data, the wrapped function will be invoked. The result of the wrapped function is then stored in cache. + + Ideal for 'get by id' style methods """ silo_mode: SiloMode @@ -107,12 +109,87 @@ def get_one(self, object_id: int) -> _R | None: return _consume_generator(self.resolve_from(object_id, values)) +class SiloCacheBackedListCallable(Generic[_R]): + """ + Get a list of results from cache or wrapped function. + + When cache read returns no data, the wrapped function will be + invoked. The result of the wrapped function is then stored in cache. 
+ + Ideal for 'get many X for organization' style methods + """ + + silo_mode: SiloMode + base_key: str + cb: Callable[[int], list[_R]] + type_: type[_R] + timeout: int | None + + def __init__( + self, + base_key: str, + silo_mode: SiloMode, + cb: Callable[[int], list[_R]], + t: type[_R], + timeout: int | None = None, + ): + self.base_key = base_key + self.silo_mode = silo_mode + self.cb = cb + self.type_ = t + self.timeout = timeout + + def __call__(self, object_id: int) -> list[_R]: + if ( + SiloMode.get_current_mode() != self.silo_mode + and SiloMode.get_current_mode() != SiloMode.MONOLITH + ): + return self.cb(object_id) + return self.get_results(object_id) + + def key_from(self, object_id: int) -> str: + return f"{self.base_key}:{object_id}" + + def resolve_from( + self, object_id: int, values: Mapping[str, int | str] + ) -> Generator[None, None, list[_R]]: + from .impl import _consume_generator, _delete_cache, _set_cache + + key = self.key_from(object_id) + value = values[key] + version: int + if isinstance(value, str): + try: + metrics.incr("hybridcloud.caching.list.cached", tags={"base_key": self.base_key}) + return [self.type_(**item) for item in json.loads(value)] + except (pydantic.ValidationError, JSONDecodeError, TypeError): + version = yield from _delete_cache(key, self.silo_mode) + else: + version = value + + metrics.incr("hybridcloud.caching.list.rpc", tags={"base_key": self.base_key}) + result = self.cb(object_id) + if result is not None: + cache_value = json.dumps([item.json() for item in result]) + _consume_generator(_set_cache(key, cache_value, version, self.timeout)) + return result + + def get_results(self, object_id: int) -> list[_R]: + from .impl import _consume_generator, _get_cache + + key = self.key_from(object_id) + values = _consume_generator(_get_cache([key], self.silo_mode)) + return _consume_generator(self.resolve_from(object_id, values)) + + class SiloCacheManyBackedCallable(Generic[_R]): """ Get a multiple records from cache or wrapped function. When cache read returns no or partial data, the wrapped function will be invoked with keys missing data. The result of the wrapped function will then be stored in cache. + + Ideal for 'get many by id' style methods. """ silo_mode: SiloMode @@ -238,6 +315,30 @@ def wrapper(cb: Callable[[list[int]], list[_R]]) -> "SiloCacheManyBackedCallable return wrapper +def back_with_silo_cache_list( + base_key: str, silo_mode: SiloMode, t: type[_R], timeout: int | None = None +) -> Callable[[Callable[[int], list[_R]]], "SiloCacheBackedListCallable[_R]"]: + """ + Decorator for adding local caching to RPC operations for list results + + This decorator can be applied to RPC methods that fetch a list of results + based on a single input id. This works well with methods that get a list + of results based on an organization or user id. + + If the cache read for the id value fails, the decorated function will be called and + its result will be stored in cache. The decorator also adds method on the wrapped + function for generating keys to clear cache entires with + with region_caching_service and control_caching_service. + + See app_service.installations_for_organization() for an example usage. 
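As a rough usage sketch only (the docstring points to app_service.installations_for_organization() for the real call site), the decorator could be applied along these lines; the model, the function body, and the SiloMode import path are assumptions made for the example, not part of this change:

from pydantic import BaseModel

from sentry.hybridcloud.rpc.caching import back_with_silo_cache_list
from sentry.silo.base import SiloMode  # assumed import path for the example


class RpcInstallation(BaseModel):
    # Placeholder stand-in for the real RPC model. Cached entries are written as
    # JSON via .json() and rebuilt with RpcInstallation(**data) on a cache hit.
    id: int
    status: str


@back_with_silo_cache_list("app_service.installations", SiloMode.CONTROL, RpcInstallation)
def installations_for_organization(organization_id: int) -> list[RpcInstallation]:
    # Hypothetical lookup; the real implementation would run the RPC/database query.
    return [RpcInstallation(id=1, status="installed")]


# A cache miss calls the wrapped function and stores the JSON-encoded list; a hit
# returns the deserialized list without invoking it. The key used for invalidation
# is installations_for_organization.key_from(organization_id), i.e.
# "app_service.installations:<organization_id>".
installs = installations_for_organization(42)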
+ """ + + def wrapper(cb: Callable[[int], list[_R]]) -> "SiloCacheBackedListCallable[_R]": + return SiloCacheBackedListCallable(base_key, silo_mode, cb, t, timeout) + + return wrapper + + region_caching_service = RegionCachingService.create_delegation() diff --git a/src/sentry/hybridcloud/services/replica/impl.py b/src/sentry/hybridcloud/services/replica/impl.py index ea45a5d32fd3cf..25d5936ebc740f 100644 --- a/src/sentry/hybridcloud/services/replica/impl.py +++ b/src/sentry/hybridcloud/services/replica/impl.py @@ -163,6 +163,7 @@ def upsert_replicated_api_token(self, *, api_token: RpcApiToken, region_name: st "\n".join(api_token.allowed_origins) if api_token.allowed_origins else None ), user_id=api_token.user_id, + scoping_organization_id=api_token.scoping_organization_id, ) handle_replication(ApiToken, destination) diff --git a/src/sentry/identity/github_enterprise/provider.py b/src/sentry/identity/github_enterprise/provider.py index c23da6e32b3114..1f4abb0b42aca4 100644 --- a/src/sentry/identity/github_enterprise/provider.py +++ b/src/sentry/identity/github_enterprise/provider.py @@ -1,42 +1,13 @@ -from django.core.exceptions import PermissionDenied +from typing import NoReturn -from sentry import http from sentry.identity.oauth2 import OAuth2Provider -def get_user_info(url, access_token): - with http.build_session() as session: - resp = session.get( - f"https://{url}/api/v3/user", - headers={ - "Accept": "application/vnd.github.machine-man-preview+json", - "Authorization": f"token {access_token}", - }, - verify=False, - ) - resp.raise_for_status() - return resp.json() - - class GitHubEnterpriseIdentityProvider(OAuth2Provider): key = "github_enterprise" name = "GitHub Enterprise" oauth_scopes = () - def build_identity(self, data): - data = data["data"] - access_token = data.get("access_token") - if not access_token: - raise PermissionDenied() - - # todo(meredith): this doesn't work yet, need to pass in the base url - user = get_user_info(access_token) - - return { - "type": "github_enterprise", - "id": user["id"], - "email": user["email"], - "scopes": [], # GitHub apps do not have user scopes - "data": self.get_oauth_data(data), - } + def build_identity(self, data: object) -> NoReturn: + raise NotImplementedError diff --git a/src/sentry/identity/oauth2.py b/src/sentry/identity/oauth2.py index 5eaad9e3a39b13..f7e7b521956c0b 100644 --- a/src/sentry/identity/oauth2.py +++ b/src/sentry/identity/oauth2.py @@ -322,7 +322,7 @@ def exchange_token(self, request: Request, pipeline, code): "identity.oauth2.ssl-error", extra={"url": self.access_token_url, "verify_ssl": verify_ssl}, ) - lifecycle.record_failure({"failure_reason": "ssl_error"}) + lifecycle.record_failure("ssl_error") url = self.access_token_url return { "error": "Could not verify SSL certificate", @@ -331,14 +331,14 @@ def exchange_token(self, request: Request, pipeline, code): except ConnectionError: url = self.access_token_url logger.info("identity.oauth2.connection-error", extra={"url": url}) - lifecycle.record_failure({"failure_reason": "connection_error"}) + lifecycle.record_failure("connection_error") return { "error": "Could not connect to host or service", "error_description": f"Ensure that {url} is open to connections", } except orjson.JSONDecodeError: logger.info("identity.oauth2.json-error", extra={"url": self.access_token_url}) - lifecycle.record_failure({"failure_reason": "json_error"}) + lifecycle.record_failure("json_error") return { "error": "Could not decode a JSON Response", "error_description": "We were not able to 
parse a JSON response, please try again.", @@ -355,9 +355,9 @@ def dispatch(self, request: Request, pipeline) -> HttpResponse: if error: pipeline.logger.info("identity.token-exchange-error", extra={"error": error}) lifecycle.record_failure( - {"failure_reason": "token_exchange_error", "msg": ERR_INVALID_STATE} + "token_exchange_error", extra={"failure_info": ERR_INVALID_STATE} ) - return pipeline.error(ERR_INVALID_STATE) + return pipeline.error(f"{ERR_INVALID_STATE}\nError: {error}") if state != pipeline.fetch_state("state"): pipeline.logger.info( @@ -370,7 +370,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponse: }, ) lifecycle.record_failure( - {"failure_reason": "token_exchange_error", "msg": ERR_INVALID_STATE} + "token_exchange_error", extra={"failure_info": ERR_INVALID_STATE} ) return pipeline.error(ERR_INVALID_STATE) @@ -384,7 +384,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponse: if "error" in data: pipeline.logger.info("identity.token-exchange-error", extra={"error": data["error"]}) - return pipeline.error(ERR_TOKEN_RETRIEVAL) + return pipeline.error(f"{ERR_TOKEN_RETRIEVAL}\nError: {data['error']}") # we can either expect the API to be implicit and say "im looking for # blah within state data" or we need to pass implementation + call a diff --git a/src/sentry/identity/slack/provider.py b/src/sentry/identity/slack/provider.py index 35420b37a4b887..faf99532e86335 100644 --- a/src/sentry/identity/slack/provider.py +++ b/src/sentry/identity/slack/provider.py @@ -78,12 +78,8 @@ class SlackOAuth2LoginView(OAuth2LoginView): user_scope = "" - def __init__( - self, authorize_url=None, client_id=None, scope=None, user_scope=None, *args, **kwargs - ): - super().__init__( - authorize_url=authorize_url, client_id=client_id, scope=scope, *args, **kwargs - ) + def __init__(self, authorize_url=None, client_id=None, scope=None, user_scope=None): + super().__init__(authorize_url=authorize_url, client_id=client_id, scope=scope) if user_scope is not None: self.user_scope = user_scope diff --git a/src/sentry/identity/vsts/provider.py b/src/sentry/identity/vsts/provider.py index bc17268b06d4a1..6cd3cfe481ed70 100644 --- a/src/sentry/identity/vsts/provider.py +++ b/src/sentry/identity/vsts/provider.py @@ -3,7 +3,8 @@ from rest_framework.request import Request from sentry import http, options -from sentry.identity.oauth2 import OAuth2CallbackView, OAuth2LoginView, OAuth2Provider +from sentry.identity.oauth2 import OAuth2CallbackView, OAuth2LoginView, OAuth2Provider, record_event +from sentry.integrations.utils.metrics import IntegrationPipelineViewType from sentry.utils.http import absolute_uri @@ -120,21 +121,27 @@ def exchange_token(self, request: Request, pipeline, code): from sentry.http import safe_urlopen, safe_urlread from sentry.utils.http import absolute_uri - req = safe_urlopen( - url=self.access_token_url, - headers={"Content-Type": "application/x-www-form-urlencoded", "Content-Length": "1322"}, - data={ - "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", - "client_assertion": self.client_secret, - "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", - "assertion": code, - "redirect_uri": absolute_uri(pipeline.redirect_url()), - }, - ) - body = safe_urlread(req) - if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): - return dict(parse_qsl(body)) - return orjson.loads(body) + with record_event( + IntegrationPipelineViewType.TOKEN_EXCHANGE, pipeline.provider.key + ).capture(): + req = 
safe_urlopen( + url=self.access_token_url, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Content-Length": "1322", + }, + data={ + "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", + "client_assertion": self.client_secret, + "grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer", + "assertion": code, + "redirect_uri": absolute_uri(pipeline.redirect_url()), + }, + ) + body = safe_urlread(req) + if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): + return dict(parse_qsl(body)) + return orjson.loads(body) # TODO(iamrajjoshi): Make this the default provider @@ -232,18 +239,24 @@ def exchange_token(self, request: Request, pipeline, code): from sentry.http import safe_urlopen, safe_urlread from sentry.utils.http import absolute_uri - req = safe_urlopen( - url=self.access_token_url, - headers={"Content-Type": "application/x-www-form-urlencoded", "Content-Length": "1322"}, - data={ - "grant_type": "authorization_code", - "client_id": self.client_id, - "client_secret": self.client_secret, - "code": code, - "redirect_uri": absolute_uri(pipeline.redirect_url()), - }, - ) - body = safe_urlread(req) - if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): - return dict(parse_qsl(body)) - return orjson.loads(body) + with record_event( + IntegrationPipelineViewType.TOKEN_EXCHANGE, pipeline.provider.key + ).capture(): + req = safe_urlopen( + url=self.access_token_url, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Content-Length": "1322", + }, + data={ + "grant_type": "authorization_code", + "client_id": self.client_id, + "client_secret": self.client_secret, + "code": code, + "redirect_uri": absolute_uri(pipeline.redirect_url()), + }, + ) + body = safe_urlread(req) + if req.headers["Content-Type"].startswith("application/x-www-form-urlencoded"): + return dict(parse_qsl(body)) + return orjson.loads(body) diff --git a/src/sentry/incidents/charts.py b/src/sentry/incidents/charts.py index b9797c5cce07c6..4d352763137508 100644 --- a/src/sentry/incidents/charts.py +++ b/src/sentry/incidents/charts.py @@ -266,7 +266,7 @@ def build_metric_alert_chart( query_str = build_query_strings(subscription=subscription, snuba_query=snuba_query).query_string query = ( query_str - if is_crash_free_alert + if is_crash_free_alert or dataset == Dataset.EventsAnalyticsPlatform else apply_dataset_query_conditions( SnubaQuery.Type(snuba_query.type), query_str, @@ -292,6 +292,11 @@ def build_metric_alert_chart( else: if query_type == SnubaQuery.Type.PERFORMANCE and dataset == Dataset.PerformanceMetrics: query_params["dataset"] = "metrics" + elif ( + query_type == SnubaQuery.Type.PERFORMANCE and dataset == Dataset.EventsAnalyticsPlatform + ): + query_params["dataset"] = "spans" + query_params["useRpc"] = "1" elif query_type == SnubaQuery.Type.ERROR: query_params["dataset"] = "errors" else: diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py index dc4d4e3e607684..2d1d228e9fcd72 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py @@ -119,7 +119,7 @@ def get(self, request: Request, organization) -> Response: # Add all alertable SentryApps to the list. 
elif registered_type.service_type == AlertRuleTriggerAction.Type.SENTRY_APP: - installs = app_service.get_installed_for_organization( + installs = app_service.installations_for_organization( organization_id=organization.id ) actions += [ diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_details.py b/src/sentry/incidents/endpoints/organization_alert_rule_details.py index aecdf6f86137c6..0aeea3a1fb0df0 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_details.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_details.py @@ -73,7 +73,7 @@ def update_alert_rule(request: Request, organization, alert_rule): "access": request.access, "user": request.user, "ip_address": request.META.get("REMOTE_ADDR"), - "installations": app_service.get_installed_for_organization( + "installations": app_service.installations_for_organization( organization_id=organization.id ), }, @@ -148,7 +148,7 @@ class OrganizationAlertRuleDetailsPutSerializer(serializers.Serializer): - `label`: One of `critical` or `warning`. A `critical` trigger is always required. - `alertThreshold`: The value that the subscription needs to reach to trigger the alert rule. -- `actions`: A list of actions that take place when the threshold is met. Set as an empty list if no actions are to take place. +- `actions`: A list of actions that take place when the threshold is met. ```json triggers: [ { @@ -212,9 +212,6 @@ class OrganizationAlertRuleDetailsPutSerializer(serializers.Serializer): owner = ActorField( required=False, allow_null=True, help_text="The ID of the team or user that owns the rule." ) - excludedProjects = serializers.ListField( - child=ProjectField(scope="project:read"), required=False - ) thresholdPeriod = serializers.IntegerField(required=False, default=1, min_value=1, max_value=20) monitorType = serializers.IntegerField( required=False, diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py index 0c6e64b83e5786..db45347717d093 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py @@ -111,7 +111,7 @@ def create_metric_alert( "access": request.access, "user": request.user, "ip_address": request.META.get("REMOTE_ADDR"), - "installations": app_service.get_installed_for_organization( + "installations": app_service.installations_for_organization( organization_id=organization.id ), }, @@ -346,7 +346,7 @@ class OrganizationAlertRuleIndexPostSerializer(serializers.Serializer): - `label`: One of `critical` or `warning`. A `critical` trigger is always required. - `alertThreshold`: The value that the subscription needs to reach to trigger the alert rule. -- `actions`: A list of actions that take place when the threshold is met. Set as an empty list if no actions are to take place. +- `actions`: A list of actions that take place when the threshold is met. ```json triggers: [ { @@ -409,9 +409,6 @@ class OrganizationAlertRuleIndexPostSerializer(serializers.Serializer): owner = ActorField( required=False, allow_null=True, help_text="The ID of the team or user that owns the rule." 
) - excludedProjects = serializers.ListField( - child=ProjectField(scope="project:read"), required=False - ) thresholdPeriod = serializers.IntegerField(required=False, default=1, min_value=1, max_value=20) monitorType = serializers.IntegerField( required=False, @@ -442,10 +439,14 @@ def check_can_create_alert(self, request: Request, organization: Organization) - permission, then we must verify that the user is a team admin with "alerts:write" access to the project(s) in their request. """ - # - if request.access.has_scope("alerts:write"): + # if the requesting user has any of these org-level permissions, then they can create an alert + if ( + request.access.has_scope("alerts:write") + or request.access.has_scope("org:admin") + or request.access.has_scope("org:write") + ): return - # team admins should be able to crete alerts for the projects they have access to + # team admins should be able to create alerts for the projects they have access to projects = self.get_projects(request, organization) # team admins will have alerts:write scoped to their projects, members will not team_admin_has_access = all( diff --git a/src/sentry/incidents/endpoints/organization_incident_activity_index.py b/src/sentry/incidents/endpoints/organization_incident_activity_index.py deleted file mode 100644 index 0437f5fa249a03..00000000000000 --- a/src/sentry/incidents/endpoints/organization_incident_activity_index.py +++ /dev/null @@ -1,33 +0,0 @@ -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.incident import IncidentEndpoint, IncidentPermission -from sentry.api.paginator import OffsetPaginator -from sentry.api.serializers import serialize -from sentry.incidents.logic import get_incident_activity - - -@region_silo_endpoint -class OrganizationIncidentActivityIndexEndpoint(IncidentEndpoint): - owner = ApiOwner.ISSUES - publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - } - permission_classes = (IncidentPermission,) - - def get(self, request: Request, organization, incident) -> Response: - if request.GET.get("desc", "1") == "1": - order_by = "-date_added" - else: - order_by = "date_added" - - return self.paginate( - request=request, - queryset=get_incident_activity(incident), - order_by=order_by, - paginator_cls=OffsetPaginator, - on_results=lambda x: serialize(x, request.user), - ) diff --git a/src/sentry/incidents/endpoints/organization_incident_comment_details.py b/src/sentry/incidents/endpoints/organization_incident_comment_details.py deleted file mode 100644 index fdfc47e8a31cfb..00000000000000 --- a/src/sentry/incidents/endpoints/organization_incident_comment_details.py +++ /dev/null @@ -1,88 +0,0 @@ -from rest_framework import serializers -from rest_framework.exceptions import PermissionDenied -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.incident import IncidentEndpoint, IncidentPermission -from sentry.api.exceptions import ResourceDoesNotExist -from sentry.api.serializers import serialize -from sentry.incidents.models.incident import IncidentActivity, IncidentActivityType - - -class CommentSerializer(serializers.Serializer): - comment = 
serializers.CharField(required=True) - - -class CommentDetailsEndpoint(IncidentEndpoint): - def convert_args(self, request: Request, activity_id, *args, **kwargs): - # See GroupNotesDetailsEndpoint: - # We explicitly don't allow a request with an ApiKey - # since an ApiKey is bound to the Organization, not - # an individual. Not sure if we'd want to allow an ApiKey - # to delete/update other users' comments - if not request.user.is_authenticated: - raise PermissionDenied(detail="Key doesn't have permission to delete Note") - - args, kwargs = super().convert_args(request, *args, **kwargs) - - try: - # Superusers may mutate any comment - user_filter = {} if request.user.is_superuser else {"user_id": request.user.id} - - kwargs["activity"] = IncidentActivity.objects.get( - id=activity_id, - incident=kwargs["incident"], - # Only allow modifying comments - type=IncidentActivityType.COMMENT.value, - **user_filter, - ) - except IncidentActivity.DoesNotExist: - raise ResourceDoesNotExist - - return args, kwargs - - -@region_silo_endpoint -class OrganizationIncidentCommentDetailsEndpoint(CommentDetailsEndpoint): - owner = ApiOwner.ISSUES - publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, - } - permission_classes = (IncidentPermission,) - - def delete(self, request: Request, organization, incident, activity) -> Response: - """ - Delete a comment - ```````````````` - :auth: required - """ - - try: - activity.delete() - except IncidentActivity.DoesNotExist: - raise ResourceDoesNotExist - - return Response(status=204) - - def put(self, request: Request, organization, incident, activity) -> Response: - """ - Update an existing comment - `````````````````````````` - :auth: required - """ - - serializer = CommentSerializer(data=request.data) - if serializer.is_valid(): - result = serializer.validated_data - - try: - comment = activity.update(comment=result.get("comment")) - except IncidentActivity.DoesNotExist: - raise ResourceDoesNotExist - - return Response(serialize(comment, request.user), status=200) - return Response(serializer.errors, status=400) diff --git a/src/sentry/incidents/endpoints/organization_incident_comment_index.py b/src/sentry/incidents/endpoints/organization_incident_comment_index.py deleted file mode 100644 index b2a534ed598b23..00000000000000 --- a/src/sentry/incidents/endpoints/organization_incident_comment_index.py +++ /dev/null @@ -1,56 +0,0 @@ -from rest_framework import serializers -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.incident import IncidentEndpoint, IncidentPermission -from sentry.api.fields.actor import ActorField -from sentry.api.serializers import serialize -from sentry.api.serializers.rest_framework.mentions import ( - MentionsMixin, - extract_user_ids_from_mentions, -) -from sentry.incidents.logic import create_incident_activity -from sentry.incidents.models.incident import IncidentActivityType -from sentry.users.services.user.serial import serialize_generic_user - - -class CommentSerializer(serializers.Serializer, MentionsMixin): - comment = serializers.CharField(required=True) - mentions = serializers.ListField(child=ActorField(), required=False) - external_id = serializers.CharField(allow_null=True, required=False) - - -@region_silo_endpoint -class 
OrganizationIncidentCommentIndexEndpoint(IncidentEndpoint): - owner = ApiOwner.ISSUES - publish_status = { - "POST": ApiPublishStatus.UNKNOWN, - } - permission_classes = (IncidentPermission,) - - def post(self, request: Request, organization, incident) -> Response: - serializer = CommentSerializer( - data=request.data, - context={ - "projects": incident.projects.all(), - "organization": organization, - "organization_id": organization.id, - }, - ) - if serializer.is_valid(): - mentions = extract_user_ids_from_mentions( - organization.id, serializer.validated_data.get("mentions", []) - ) - mentioned_user_ids = mentions["users"] | mentions["team_users"] - activity = create_incident_activity( - incident, - IncidentActivityType.COMMENT, - user=serialize_generic_user(request.user), - comment=serializer.validated_data["comment"], - mentioned_user_ids=mentioned_user_ids, - ) - return Response(serialize(activity, request.user), status=201) - return Response(serializer.errors, status=400) diff --git a/src/sentry/incidents/endpoints/organization_incident_details.py b/src/sentry/incidents/endpoints/organization_incident_details.py index 7edc9a96a8bd5c..20f217883b7d50 100644 --- a/src/sentry/incidents/endpoints/organization_incident_details.py +++ b/src/sentry/incidents/endpoints/organization_incident_details.py @@ -10,7 +10,6 @@ from sentry.incidents.endpoints.serializers.incident import DetailedIncidentSerializer from sentry.incidents.logic import update_incident_status from sentry.incidents.models.incident import IncidentStatus, IncidentStatusMethod -from sentry.users.services.user.serial import serialize_generic_user class IncidentSerializer(serializers.Serializer): @@ -56,8 +55,6 @@ def put(self, request: Request, organization, incident) -> Response: incident = update_incident_status( incident=incident, status=result["status"], - user=serialize_generic_user(request.user), - comment=result.get("comment"), status_method=IncidentStatusMethod.MANUAL, ) return Response( diff --git a/src/sentry/incidents/endpoints/organization_incident_seen.py b/src/sentry/incidents/endpoints/organization_incident_seen.py deleted file mode 100644 index 869596a422b021..00000000000000 --- a/src/sentry/incidents/endpoints/organization_incident_seen.py +++ /dev/null @@ -1,53 +0,0 @@ -from django.utils import timezone as django_timezone -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.incident import IncidentEndpoint, IncidentPermission -from sentry.incidents.models.incident import Incident, IncidentProject, IncidentSeen -from sentry.models.organization import Organization -from sentry.users.services.user import RpcUser -from sentry.users.services.user.serial import serialize_generic_user - - -@region_silo_endpoint -class OrganizationIncidentSeenEndpoint(IncidentEndpoint): - owner = ApiOwner.ISSUES - publish_status = { - "POST": ApiPublishStatus.UNKNOWN, - } - permission_classes = (IncidentPermission,) - - def post(self, request: Request, organization: Organization, incident: Incident) -> Response: - """ - Mark an incident as seen by the user - ```````````````````````````````````` - - :auth: required - """ - - user = serialize_generic_user(request.user) - if user is not None: - _set_incident_seen(incident, user) - return Response({}, status=201) - - -def _set_incident_seen(incident: Incident, user: RpcUser) 
-> None: - """ - Updates the incident to be seen - """ - - def is_project_member() -> bool: - incident_projects = IncidentProject.objects.filter(incident=incident) - for incident_project in incident_projects.select_related("project"): - if incident_project.project.member_set.filter(user_id=user.id).exists(): - return True - return False - - is_org_member = incident.organization.has_access(user) - if is_org_member and is_project_member(): - IncidentSeen.objects.create_or_update( - incident=incident, user_id=user.id, values={"last_seen": django_timezone.now()} - ) diff --git a/src/sentry/incidents/endpoints/organization_incident_subscription_index.py b/src/sentry/incidents/endpoints/organization_incident_subscription_index.py deleted file mode 100644 index dd22eea9d5a32e..00000000000000 --- a/src/sentry/incidents/endpoints/organization_incident_subscription_index.py +++ /dev/null @@ -1,52 +0,0 @@ -from rest_framework.request import Request -from rest_framework.response import Response - -from sentry.api.api_owners import ApiOwner -from sentry.api.api_publish_status import ApiPublishStatus -from sentry.api.base import region_silo_endpoint -from sentry.api.bases.incident import IncidentEndpoint, IncidentPermission -from sentry.incidents.logic import subscribe_to_incident, unsubscribe_from_incident - - -class IncidentSubscriptionPermission(IncidentPermission): - scope_map = IncidentPermission.scope_map.copy() - scope_map["DELETE"] = [ - "org:write", - "org:admin", - "project:read", - "project:write", - "project:admin", - ] - - -@region_silo_endpoint -class OrganizationIncidentSubscriptionIndexEndpoint(IncidentEndpoint): - owner = ApiOwner.ISSUES - publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "POST": ApiPublishStatus.UNKNOWN, - } - permission_classes = (IncidentSubscriptionPermission,) - - def post(self, request: Request, organization, incident) -> Response: - """ - Subscribes the authenticated user to the incident. - `````````````````````````````````````````````````` - Subscribes the user to the incident. If they are already subscribed - then no-op. - :auth: required - """ - - subscribe_to_incident(incident, request.user.id) - return Response({}, status=201) - - def delete(self, request: Request, organization, incident) -> Response: - """ - Unsubscribes the authenticated user from the incident. - `````````````````````````````````````````````````````` - Unsubscribes the user from the incident. If they are not subscribed then - no-op. 
- :auth: required - """ - unsubscribe_from_incident(incident, request.user.id) - return Response({}, status=200) diff --git a/src/sentry/incidents/endpoints/serializers/alert_rule.py b/src/sentry/incidents/endpoints/serializers/alert_rule.py index 0ec511f596727c..9d6e31d3c65935 100644 --- a/src/sentry/incidents/endpoints/serializers/alert_rule.py +++ b/src/sentry/incidents/endpoints/serializers/alert_rule.py @@ -17,7 +17,6 @@ AlertRule, AlertRuleActivity, AlertRuleActivityType, - AlertRuleExcludedProjects, AlertRuleTrigger, AlertRuleTriggerAction, ) @@ -40,7 +39,6 @@ class AlertRuleSerializerResponseOptional(TypedDict, total=False): environment: str | None projects: list[str] | None - excludedProjects: list[dict] | None queryType: int | None resolveThreshold: float | None dataset: str | None @@ -63,8 +61,6 @@ class AlertRuleSerializerResponseOptional(TypedDict, total=False): "status", "resolution", "thresholdPeriod", - "includeAllProjects", - "excludedProjects", "weeklyAvg", "totalThisWeek", "latestIncident", @@ -89,7 +85,6 @@ class AlertRuleSerializerResponse(AlertRuleSerializerResponseOptional): resolution: float thresholdPeriod: int triggers: list[dict] - includeAllProjects: bool dateModified: datetime dateCreated: datetime createdBy: dict @@ -309,7 +304,6 @@ def serialize( "thresholdPeriod": obj.threshold_period, "triggers": attrs.get("triggers", []), "projects": sorted(attrs.get("projects", [])), - "includeAllProjects": obj.include_all_projects, "owner": attrs.get("owner", None), "originalAlertRuleId": attrs.get("originalAlertRuleId", None), "comparisonDelta": obj.comparison_delta / 60 if obj.comparison_delta else None, @@ -343,13 +337,6 @@ def get_attrs( self, item_list: Sequence[Any], user: User | RpcUser, **kwargs: Any ) -> defaultdict[AlertRule, Any]: result = super().get_attrs(item_list, user, **kwargs) - alert_rules = {item.id: item for item in item_list} - for alert_rule_id, project_slug in AlertRuleExcludedProjects.objects.filter( - alert_rule__in=item_list - ).values_list("alert_rule_id", "project__slug"): - exclusions = result[alert_rules[alert_rule_id]].setdefault("excluded_projects", []) - exclusions.append(project_slug) - query_to_alert_rule = {ar.snuba_query_id: ar for ar in item_list} for event_type in SnubaQueryEventType.objects.filter( @@ -366,7 +353,6 @@ def serialize( self, obj: AlertRule, attrs: Mapping[Any, Any], user: User | RpcUser, **kwargs ) -> AlertRuleSerializerResponse: data = super().serialize(obj, attrs, user) - data["excludedProjects"] = sorted(attrs.get("excluded_projects", [])) data["eventTypes"] = sorted(attrs.get("event_types", [])) data["snooze"] = False return data diff --git a/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py b/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py index c188c507c94349..66aca9a48658ae 100644 --- a/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py +++ b/src/sentry/incidents/endpoints/serializers/alert_rule_trigger.py @@ -5,11 +5,7 @@ from sentry.api.serializers import Serializer, register, serialize from sentry.incidents.endpoints.utils import translate_threshold -from sentry.incidents.models.alert_rule import ( - AlertRuleTrigger, - AlertRuleTriggerAction, - AlertRuleTriggerExclusion, -) +from sentry.incidents.models.alert_rule import AlertRuleTrigger, AlertRuleTriggerAction @register(AlertRuleTrigger) @@ -45,20 +41,3 @@ def serialize(self, obj, attrs, user, **kwargs): "dateCreated": obj.date_added, "actions": attrs.get("actions", []), } - - -class 
DetailedAlertRuleTriggerSerializer(AlertRuleTriggerSerializer): - def get_attrs(self, item_list, user, **kwargs): - triggers = {item.id: item for item in item_list} - result: dict[str, dict[str, list[str]]] = defaultdict(lambda: defaultdict(list)) - for trigger_id, project_slug in AlertRuleTriggerExclusion.objects.filter( - alert_rule_trigger__in=item_list - ).values_list("alert_rule_trigger_id", "query_subscription__project__slug"): - if project_slug is not None: - result[triggers[trigger_id]]["excludedProjects"].append(project_slug) - return result - - def serialize(self, obj, attrs, user, **kwargs): - data = super().serialize(obj, attrs, user, **kwargs) - data["excludedProjects"] = sorted(attrs.get("excludedProjects", [])) - return data diff --git a/src/sentry/incidents/endpoints/serializers/incident.py b/src/sentry/incidents/endpoints/serializers/incident.py index c2ad42429c2936..4df9bc2015ab85 100644 --- a/src/sentry/incidents/endpoints/serializers/incident.py +++ b/src/sentry/incidents/endpoints/serializers/incident.py @@ -4,13 +4,7 @@ from sentry.api.serializers import Serializer, register, serialize from sentry.incidents.endpoints.serializers.alert_rule import AlertRuleSerializer -from sentry.incidents.models.incident import ( - Incident, - IncidentActivity, - IncidentProject, - IncidentSeen, - IncidentSubscription, -) +from sentry.incidents.models.incident import Incident, IncidentActivity, IncidentProject from sentry.snuba.entity_subscription import apply_dataset_query_conditions from sentry.snuba.models import SnubaQuery @@ -45,21 +39,6 @@ def get_attrs(self, item_list, user, **kwargs): serialize(incident.activation) if incident.activation else [] ) - if "seen_by" in self.expand: - incident_seen_list = list( - IncidentSeen.objects.filter(incident__in=item_list).order_by("-last_seen") - ) - incident_seen_dict = defaultdict(list) - for incident_seen, serialized_seen_by in zip( - incident_seen_list, serialize(incident_seen_list) - ): - incident_seen_dict[incident_seen.incident_id].append(serialized_seen_by) - for incident in item_list: - seen_by = incident_seen_dict[incident.id] - has_seen = any(seen for seen in seen_by if seen["id"] == str(user.id)) - results[incident]["seen_by"] = seen_by - results[incident]["has_seen"] = has_seen # type: ignore[assignment] - if "activities" in self.expand: # There could be many activities. An incident could seesaw between error/warning for a long period. 
# e.g - every 1 minute for 10 months @@ -81,8 +60,6 @@ def serialize(self, obj, attrs, user, **kwargs): "projects": attrs["projects"], "alertRule": attrs["alert_rule"], "activities": attrs["activities"] if "activities" in self.expand else None, - "seenBy": attrs["seen_by"] if "seen_by" in self.expand else None, - "hasSeen": attrs["has_seen"] if "seen_by" in self.expand else None, "status": obj.status, "statusMethod": obj.status_method, "type": obj.type, @@ -99,29 +76,12 @@ class DetailedIncidentSerializer(IncidentSerializer): def __init__(self, expand=None): if expand is None: expand = [] - if "seen_by" not in expand: - expand.append("seen_by") if "original_alert_rule" not in expand: expand.append("original_alert_rule") super().__init__(expand=expand) - def get_attrs(self, item_list, user, **kwargs): - results = super().get_attrs(item_list, user=user, **kwargs) - subscribed_incidents = set() - if user.is_authenticated: - subscribed_incidents = set( - IncidentSubscription.objects.filter( - incident__in=item_list, user_id=user.id - ).values_list("incident_id", flat=True) - ) - - for item in item_list: - results[item]["is_subscribed"] = item.id in subscribed_incidents - return results - def serialize(self, obj, attrs, user, **kwargs): context = super().serialize(obj, attrs, user) - context["isSubscribed"] = attrs["is_subscribed"] # The query we should use to get accurate results in Discover. context["discoverQuery"] = self._build_discover_query(obj) diff --git a/src/sentry/incidents/events.py b/src/sentry/incidents/events.py index 0c12286a8a7375..03a00e89049097 100644 --- a/src/sentry/incidents/events.py +++ b/src/sentry/incidents/events.py @@ -21,14 +21,5 @@ class IncidentStatusUpdatedEvent(BaseIncidentEvent): ) -class IncidentCommentCreatedEvent(BaseIncidentEvent): - type = "incident.comment" - attributes = BaseIncidentEvent.attributes + ( - analytics.Attribute("user_id", required=False), - analytics.Attribute("activity_id", required=False), - ) - - analytics.register(IncidentCreatedEvent) analytics.register(IncidentStatusUpdatedEvent) -analytics.register(IncidentCommentCreatedEvent) diff --git a/src/sentry/incidents/grouptype.py b/src/sentry/incidents/grouptype.py index 6973a4b8ed5c82..b7078c7c2f7a40 100644 --- a/src/sentry/incidents/grouptype.py +++ b/src/sentry/incidents/grouptype.py @@ -5,7 +5,7 @@ from sentry.issues.grouptype import GroupCategory, GroupType from sentry.ratelimits.sliding_windows import Quota from sentry.types.group import PriorityLevel -from sentry.workflow_engine.processors.detector import StatefulDetectorHandler +from sentry.workflow_engine.handlers.detector import StatefulDetectorHandler class MetricAlertDetectorHandler(StatefulDetectorHandler[QuerySubscriptionUpdate]): diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py index b5984b75504ce6..a59fc409354300 100644 --- a/src/sentry/incidents/logic.py +++ b/src/sentry/incidents/logic.py @@ -49,23 +49,24 @@ IncidentProject, IncidentStatus, IncidentStatusMethod, - IncidentSubscription, IncidentTrigger, IncidentType, TriggerStatus, ) from sentry.integrations.services.integration import RpcIntegration, integration_service from sentry.models.environment import Environment -from sentry.models.notificationaction import ActionService, ActionTarget from sentry.models.organization import Organization from sentry.models.project import Project +from sentry.notifications.models.notificationaction import ActionService, ActionTarget from sentry.relay.config.metric_extraction import 
on_demand_metrics_feature_flags +from sentry.search.eap.types import SearchResolverConfig from sentry.search.events.builder.base import BaseQueryBuilder from sentry.search.events.constants import ( METRICS_LAYER_UNSUPPORTED_TRANSACTION_METRICS_FUNCTIONS, SPANS_METRICS_FUNCTIONS, ) from sentry.search.events.fields import is_function, resolve_field +from sentry.search.events.types import SnubaParams from sentry.seer.anomaly_detection.delete_rule import delete_rule_in_seer from sentry.seer.anomaly_detection.store_data import send_new_rule_data, update_rule_data from sentry.sentry_apps.services.app import RpcSentryAppInstallation, app_service @@ -74,7 +75,8 @@ DuplicateDisplayNameError, IntegrationError, ) -from sentry.snuba.dataset import Dataset +from sentry.snuba import spans_rpc +from sentry.snuba.dataset import Dataset, EntityKey from sentry.snuba.entity_subscription import ( ENTITY_TIME_COLUMNS, EntitySubscription, @@ -85,6 +87,7 @@ from sentry.snuba.metrics.extraction import should_use_on_demand_metrics from sentry.snuba.metrics.naming_layer.mri import get_available_operations, is_mri, parse_mri from sentry.snuba.models import QuerySubscription, SnubaQuery, SnubaQueryEventType +from sentry.snuba.referrer import Referrer from sentry.snuba.subscriptions import ( bulk_delete_snuba_subscriptions, bulk_disable_snuba_subscriptions, @@ -94,6 +97,7 @@ ) from sentry.tasks.relay import schedule_invalidate_project_config from sentry.types.actor import Actor +from sentry.users.models.user import User from sentry.users.services.user import RpcUser from sentry.utils import metrics from sentry.utils.audit import create_audit_entry_from_user @@ -194,8 +198,6 @@ def create_incident( def update_incident_status( incident: Incident, status: IncidentStatus, - user: RpcUser | None = None, - comment: str | None = None, status_method: IncidentStatusMethod = IncidentStatusMethod.RULE_TRIGGERED, date_closed: datetime | None = None, ) -> Incident: @@ -211,13 +213,9 @@ def update_incident_status( create_incident_activity( incident, IncidentActivityType.STATUS_CHANGE, - user=user, value=status.value, previous_value=incident.status, - comment=comment, ) - if user: - subscribe_to_incident(incident, user.id) prev_status = incident.status kwargs: dict[str, Any] = {"status": status.value, "status_method": status_method.value} @@ -252,15 +250,11 @@ def update_incident_status( def create_incident_activity( incident: Incident, activity_type: IncidentActivityType, - user: RpcUser | None = None, + user: RpcUser | User | None = None, value: str | int | None = None, previous_value: str | int | None = None, - comment: str | None = None, - mentioned_user_ids: Collection[int] = (), date_added: datetime | None = None, ) -> IncidentActivity: - if activity_type == IncidentActivityType.COMMENT and user: - subscribe_to_incident(incident, user.id) value = str(value) if value is not None else None previous_value = str(previous_value) if previous_value is not None else None kwargs = {} @@ -272,40 +266,9 @@ def create_incident_activity( user_id=user.id if user else None, value=value, previous_value=previous_value, - comment=comment, notification_uuid=uuid4(), **kwargs, ) - - if mentioned_user_ids: - user_ids_to_subscribe = set(mentioned_user_ids) - set( - IncidentSubscription.objects.filter( - incident=incident, user_id__in=mentioned_user_ids - ).values_list("user_id", flat=True) - ) - if user_ids_to_subscribe: - IncidentSubscription.objects.bulk_create( - [ - IncidentSubscription(incident=incident, user_id=mentioned_user_id) - for 
mentioned_user_id in user_ids_to_subscribe - ] - ) - transaction.on_commit( - lambda: tasks.send_subscriber_notifications.apply_async( - kwargs={"activity_id": activity.id}, countdown=10 - ), - router.db_for_write(IncidentSubscription), - ) - if activity_type == IncidentActivityType.COMMENT: - analytics.record( - "incident.comment", - incident_id=incident.id, - organization_id=incident.organization_id, - incident_type=incident.type, - user_id=user.id if user else None, - activity_id=activity.id, - ) - return activity @@ -416,36 +379,64 @@ def get_incident_aggregates( snuba_query, incident.organization_id, ) - query_builder = _build_incident_query_builder( - incident, entity_subscription, start, end, windowed_stats - ) - try: - results = query_builder.run_query(referrer="incidents.get_incident_aggregates") - except Exception: - metrics.incr( - "incidents.get_incident_aggregates.snql.query.error", - tags={ - "dataset": snuba_query.dataset, - "entity": get_entity_key_from_query_builder(query_builder).value, - }, + if entity_subscription.dataset == Dataset.EventsAnalyticsPlatform: + start, end = _calculate_incident_time_range( + incident, start, end, windowed_stats=windowed_stats ) - raise - - aggregated_result = entity_subscription.aggregate_query_results(results["data"], alias="count") - return aggregated_result[0] + project_ids = list( + IncidentProject.objects.filter(incident=incident).values_list("project_id", flat=True) + ) -def subscribe_to_incident(incident: Incident, user_id: int) -> IncidentSubscription: - subscription, _ = IncidentSubscription.objects.get_or_create(incident=incident, user_id=user_id) - return subscription - + params = SnubaParams( + environments=[snuba_query.environment], + projects=[Project.objects.get_from_cache(id=project_id) for project_id in project_ids], + organization=Organization.objects.get_from_cache(id=incident.organization_id), + start=start, + end=end, + ) -def unsubscribe_from_incident(incident: Incident, user_id: int) -> None: - IncidentSubscription.objects.filter(incident=incident, user_id=user_id).delete() + try: + results = spans_rpc.run_table_query( + params, + query_string=snuba_query.query, + selected_columns=[entity_subscription.aggregate], + orderby=None, + offset=0, + limit=1, + referrer=Referrer.API_ALERTS_ALERT_RULE_CHART.value, + config=SearchResolverConfig( + auto_fields=True, + ), + ) + except Exception: + metrics.incr( + "incidents.get_incident_aggregates.snql.query.error", + tags={ + "dataset": snuba_query.dataset, + "entity": EntityKey.EAPSpans.value, + }, + ) + raise + else: + query_builder = _build_incident_query_builder( + incident, entity_subscription, start, end, windowed_stats + ) + try: + results = query_builder.run_query(referrer="incidents.get_incident_aggregates") + except Exception: + metrics.incr( + "incidents.get_incident_aggregates.snql.query.error", + tags={ + "dataset": snuba_query.dataset, + "entity": get_entity_key_from_query_builder(query_builder).value, + }, + ) + raise -def get_incident_subscribers(incident: Incident) -> Iterable[IncidentSubscription]: - return IncidentSubscription.objects.filter(incident=incident) + aggregated_result = entity_subscription.aggregate_query_results(results["data"], alias="count") + return aggregated_result[0] def get_incident_activity(incident: Incident) -> Iterable[IncidentActivity]: @@ -671,7 +662,7 @@ def create_alert_rule( return alert_rule -def snapshot_alert_rule(alert_rule: AlertRule, user: RpcUser | None = None) -> None: +def snapshot_alert_rule(alert_rule: AlertRule, user: 
RpcUser | User | None = None) -> None: def nullify_id(model: Model) -> None: """Set the id field to null. @@ -1638,7 +1629,7 @@ def _get_alert_rule_trigger_action_sentry_app( from sentry.sentry_apps.services.app import app_service if installations is None: - installations = app_service.get_installed_for_organization(organization_id=organization.id) + installations = app_service.installations_for_organization(organization_id=organization.id) for installation in installations: if installation.sentry_app.id == sentry_app_id: @@ -1850,7 +1841,7 @@ def get_slack_actions_with_async_lookups( "access": SystemAccess(), "user": user, "input_channel_id": action.get("inputChannelId"), - "installations": app_service.get_installed_for_organization( + "installations": app_service.installations_for_organization( organization_id=organization.id ), }, diff --git a/src/sentry/incidents/models/alert_rule.py b/src/sentry/incidents/models/alert_rule.py index 1493ff50c56bc6..75eb7696b6c8f6 100644 --- a/src/sentry/incidents/models/alert_rule.py +++ b/src/sentry/incidents/models/alert_rule.py @@ -32,10 +32,14 @@ from sentry.incidents.models.incident import Incident, IncidentStatus, IncidentTrigger from sentry.incidents.utils.constants import INCIDENTS_SNUBA_SUBSCRIPTION_TYPE from sentry.incidents.utils.types import AlertRuleActivationConditionType -from sentry.models.notificationaction import AbstractNotificationAction, ActionService, ActionTarget from sentry.models.organization import Organization from sentry.models.project import Project from sentry.models.team import Team +from sentry.notifications.models.notificationaction import ( + AbstractNotificationAction, + ActionService, + ActionTarget, +) from sentry.seer.anomaly_detection.delete_rule import delete_rule_in_seer from sentry.snuba.models import QuerySubscription from sentry.snuba.subscriptions import bulk_create_snuba_subscriptions, delete_snuba_subscription @@ -237,26 +241,6 @@ def conditionally_subscribe_project_to_alert_rules( return [] -@region_silo_model -class AlertRuleExcludedProjects(Model): - """ - Excludes a specific project from an AlertRule - - NOTE: This feature is not currently utilized. - """ - - __relocation_scope__ = RelocationScope.Organization - - alert_rule = FlexibleForeignKey("sentry.AlertRule", db_index=False) - project = FlexibleForeignKey("sentry.Project", db_constraint=False) - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_alertruleexcludedprojects" - unique_together = (("alert_rule", "project"),) - - @region_silo_model class AlertRuleProjects(Model): """ @@ -297,15 +281,6 @@ class AlertRule(Model): user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") team = FlexibleForeignKey("sentry.Team", null=True, on_delete=models.SET_NULL) - - excluded_projects = models.ManyToManyField( - "sentry.Project", related_name="alert_rule_exclusions", through=AlertRuleExcludedProjects - ) # NOTE: This feature is not currently utilized. - # Determines whether we include all current and future projects from this - # organization in this rule. - include_all_projects = models.BooleanField( - default=False - ) # NOTE: This feature is not currently utilized. 
name = models.TextField() status = models.SmallIntegerField(default=AlertRuleStatus.PENDING.value) threshold_type = models.SmallIntegerField(null=True) @@ -478,24 +453,6 @@ class Meta: unique_together = (("alert_rule", "label"),) -@region_silo_model -class AlertRuleTriggerExclusion(Model): - """ - Allows us to define a specific trigger to be excluded from a query subscription - """ - - __relocation_scope__ = RelocationScope.Organization - - alert_rule_trigger = FlexibleForeignKey("sentry.AlertRuleTrigger", related_name="exclusions") - query_subscription = FlexibleForeignKey("sentry.QuerySubscription") - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_alertruletriggerexclusion" - unique_together = (("alert_rule_trigger", "query_subscription"),) - - class AlertRuleTriggerActionMethod(StrEnum): FIRE = "fire" RESOLVE = "resolve" diff --git a/src/sentry/incidents/models/incident.py b/src/sentry/incidents/models/incident.py index 5339fc055652a8..1d64379b142813 100644 --- a/src/sentry/incidents/models/incident.py +++ b/src/sentry/incidents/models/incident.py @@ -22,7 +22,6 @@ OneToOneCascadeDeletes, UUIDField, region_silo_model, - sane_repr, ) from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.manager.base import BaseManager @@ -45,20 +44,6 @@ class Meta: unique_together = (("project", "incident"),) -@region_silo_model -class IncidentSeen(Model): - __relocation_scope__ = RelocationScope.Excluded - - incident = FlexibleForeignKey("sentry.Incident") - user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE", db_index=False) - last_seen = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentseen" - unique_together = (("user_id", "incident"),) - - class IncidentManager(BaseManager["Incident"]): CACHE_KEY = "incidents:active:%s:%s:%s" @@ -185,7 +170,7 @@ class Incident(Model): - UI should be able to handle multiple active incidents """ - __relocation_scope__ = RelocationScope.Organization + __relocation_scope__ = RelocationScope.Global objects: ClassVar[IncidentManager] = IncidentManager() @@ -250,7 +235,7 @@ def normalize_before_relocation_import( @region_silo_model class PendingIncidentSnapshot(Model): - __relocation_scope__ = RelocationScope.Organization + __relocation_scope__ = RelocationScope.Global incident = OneToOneCascadeDeletes("sentry.Incident", db_constraint=False) target_run_date = models.DateTimeField(db_index=True, default=timezone.now) @@ -263,7 +248,7 @@ class Meta: @region_silo_model class IncidentSnapshot(Model): - __relocation_scope__ = RelocationScope.Organization + __relocation_scope__ = RelocationScope.Global incident = OneToOneCascadeDeletes("sentry.Incident", db_constraint=False) event_stats_snapshot = FlexibleForeignKey("sentry.TimeSeriesSnapshot", db_constraint=False) @@ -278,7 +263,7 @@ class Meta: @region_silo_model class TimeSeriesSnapshot(Model): - __relocation_scope__ = RelocationScope.Organization + __relocation_scope__ = RelocationScope.Global __relocation_dependencies__ = {"sentry.Incident"} start = models.DateTimeField() @@ -303,7 +288,6 @@ def query_for_relocation_export(cls, q: models.Q, pk_map: PrimaryKeyMap) -> mode class IncidentActivityType(Enum): CREATED = 1 STATUS_CHANGE = 2 - COMMENT = 3 DETECTED = 4 @@ -313,7 +297,7 @@ class IncidentActivity(Model): An IncidentActivity is a record of a change that occurred in an Incident. 
This could be a status change, """ - __relocation_scope__ = RelocationScope.Organization + __relocation_scope__ = RelocationScope.Global incident = FlexibleForeignKey("sentry.Incident") user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE", null=True) @@ -341,27 +325,6 @@ def normalize_before_relocation_import( return old_pk -@region_silo_model -class IncidentSubscription(Model): - """ - IncidentSubscription is a record of a user being subscribed to an incident. - Not to be confused with a snuba QuerySubscription - """ - - __relocation_scope__ = RelocationScope.Organization - - incident = FlexibleForeignKey("sentry.Incident", db_index=False) - user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE") - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentsubscription" - unique_together = (("incident", "user_id"),) - - __repr__ = sane_repr("incident_id", "user_id") - - class TriggerStatus(Enum): ACTIVE = 0 RESOLVED = 1 @@ -405,7 +368,7 @@ class IncidentTrigger(Model): NOTE: dissimilar to an AlertRuleTrigger which represents the trigger threshold required to initialize an Incident """ - __relocation_scope__ = RelocationScope.Organization + __relocation_scope__ = RelocationScope.Global objects: ClassVar[IncidentTriggerManager] = IncidentTriggerManager() diff --git a/src/sentry/incidents/serializers/alert_rule.py b/src/sentry/incidents/serializers/alert_rule.py index 74d631d95d8ae5..4c0c6ddac49405 100644 --- a/src/sentry/incidents/serializers/alert_rule.py +++ b/src/sentry/incidents/serializers/alert_rule.py @@ -76,9 +76,6 @@ class AlertRuleSerializer(CamelSnakeModelSerializer[AlertRule]): required=False, max_length=1, ) - excluded_projects = serializers.ListField( - child=ProjectField(scope="project:read"), required=False - ) triggers = serializers.ListField(required=True) query_type = serializers.IntegerField(required=False) dataset = serializers.CharField(required=False) @@ -123,8 +120,6 @@ class Meta: "comparison_delta", "aggregate", "projects", - "include_all_projects", - "excluded_projects", "triggers", "event_types", "monitor_type", @@ -136,7 +131,6 @@ class Meta: ] extra_kwargs = { "name": {"min_length": 1, "max_length": 256}, - "include_all_projects": {"default": False}, "threshold_type": {"required": True}, "resolve_threshold": {"required": False}, } diff --git a/src/sentry/incidents/serializers/alert_rule_trigger_action.py b/src/sentry/incidents/serializers/alert_rule_trigger_action.py index b2a34227bb6d11..53266d2472f0e9 100644 --- a/src/sentry/incidents/serializers/alert_rule_trigger_action.py +++ b/src/sentry/incidents/serializers/alert_rule_trigger_action.py @@ -14,9 +14,9 @@ from sentry.integrations.opsgenie.utils import OPSGENIE_CUSTOM_PRIORITIES from sentry.integrations.pagerduty.utils import PAGERDUTY_CUSTOM_PRIORITIES from sentry.integrations.slack.utils.channel import validate_channel_id -from sentry.models.notificationaction import ActionService from sentry.models.organizationmember import OrganizationMember from sentry.models.team import Team +from sentry.notifications.models.notificationaction import ActionService from sentry.shared_integrations.exceptions import ApiRateLimitedError diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py index 865c0160bf3722..4654948204437a 100644 --- a/src/sentry/incidents/subscription_processor.py +++ b/src/sentry/incidents/subscription_processor.py @@ -399,12 +399,15 
@@ def process_update(self, subscription_update: QuerySubscriptionUpdate) -> None: has_anomaly_detection and self.alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC ): - potential_anomalies = get_anomaly_data_from_seer( - alert_rule=self.alert_rule, - subscription=self.subscription, - last_update=self.last_update.timestamp(), - aggregation_value=aggregation_value, - ) + with metrics.timer( + "incidents.subscription_processor.process_update.get_anomaly_data_from_seer" + ): + potential_anomalies = get_anomaly_data_from_seer( + alert_rule=self.alert_rule, + subscription=self.subscription, + last_update=self.last_update.timestamp(), + aggregation_value=aggregation_value, + ) if potential_anomalies is None: logger.info( "No potential anomalies found", diff --git a/src/sentry/incidents/tasks.py b/src/sentry/incidents/tasks.py index 5fe389e68082b5..a8ca7387a08491 100644 --- a/src/sentry/incidents/tasks.py +++ b/src/sentry/incidents/tasks.py @@ -2,21 +2,15 @@ import logging from typing import Any -from urllib.parse import urlencode -from django.urls import reverse - -from sentry.auth.access import from_user from sentry.incidents.models.alert_rule import ( AlertRuleStatus, AlertRuleTriggerAction, AlertRuleTriggerActionMethod, ) from sentry.incidents.models.incident import ( - INCIDENT_STATUS, Incident, IncidentActivity, - IncidentActivityType, IncidentStatus, IncidentStatusMethod, ) @@ -31,107 +25,11 @@ from sentry.snuba.models import QuerySubscription from sentry.snuba.query_subscriptions.consumer import register_subscriber from sentry.tasks.base import instrumented_task -from sentry.users.models.user import User -from sentry.users.services.user import RpcUser -from sentry.users.services.user.service import user_service from sentry.utils import metrics -from sentry.utils.email import MessageBuilder -from sentry.utils.http import absolute_uri logger = logging.getLogger(__name__) -@instrumented_task( - name="sentry.incidents.tasks.send_subscriber_notifications", - queue="incidents", - silo_mode=SiloMode.REGION, -) -def send_subscriber_notifications(activity_id: int) -> None: - from sentry.incidents.logic import get_incident_subscribers, unsubscribe_from_incident - - try: - activity = IncidentActivity.objects.select_related( - "incident", "incident__organization" - ).get(id=activity_id) - except IncidentActivity.DoesNotExist: - return - - if activity.user_id is None: - return - - activity_user = user_service.get_user(user_id=activity.user_id) - - # Only send notifications for specific activity types. - if activity.type not in ( - IncidentActivityType.COMMENT.value, - IncidentActivityType.STATUS_CHANGE.value, - ): - return - - # Check that the user still has access to at least one of the projects - # related to the incident. If not then unsubscribe them. 
- projects = list(activity.incident.projects.all()) - for subscriber in get_incident_subscribers(activity.incident): - subscriber_user = user_service.get_user(user_id=subscriber.user_id) - if subscriber_user is None: - continue - - access = from_user(subscriber_user, activity.incident.organization) - if not any(project for project in projects if access.has_project_access(project)): - unsubscribe_from_incident(activity.incident, subscriber_user.id) - elif subscriber_user.id != activity.user_id: - msg = generate_incident_activity_email(activity, subscriber_user, activity_user) - msg.send_async([subscriber_user.email]) - - -def generate_incident_activity_email( - activity: IncidentActivity, user: RpcUser | User, activity_user: RpcUser | User | None = None -) -> MessageBuilder: - incident = activity.incident - return MessageBuilder( - subject=f"Activity on Alert {incident.title} (#{incident.identifier})", - template="sentry/emails/incidents/activity.txt", - html_template="sentry/emails/incidents/activity.html", - type="incident.activity", - context=build_activity_context(activity, user, activity_user), - ) - - -def build_activity_context( - activity: IncidentActivity, user: RpcUser, activity_user: RpcUser | None = None -) -> dict[str, Any]: - if activity_user is None: - activity_user = user_service.get_user(user_id=activity.user_id) - - if activity.type == IncidentActivityType.COMMENT.value: - action = "left a comment" - else: - action = "changed status from {} to {}".format( - INCIDENT_STATUS[IncidentStatus(int(activity.previous_value))], - INCIDENT_STATUS[IncidentStatus(int(activity.value))], - ) - incident = activity.incident - - action = f"{action} on alert {incident.title} (#{incident.identifier})" - - return { - "user_name": activity_user.name if activity_user else "Sentry", - "action": action, - "link": absolute_uri( - reverse( - "sentry-metric-alert", - kwargs={ - "organization_slug": incident.organization.slug, - "incident_id": incident.identifier, - }, - ) - ) - + "?" - + urlencode({"referrer": "incident_activity_email"}), - "comment": activity.comment, - } - - @register_subscriber(SUBSCRIPTION_METRICS_LOGGER) def handle_subscription_metrics_logger( subscription_update: QuerySubscriptionUpdate, subscription: QuerySubscription @@ -265,7 +163,6 @@ def auto_resolve_snapshot_incidents(alert_rule_id: int, **kwargs: Any) -> None: update_incident_status( incident, IncidentStatus.CLOSED, - comment="This alert has been auto-resolved because the rule that triggered it has been modified or deleted.", status_method=IncidentStatusMethod.RULE_UPDATED, ) diff --git a/src/sentry/ingest/billing_metrics_consumer.py b/src/sentry/ingest/billing_metrics_consumer.py index 688d1e71a7e70f..471855aa356f6d 100644 --- a/src/sentry/ingest/billing_metrics_consumer.py +++ b/src/sentry/ingest/billing_metrics_consumer.py @@ -14,7 +14,6 @@ from django.core.cache import cache from sentry_kafka_schemas.schema_types.snuba_generic_metrics_v1 import GenericMetric -from sentry import options from sentry.constants import DataCategory from sentry.models.project import Project from sentry.sentry_metrics.indexer.strings import ( @@ -105,13 +104,6 @@ def _count_processed_items(self, generic_metric: GenericMetric) -> Mapping[DataC items = {data_category: quantity} - if not options.get("profiling.emit_outcomes_in_profiling_consumer.enabled"): - if self._has_profile(generic_metric): - # The bucket is tagged with the "has_profile" tag, - # so we also count the quantity of this bucket towards profiles. 
- # This assumes a "1 to 0..1" relationship between transactions / spans and profiles. - items[DataCategory.PROFILE] = quantity - return items def _has_profile(self, generic_metric: GenericMetric) -> bool: diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py index b4dc48ae05f5ce..c03b71d96e4d6a 100644 --- a/src/sentry/ingest/consumer/processors.py +++ b/src/sentry/ingest/consumer/processors.py @@ -25,6 +25,7 @@ from sentry.utils import metrics from sentry.utils.cache import cache_key_for_event from sentry.utils.dates import to_datetime +from sentry.utils.event_tracker import TransactionStageStatus, track_sampled_event from sentry.utils.sdk import set_current_event_project from sentry.utils.snuba import RateLimitExceeded @@ -107,11 +108,6 @@ def process_event( remote_addr = message.get("remote_addr") attachments = message.get("attachments") or () - if consumer_type == ConsumerType.Transactions: - processing_store = transaction_processing_store - else: - processing_store = event_processing_store - sentry_sdk.set_extra("event_id", event_id) sentry_sdk.set_extra("len_attachments", len(attachments)) @@ -166,6 +162,13 @@ def process_event( with sentry_sdk.start_span(op="orjson.loads"): data = orjson.loads(payload) + # We also need to check "type" as transactions are also sent to ingest-attachments + # along with other event types if they have attachments. + if consumer_type == ConsumerType.Transactions or data.get("type") == "transaction": + processing_store = transaction_processing_store + else: + processing_store = event_processing_store + sentry_sdk.set_extra("event_type", data.get("type")) with sentry_sdk.start_span( @@ -202,6 +205,11 @@ def process_event( else: with metrics.timer("ingest_consumer._store_event"): cache_key = processing_store.store(data) + if consumer_type == ConsumerType.Transactions: + track_sampled_event( + data["event_id"], ConsumerType.Transactions, TransactionStageStatus.REDIS_PUT + ) + save_attachments(attachments, cache_key) try: diff --git a/src/sentry/ingest/types.py b/src/sentry/ingest/types.py index d9ca4198dbc01b..4b0c4596d72796 100644 --- a/src/sentry/ingest/types.py +++ b/src/sentry/ingest/types.py @@ -1,4 +1,7 @@ -class ConsumerType: +from enum import StrEnum + + +class ConsumerType(StrEnum): """ Defines the types of ingestion consumers """ diff --git a/src/sentry/integrations/api/bases/integration.py b/src/sentry/integrations/api/bases/integration.py index 2273d83622fd15..db35ffec89d120 100644 --- a/src/sentry/integrations/api/bases/integration.py +++ b/src/sentry/integrations/api/bases/integration.py @@ -42,14 +42,16 @@ class IntegrationEndpoint(ControlSiloOrganizationEndpoint): Baseclass for integration endpoints in control silo that need integration exception handling """ - def handle_exception( + def handle_exception_with_details( self, request: Request, exc: Exception, *args: Any, **kwds: Any, ) -> Response: - return _handle_exception(exc) or super().handle_exception(request, exc, *args, **kwds) + return _handle_exception(exc) or super().handle_exception_with_details( + request, exc, *args, **kwds + ) class RegionIntegrationEndpoint(OrganizationEndpoint): @@ -57,11 +59,13 @@ class RegionIntegrationEndpoint(OrganizationEndpoint): Baseclass for integration endpoints in region silo that need integration exception handling """ - def handle_exception( + def handle_exception_with_details( self, request: Request, exc: Exception, *args: Any, **kwds: Any, ) -> Response: - return _handle_exception(exc) or 
super().handle_exception(request, exc, *args, **kwds) + return _handle_exception(exc) or super().handle_exception_with_details( + request, exc, *args, **kwds + ) diff --git a/src/sentry/integrations/api/endpoints/doc_integration_details.py b/src/sentry/integrations/api/endpoints/doc_integration_details.py index a06c47481bd5ee..e6974c13876c38 100644 --- a/src/sentry/integrations/api/endpoints/doc_integration_details.py +++ b/src/sentry/integrations/api/endpoints/doc_integration_details.py @@ -22,9 +22,9 @@ class DocIntegrationDetailsEndpoint(DocIntegrationBaseEndpoint): owner = ApiOwner.INTEGRATIONS publish_status = { - "DELETE": ApiPublishStatus.UNKNOWN, - "GET": ApiPublishStatus.UNKNOWN, - "PUT": ApiPublishStatus.UNKNOWN, + "DELETE": ApiPublishStatus.PRIVATE, + "GET": ApiPublishStatus.PRIVATE, + "PUT": ApiPublishStatus.PRIVATE, } def get(self, request: Request, doc_integration: DocIntegration) -> Response: diff --git a/src/sentry/integrations/api/endpoints/doc_integrations_index.py b/src/sentry/integrations/api/endpoints/doc_integrations_index.py index 9358d0bc0d2b66..869079209a13c1 100644 --- a/src/sentry/integrations/api/endpoints/doc_integrations_index.py +++ b/src/sentry/integrations/api/endpoints/doc_integrations_index.py @@ -23,8 +23,8 @@ class DocIntegrationsEndpoint(DocIntegrationsBaseEndpoint): owner = ApiOwner.INTEGRATIONS publish_status = { - "GET": ApiPublishStatus.UNKNOWN, - "POST": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, + "POST": ApiPublishStatus.PRIVATE, } def get(self, request: Request): diff --git a/src/sentry/integrations/api/endpoints/integration_proxy.py b/src/sentry/integrations/api/endpoints/integration_proxy.py index 5db2f5848659fe..f0e86cf77b02d4 100644 --- a/src/sentry/integrations/api/endpoints/integration_proxy.py +++ b/src/sentry/integrations/api/endpoints/integration_proxy.py @@ -221,7 +221,7 @@ def http_method_not_allowed(self, request): ) return response - def handle_exception( # type: ignore[override] + def handle_exception_with_details( self, request: DRFRequest, exc: Exception, @@ -240,4 +240,4 @@ def handle_exception( # type: ignore[override] logger.info("hybrid_cloud.integration_proxy.host_timeout_error", extra=self.log_extra) return self.respond(status=exc.code) - return super().handle_exception(request, exc, handler_context, scope) + return super().handle_exception_with_details(request, exc, handler_context, scope) diff --git a/src/sentry/integrations/base.py b/src/sentry/integrations/base.py index 3858ffa0b0140f..6e3f51ddbd11c8 100644 --- a/src/sentry/integrations/base.py +++ b/src/sentry/integrations/base.py @@ -138,7 +138,7 @@ class IntegrationDomain(StrEnum): class IntegrationProviderSlug(StrEnum): SLACK = "slack" DISCORD = "discord" - MSTeams = "msteams" + MSTEAMS = "msteams" JIRA = "jira" JIRA_SERVER = "jira_server" AZURE_DEVOPS = "vsts" @@ -155,7 +155,7 @@ class IntegrationProviderSlug(StrEnum): IntegrationDomain.MESSAGING: [ IntegrationProviderSlug.SLACK, IntegrationProviderSlug.DISCORD, - IntegrationProviderSlug.MSTeams, + IntegrationProviderSlug.MSTEAMS, ], IntegrationDomain.PROJECT_MANAGEMENT: [ IntegrationProviderSlug.JIRA, diff --git a/src/sentry/integrations/bitbucket/integration.py b/src/sentry/integrations/bitbucket/integration.py index f3d05641111f0f..de83ca35c06414 100644 --- a/src/sentry/integrations/bitbucket/integration.py +++ b/src/sentry/integrations/bitbucket/integration.py @@ -269,7 +269,7 @@ def dispatch(self, request: Request, pipeline) -> Response: request, BitbucketIntegrationProvider.key ) except 
AtlassianConnectValidationError as e: - lifecycle.record_failure({"failure_reason": str(e)}) + lifecycle.record_failure(str(e)) return pipeline.error("Unable to verify installation.") pipeline.bind_state("external_id", integration.external_id) diff --git a/src/sentry/integrations/bitbucket/issues.py b/src/sentry/integrations/bitbucket/issues.py index f4844827da3952..50880706e9e6c3 100644 --- a/src/sentry/integrations/bitbucket/issues.py +++ b/src/sentry/integrations/bitbucket/issues.py @@ -6,9 +6,6 @@ from django.urls import reverse from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration -from sentry.integrations.source_code_management.metrics import ( - SourceCodeIssueIntegrationInteractionType, -) from sentry.models.group import Group from sentry.shared_integrations.exceptions import ApiError, IntegrationFormError from sentry.silo.base import all_silo_function @@ -122,25 +119,24 @@ def get_link_issue_config(self, group: Group, **kwargs) -> list[dict[str, Any]]: ] def create_issue(self, data, **kwargs): - with self.record_event(SourceCodeIssueIntegrationInteractionType.CREATE_ISSUE).capture(): - client = self.get_client() - if not data.get("repo"): - raise IntegrationFormError({"repo": ["Repository is required"]}) + client = self.get_client() + if not data.get("repo"): + raise IntegrationFormError({"repo": ["Repository is required"]}) - data["content"] = {"raw": data["description"]} - del data["description"] + data["content"] = {"raw": data["description"]} + del data["description"] - try: - issue = client.create_issue(data.get("repo"), data) - except ApiError as e: - self.raise_error(e) + try: + issue = client.create_issue(data.get("repo"), data) + except ApiError as e: + self.raise_error(e) - return { - "key": issue["id"], - "title": issue["title"], - "description": issue["content"]["html"], # users content rendered as html - "repo": data.get("repo"), - } + return { + "key": issue["id"], + "title": issue["title"], + "description": issue["content"]["html"], # users content rendered as html + "repo": data.get("repo"), + } def get_issue(self, issue_id, **kwargs): client = self.get_client() diff --git a/src/sentry/integrations/bitbucket/search.py b/src/sentry/integrations/bitbucket/search.py index 708341f88b8585..234229e3919646 100644 --- a/src/sentry/integrations/bitbucket/search.py +++ b/src/sentry/integrations/bitbucket/search.py @@ -9,6 +9,10 @@ from sentry.integrations.bitbucket.integration import BitbucketIntegration from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionType, + SourceCodeSearchEndpointHaltReason, +) from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.shared_integrations.exceptions import ApiError @@ -37,32 +41,37 @@ def installation_class(self): return BitbucketIntegration def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - assert repo + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES + ).capture() as lifecycle: + assert repo - full_query = f'title~"{query}"' - try: - response = installation.search_issues(query=full_query, repo=repo) - except ApiError as e: - if "no issue tracker" in str(e): - logger.info( - "bitbucket.issue-search-no-issue-tracker", - extra={"installation_id": installation.model.id, "repo": repo}, - ) - return 
Response( - {"detail": "Bitbucket Repository has no issue tracker."}, status=400 - ) - raise + full_query = f'title~"{query}"' + try: + response = installation.search_issues(query=full_query, repo=repo) + except ApiError as e: + if "no issue tracker" in str(e): + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.NO_ISSUE_TRACKER)) + logger.info( + "bitbucket.issue-search-no-issue-tracker", + extra={"installation_id": installation.model.id, "repo": repo}, + ) + return Response( + {"detail": "Bitbucket Repository has no issue tracker."}, status=400 + ) + raise - assert isinstance(response, dict) - return Response( - [ - {"label": "#{} {}".format(i["id"], i["title"]), "value": i["id"]} - for i in response.get("values", []) - ] - ) + assert isinstance(response, dict) + return Response( + [ + {"label": "#{} {}".format(i["id"], i["title"]), "value": i["id"]} + for i in response.get("values", []) + ] + ) def handle_search_repositories( self, integration: Integration, installation: T, query: str ) -> Response: - result = installation.get_repositories(query) - return Response([{"label": i["name"], "value": i["name"]} for i in result]) + with self.record_event(SCMIntegrationInteractionType.HANDLE_SEARCH_REPOSITORIES).capture(): + result = installation.get_repositories(query) + return Response([{"label": i["name"], "value": i["name"]} for i in result]) diff --git a/src/sentry/integrations/bitbucket/webhook.py b/src/sentry/integrations/bitbucket/webhook.py index d3c6a7264bc2d7..2460e7aca6c8c5 100644 --- a/src/sentry/integrations/bitbucket/webhook.py +++ b/src/sentry/integrations/bitbucket/webhook.py @@ -1,5 +1,6 @@ import ipaddress import logging +from abc import ABC, abstractmethod from collections.abc import Mapping from datetime import timezone from typing import Any @@ -14,7 +15,9 @@ from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import Endpoint, region_silo_endpoint +from sentry.integrations.base import IntegrationDomain from sentry.integrations.bitbucket.constants import BITBUCKET_IP_RANGES, BITBUCKET_IPS +from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType from sentry.models.commit import Commit from sentry.models.commitauthor import CommitAuthor from sentry.models.organization import Organization @@ -27,7 +30,13 @@ PROVIDER_NAME = "integrations:bitbucket" -class Webhook: +class Webhook(ABC): + @property + @abstractmethod + def event_type(self) -> IntegrationWebhookEventType: + raise NotImplementedError + + @abstractmethod def __call__(self, organization: Organization, event: Mapping[str, Any]): raise NotImplementedError @@ -61,6 +70,11 @@ def update_repo_data(self, repo, event): class PushEventWebhook(Webhook): # https://confluence.atlassian.com/bitbucket/event-payloads-740262817.html#EventPayloads-Push + + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.PUSH + def __call__(self, organization: Organization, event: Mapping[str, Any]): authors = {} @@ -186,5 +200,13 @@ def post(self, request: HttpRequest, organization_id: int) -> HttpResponse: ) return HttpResponse(status=400) - handler()(organization, event) + event_handler = handler() + + with IntegrationWebhookEvent( + interaction_type=event_handler.event_type, + domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT, + provider_key="bitbucket", + ).capture(): + event_handler(organization, event) + return HttpResponse(status=204) diff --git 
a/src/sentry/integrations/bitbucket_server/integration.py b/src/sentry/integrations/bitbucket_server/integration.py index 04d5b7e5ea06b7..8ccdf435b30a2a 100644 --- a/src/sentry/integrations/bitbucket_server/integration.py +++ b/src/sentry/integrations/bitbucket_server/integration.py @@ -185,14 +185,12 @@ def dispatch(self, request: Request, pipeline) -> HttpResponse: try: request_token = client.get_request_token() except ApiError as error: - lifecycle.record_failure({"failure_reason": str(error), "url": config.get("url")}) + lifecycle.record_failure(str(error), extra={"url": config.get("url")}) return pipeline.error(f"Could not fetch a request token from Bitbucket. {error}") pipeline.bind_state("request_token", request_token) if not request_token.get("oauth_token"): - lifecycle.record_failure( - {"failure_reason": "missing oauth_token", "url": config.get("url")} - ) + lifecycle.record_failure("missing oauth_token", extra={"url": config.get("url")}) return pipeline.error("Missing oauth_token") authorize_url = client.get_authorize_url(request_token) @@ -230,7 +228,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponse: return pipeline.next_step() except ApiError as error: - lifecycle.record_failure({"failure_reason": str(error)}) + lifecycle.record_failure(str(error)) return pipeline.error( f"Could not fetch an access token from Bitbucket. {str(error)}" ) diff --git a/src/sentry/integrations/bitbucket_server/webhook.py b/src/sentry/integrations/bitbucket_server/webhook.py index 0bf345d42a35e5..f18c226c43a36d 100644 --- a/src/sentry/integrations/bitbucket_server/webhook.py +++ b/src/sentry/integrations/bitbucket_server/webhook.py @@ -1,4 +1,5 @@ import logging +from abc import ABC, abstractmethod from collections.abc import Mapping from datetime import datetime, timezone from typing import Any @@ -12,7 +13,9 @@ from django.views.decorators.csrf import csrf_exempt from django.views.generic.base import View +from sentry.integrations.base import IntegrationDomain from sentry.integrations.models.integration import Integration +from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType from sentry.models.commit import Commit from sentry.models.commitauthor import CommitAuthor from sentry.models.organization import Organization @@ -26,7 +29,13 @@ PROVIDER_NAME = "integrations:bitbucket_server" -class Webhook: +class Webhook(ABC): + @property + @abstractmethod + def event_type(self) -> IntegrationWebhookEventType: + raise NotImplementedError + + @abstractmethod def __call__(self, organization: Organization, integration_id: int, event: Mapping[str, Any]): raise NotImplementedError @@ -41,6 +50,10 @@ def update_repo_data(self, repo, event): class PushEventWebhook(Webhook): + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.PUSH + def __call__( self, organization: Organization, integration_id: int, event: Mapping[str, Any] ) -> HttpResponse: @@ -176,4 +189,11 @@ def post(self, request: HttpRequest, organization_id, integration_id) -> HttpRes ) return HttpResponse(status=400) - return handler()(organization, integration_id, event) + event_handler = handler() + + with IntegrationWebhookEvent( + interaction_type=event_handler.event_type, + domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT, + provider_key="bitbucket-server", + ).capture(): + return event_handler(organization, integration_id, event) diff --git a/src/sentry/integrations/discord/actions/metric_alert.py 
b/src/sentry/integrations/discord/actions/metric_alert.py index c21997f4f2d0fc..beff2896743e6f 100644 --- a/src/sentry/integrations/discord/actions/metric_alert.py +++ b/src/sentry/integrations/discord/actions/metric_alert.py @@ -38,7 +38,7 @@ def send_incident_alert_notification( # We can't send a message if we don't know the channel logger.warning( "discord.metric_alert.no_channel", - extra={"guild_id": incident.identifier}, + extra={"incident_id": incident.id}, ) return False @@ -56,7 +56,7 @@ def send_incident_alert_notification( except Exception as error: logger.warning( "discord.metric_alert.message_send_failure", - extra={"error": error, "guild_id": incident.identifier, "channel_id": channel}, + extra={"error": error, "incident_id": incident.id, "channel_id": channel}, ) return False else: diff --git a/src/sentry/integrations/discord/spec.py b/src/sentry/integrations/discord/spec.py index 4899f7ec1ee1de..da96fd9d6fad02 100644 --- a/src/sentry/integrations/discord/spec.py +++ b/src/sentry/integrations/discord/spec.py @@ -6,7 +6,7 @@ MessagingIdentityLinkViewSet, MessagingIntegrationSpec, ) -from sentry.models.notificationaction import ActionService +from sentry.notifications.models.notificationaction import ActionService from sentry.rules.actions import IntegrationEventAction diff --git a/src/sentry/integrations/discord/webhooks/command.py b/src/sentry/integrations/discord/webhooks/command.py index 471b7bdcbead1d..3736d7ff9e55e9 100644 --- a/src/sentry/integrations/discord/webhooks/command.py +++ b/src/sentry/integrations/discord/webhooks/command.py @@ -1,4 +1,4 @@ -from collections.abc import Callable, Iterable +from collections.abc import Iterable from dataclasses import dataclass from rest_framework.response import Response @@ -11,12 +11,18 @@ from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler from sentry.integrations.messaging import commands from sentry.integrations.messaging.commands import ( + CommandHandler, CommandInput, CommandNotMatchedError, MessagingIntegrationCommand, MessagingIntegrationCommandDispatcher, ) +from sentry.integrations.messaging.metrics import ( + MessageCommandFailureReason, + MessageCommandHaltReason, +) from sentry.integrations.messaging.spec import MessagingIntegrationSpec +from sentry.integrations.types import EventLifecycleOutcome, IntegrationResponse LINK_USER_MESSAGE = "[Click here]({url}) to link your Discord account to your Sentry account." ALREADY_LINKED_MESSAGE = "You are already linked to the Sentry account with email: `{email}`." 
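The discord/webhooks/command.py hunks that follow convert the dispatcher from plain string-returning callbacks into handlers that return IntegrationResponse[str] together with an EventLifecycleOutcome. As a reading aid, here is a minimal standalone sketch of that handler shape; the class and enum bodies below are simplified stand-ins (the real CommandInput, IntegrationResponse, and EventLifecycleOutcome definitions live outside this diff), so only the overall shape reflects the change.

```python
# Sketch only: simplified stand-ins for types imported in the hunk above.
from __future__ import annotations

from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Generic, TypeVar

R = TypeVar("R")


class EventLifecycleOutcome(Enum):  # stand-in; real enum defined elsewhere
    SUCCESS = "success"
    HALTED = "halted"
    FAILURE = "failure"


@dataclass
class CommandInput:  # stand-in; real class defined elsewhere
    cmd_value: str


@dataclass
class IntegrationResponse(Generic[R]):  # stand-in mirroring fields used in the diff
    interaction_result: EventLifecycleOutcome
    response: R
    outcome_reason: str | None = None
    context_data: dict[str, Any] = field(default_factory=dict)


HELP_MESSAGE = "Available commands: help, link, unlink"  # placeholder text


def help_handler(_: CommandInput) -> IntegrationResponse[str]:
    # Instead of returning a bare string, the handler now reports the outcome
    # of the interaction alongside the message sent back to the user.
    return IntegrationResponse(
        interaction_result=EventLifecycleOutcome.SUCCESS,
        response=HELP_MESSAGE,
    )


print(help_handler(CommandInput("help")).response)
```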
@@ -52,7 +58,7 @@ def handle(self) -> Response: "discord.interaction.command.unknown", extra={"command": command_name, **self.request.logging_data}, ) - message = dispatcher.help(cmd_input) + message = dispatcher.dispatch(CommandInput("help")) return self.send_message(message) @@ -65,19 +71,25 @@ class DiscordCommandDispatcher(MessagingIntegrationCommandDispatcher[str]): def integration_spec(self) -> MessagingIntegrationSpec: return DiscordMessagingSpec() - @property - def command_handlers( - self, - ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], str]]]: - yield commands.HELP, self.help - yield commands.LINK_IDENTITY, self.link_user - yield commands.UNLINK_IDENTITY, self.unlink_user + def help_handler(self, input: CommandInput) -> IntegrationResponse[str]: + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=HELP_MESSAGE, + ) - def link_user(self, _: CommandInput) -> str: + def link_user_handler(self, _: CommandInput) -> IntegrationResponse[str]: if self.request.has_identity(): - return ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str()) + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str()), + outcome_reason=str(MessageCommandHaltReason.ALREADY_LINKED), + context_data={ + "email": self.request.get_identity_str(), + }, + ) if not self.request.integration or not self.request.user_id: + # TODO: remove this logger logger.warning( "discord.interaction.command.missing.integration", extra={ @@ -85,18 +97,33 @@ def link_user(self, _: CommandInput) -> str: "hasUserId": self.request.user_id, }, ) - return MISSING_DATA_MESSAGE + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.FAILURE, + response=MISSING_DATA_MESSAGE, + outcome_reason=str(MessageCommandFailureReason.MISSING_DATA), + context_data={ + "has_integration": bool(self.request.integration), + "has_user_id": bool(self.request.user_id), + }, + ) link_url = build_linking_url( integration=self.request.integration, discord_id=self.request.user_id, ) - return LINK_USER_MESSAGE.format(url=link_url) + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=LINK_USER_MESSAGE.format(url=link_url), + ) - def unlink_user(self, _: CommandInput) -> str: + def unlink_user_handler(self, input: CommandInput) -> IntegrationResponse[str]: if not self.request.has_identity(): - return NOT_LINKED_MESSAGE + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=NOT_LINKED_MESSAGE, + outcome_reason=str(MessageCommandHaltReason.NOT_LINKED), + ) # if self.request.has_identity() then these must not be None assert self.request.integration is not None @@ -107,7 +134,16 @@ def unlink_user(self, _: CommandInput) -> str: discord_id=self.request.user_id, ) - return UNLINK_USER_MESSAGE.format(url=unlink_url) + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=UNLINK_USER_MESSAGE.format(url=unlink_url), + ) + + @property + def command_handlers( + self, + ) -> Iterable[tuple[MessagingIntegrationCommand, CommandHandler[str]]]: - def help(self, _: CommandInput) -> str: - return HELP_MESSAGE + yield commands.HELP, self.help_handler + yield commands.LINK_IDENTITY, self.link_user_handler + yield commands.UNLINK_IDENTITY, self.unlink_user_handler diff --git a/src/sentry/integrations/github/integration.py b/src/sentry/integrations/github/integration.py index 
8da637d0cde63d..9e6a4c7db04596 100644 --- a/src/sentry/integrations/github/integration.py +++ b/src/sentry/integrations/github/integration.py @@ -450,7 +450,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponseBase: # At this point, we are past the GitHub "authorize" step if request.GET.get("state") != pipeline.signature: - lifecycle.record_failure({"failure_reason": GitHubInstallationError.INVALID_STATE}) + lifecycle.record_failure(GitHubInstallationError.INVALID_STATE) return error( request, self.active_organization, @@ -474,7 +474,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponseBase: payload = {} if "access_token" not in payload: - lifecycle.record_failure({"failure_reason": GitHubInstallationError.MISSING_TOKEN}) + lifecycle.record_failure(GitHubInstallationError.MISSING_TOKEN) return error( request, self.active_organization, @@ -483,7 +483,7 @@ def dispatch(self, request: Request, pipeline) -> HttpResponseBase: authenticated_user_info = get_user_info(payload["access_token"]) if "login" not in authenticated_user_info: - lifecycle.record_failure({"failure_reason": GitHubInstallationError.MISSING_LOGIN}) + lifecycle.record_failure(GitHubInstallationError.MISSING_LOGIN) return error( request, self.active_organization, @@ -525,9 +525,7 @@ def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponseBase: ).exists() if integration_pending_deletion_exists: - lifecycle.record_failure( - {"failure_reason": GitHubInstallationError.PENDING_DELETION} - ) + lifecycle.record_failure(GitHubInstallationError.PENDING_DELETION) return error( request, self.active_organization, @@ -545,9 +543,7 @@ def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponseBase: return pipeline.next_step() if installations_exist: - lifecycle.record_failure( - {"failure_reason": GitHubInstallationError.INSTALLATION_EXISTS} - ) + lifecycle.record_failure(GitHubInstallationError.INSTALLATION_EXISTS) return error( request, self.active_organization, @@ -561,9 +557,7 @@ def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponseBase: external_id=installation_id, status=ObjectStatus.ACTIVE ) except Integration.DoesNotExist: - lifecycle.record_failure( - {"failure_reason": GitHubInstallationError.MISSING_INTEGRATION} - ) + lifecycle.record_failure(GitHubInstallationError.MISSING_INTEGRATION) return error(request, self.active_organization) # Check that the authenticated GitHub user is the same as who installed the app. 
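In the github/integration.py hunks above and below, lifecycle.record_failure() now receives the GitHubInstallationError reason directly instead of a {"failure_reason": ...} dict, so every call site reports the reason in the same shape. A minimal sketch of that calling convention, with placeholder stand-ins for the enum values and the lifecycle object (neither is Sentry's real definition):

from enum import StrEnum


class GitHubInstallationError(StrEnum):
    # Placeholder members; the real enum in Sentry defines the actual values.
    INVALID_STATE = "invalid_state"
    MISSING_TOKEN = "missing_token"


class Lifecycle:
    """Illustrative stand-in for the object yielded by lifecycle.capture()."""

    def record_failure(self, reason: str | StrEnum) -> None:
        # The caller passes only the reason; the lifecycle is responsible for
        # attaching it to the emitted metric/log in a uniform way.
        print(f"failure_reason={reason}")


lifecycle = Lifecycle()
lifecycle.record_failure(GitHubInstallationError.INVALID_STATE)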
@@ -571,7 +565,7 @@ def dispatch(self, request: Request, pipeline: Pipeline) -> HttpResponseBase: pipeline.fetch_state("github_authenticated_user") != integration.metadata["sender"]["login"] ): - lifecycle.record_failure({"failure_reason": GitHubInstallationError.USER_MISMATCH}) + lifecycle.record_failure(GitHubInstallationError.USER_MISMATCH) return error( request, self.active_organization, diff --git a/src/sentry/integrations/github/issues.py b/src/sentry/integrations/github/issues.py index 0599396e229a0b..cdd510da75cdaf 100644 --- a/src/sentry/integrations/github/issues.py +++ b/src/sentry/integrations/github/issues.py @@ -11,9 +11,6 @@ from sentry.integrations.mixins.issues import MAX_CHAR from sentry.integrations.models.external_issue import ExternalIssue from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration -from sentry.integrations.source_code_management.metrics import ( - SourceCodeIssueIntegrationInteractionType, -) from sentry.issues.grouptype import GroupCategory from sentry.models.group import Group from sentry.organizations.services.organization.service import organization_service @@ -173,34 +170,33 @@ def get_create_issue_config( ] def create_issue(self, data: Mapping[str, Any], **kwargs: Any) -> Mapping[str, Any]: - with self.record_event(SourceCodeIssueIntegrationInteractionType.CREATE_ISSUE).capture(): - client = self.get_client() + client = self.get_client() - repo = data.get("repo") + repo = data.get("repo") - if not repo: - raise IntegrationError("repo kwarg must be provided") + if not repo: + raise IntegrationError("repo kwarg must be provided") - try: - issue = client.create_issue( - repo=repo, - data={ - "title": data["title"], - "body": data["description"], - "assignee": data.get("assignee"), - "labels": data.get("labels"), - }, - ) - except ApiError as e: - raise IntegrationError(self.message_from_error(e)) + try: + issue = client.create_issue( + repo=repo, + data={ + "title": data["title"], + "body": data["description"], + "assignee": data.get("assignee"), + "labels": data.get("labels"), + }, + ) + except ApiError as e: + raise IntegrationError(self.message_from_error(e)) - return { - "key": issue["number"], - "title": issue["title"], - "description": issue["body"], - "url": issue["html_url"], - "repo": repo, - } + return { + "key": issue["number"], + "title": issue["title"], + "description": issue["body"], + "url": issue["html_url"], + "repo": repo, + } def get_link_issue_config(self, group: Group, **kwargs: Any) -> list[dict[str, Any]]: params = kwargs.pop("params", {}) diff --git a/src/sentry/integrations/github/search.py b/src/sentry/integrations/github/search.py index 523b9d61e4a5db..19d013fac4befd 100644 --- a/src/sentry/integrations/github/search.py +++ b/src/sentry/integrations/github/search.py @@ -7,6 +7,10 @@ from sentry.integrations.github_enterprise.integration import GitHubEnterpriseIntegration from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionType, + SourceCodeSearchEndpointHaltReason, +) from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.shared_integrations.exceptions import ApiError @@ -30,42 +34,53 @@ def installation_class(self): return (GitHubIntegration, GitHubEnterpriseIntegration) def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: 
- assert repo + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES + ).capture() as lifecycle: + assert repo - try: - response = installation.search_issues(query=f"repo:{repo} {query}") - except ApiError as err: - if err.code == 403: - return Response({"detail": "Rate limit exceeded"}, status=429) - raise + try: + response = installation.search_issues(query=f"repo:{repo} {query}") + except ApiError as err: + if err.code == 403: + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.RATE_LIMITED)) + return Response({"detail": "Rate limit exceeded"}, status=429) + raise - assert isinstance(response, dict) - return Response( - [ - {"label": "#{} {}".format(i["number"], i["title"]), "value": i["number"]} - for i in response.get("items", []) - ] - ) + assert isinstance(response, dict) + return Response( + [ + {"label": "#{} {}".format(i["number"], i["title"]), "value": i["number"]} + for i in response.get("items", []) + ] + ) def handle_search_repositories( self, integration: Integration, installation: T, query: str ) -> Response: - assert isinstance(installation, self.installation_class) + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_REPOSITORIES + ).capture() as lifecycle: + assert isinstance(installation, self.installation_class) - full_query = build_repository_query(integration.metadata, integration.name, query) - try: - response = installation.get_client().search_repositories(full_query) - except ApiError as err: - if err.code == 403: - return Response({"detail": "Rate limit exceeded"}, status=429) - if err.code == 422: - return Response( - { - "detail": "Repositories could not be searched because they do not exist, or you do not have access to them." - }, - status=404, - ) - raise - return Response( - [{"label": i["name"], "value": i["full_name"]} for i in response.get("items", [])] - ) + full_query = build_repository_query(integration.metadata, integration.name, query) + try: + response = installation.get_client().search_repositories(full_query) + except ApiError as err: + if err.code == 403: + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.RATE_LIMITED)) + return Response({"detail": "Rate limit exceeded"}, status=429) + if err.code == 422: + lifecycle.record_halt( + str(SourceCodeSearchEndpointHaltReason.MISSING_REPOSITORY_OR_NO_ACCESS) + ) + return Response( + { + "detail": "Repositories could not be searched because they do not exist, or you do not have access to them."
+ }, + status=404, + ) + raise + return Response( + [{"label": i["name"], "value": i["full_name"]} for i in response.get("items", [])] + ) diff --git a/src/sentry/integrations/github/tasks/link_all_repos.py b/src/sentry/integrations/github/tasks/link_all_repos.py index 860ca2b883cb88..168b296b59c4df 100644 --- a/src/sentry/integrations/github/tasks/link_all_repos.py +++ b/src/sentry/integrations/github/tasks/link_all_repos.py @@ -2,6 +2,11 @@ from sentry.constants import ObjectStatus from sentry.integrations.services.integration import integration_service +from sentry.integrations.source_code_management.metrics import ( + LinkAllReposHaltReason, + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, +) from sentry.organizations.services.organization import organization_service from sentry.plugins.providers.integration_repository import ( RepoExistsError, @@ -35,52 +40,71 @@ def link_all_repos( integration_id: int, organization_id: int, ): - integration = integration_service.get_integration( - integration_id=integration_id, status=ObjectStatus.ACTIVE - ) - if not integration: - logger.error( - "%s.link_all_repos.integration_missing", - integration_key, - extra={"organization_id": organization_id}, - ) - metrics.incr("github.link_all_repos.error", tags={"type": "missing_integration"}) - return - - rpc_org = organization_service.get(id=organization_id) - if rpc_org is None: - logger.error( - "%s.link_all_repos.organization_missing", - integration_key, - extra={"organization_id": organization_id}, - ) - metrics.incr( - f"{integration_key}.link_all_repos.error", - tags={"type": "missing_organization"}, - ) - return - - installation = integration.get_installation(organization_id=organization_id) - client = installation.get_client() + with SCMIntegrationInteractionEvent( + interaction_type=SCMIntegrationInteractionType.LINK_ALL_REPOS, + provider_key=integration_key, + ).capture() as lifecycle: + lifecycle.add_extra("organization_id", organization_id) + integration = integration_service.get_integration( + integration_id=integration_id, status=ObjectStatus.ACTIVE + ) + if not integration: + # TODO: Remove this logger in favor of context manager + logger.error( + "%s.link_all_repos.integration_missing", + integration_key, + extra={"organization_id": organization_id}, + ) + metrics.incr("github.link_all_repos.error", tags={"type": "missing_integration"}) + lifecycle.record_failure(str(LinkAllReposHaltReason.MISSING_INTEGRATION)) + return - try: - repositories = client.get_repositories(fetch_max_pages=True) - except ApiError as e: - if installation.is_rate_limited_error(e): + rpc_org = organization_service.get(id=organization_id) + if rpc_org is None: + logger.error( + "%s.link_all_repos.organization_missing", + integration_key, + extra={"organization_id": organization_id}, + ) + metrics.incr( + f"{integration_key}.link_all_repos.error", + tags={"type": "missing_organization"}, + ) + lifecycle.record_failure(str(LinkAllReposHaltReason.MISSING_ORGANIZATION)) return - metrics.incr(f"{integration_key}.link_all_repos.api_error") - raise + installation = integration.get_installation(organization_id=organization_id) - integration_repo_provider = get_integration_repository_provider(integration) + client = installation.get_client() - for repo in repositories: try: - config = get_repo_config(repo, integration_id) - integration_repo_provider.create_repository(repo_config=config, organization=rpc_org) - except KeyError: - continue - except RepoExistsError: - 
metrics.incr("sentry.integration_repo_provider.repo_exists") - continue + repositories = client.get_repositories(fetch_max_pages=True) + except ApiError as e: + if installation.is_rate_limited_error(e): + lifecycle.record_halt(str(LinkAllReposHaltReason.RATE_LIMITED)) + return + + metrics.incr(f"{integration_key}.link_all_repos.api_error") + raise + + integration_repo_provider = get_integration_repository_provider(integration) + + # If we successfully create any repositories, we'll set this to True + success = False + + for repo in repositories: + try: + config = get_repo_config(repo, integration_id) + integration_repo_provider.create_repository( + repo_config=config, organization=rpc_org + ) + success = True + except KeyError: + continue + except RepoExistsError: + metrics.incr("sentry.integration_repo_provider.repo_exists") + continue + + if not success: + lifecycle.record_halt(str(LinkAllReposHaltReason.REPOSITORY_NOT_CREATED)) diff --git a/src/sentry/integrations/github/tasks/pr_comment.py b/src/sentry/integrations/github/tasks/pr_comment.py index 93536c32b9e96b..e40805f0a843ca 100644 --- a/src/sentry/integrations/github/tasks/pr_comment.py +++ b/src/sentry/integrations/github/tasks/pr_comment.py @@ -216,9 +216,7 @@ def github_comment_workflow(pullrequest_id: int, project_id: int): top_24_issues = issue_list[:24] # 24 is the P99 for issues-per-PR - enabled_copilot = features.has("projects:ai-autofix", project) or features.has( - "organizations:autofix", organization - ) + enabled_copilot = features.has("organizations:gen-ai-features", organization) github_copilot_actions = ( [ { diff --git a/src/sentry/integrations/github/webhook.py b/src/sentry/integrations/github/webhook.py index 679a70880de7f5..d5066d0e908673 100644 --- a/src/sentry/integrations/github/webhook.py +++ b/src/sentry/integrations/github/webhook.py @@ -3,7 +3,8 @@ import hashlib import hmac import logging -from collections.abc import Callable, Mapping, MutableMapping +from abc import ABC, abstractmethod +from collections.abc import Mapping, MutableMapping from datetime import timezone from typing import Any @@ -22,6 +23,7 @@ from sentry.autofix.webhooks import handle_github_pr_webhook_for_autofix from sentry.constants import EXTENSION_LANGUAGE_MAP, ObjectStatus from sentry.identity.services.identity.service import identity_service +from sentry.integrations.base import IntegrationDomain from sentry.integrations.github.tasks.open_pr_comment import open_pr_comment_workflow from sentry.integrations.pipeline import ensure_integration from sentry.integrations.services.integration.model import ( @@ -30,6 +32,7 @@ ) from sentry.integrations.services.integration.service import integration_service from sentry.integrations.services.repository.service import repository_service +from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType from sentry.integrations.utils.scope import clear_tags_and_context from sentry.models.commit import Commit from sentry.models.commitauthor import CommitAuthor @@ -70,13 +73,19 @@ def get_file_language(filename: str) -> str | None: return language -class Webhook: +class Webhook(ABC): """ Base class for GitHub webhooks handled in region silos. 
""" provider = "github" + @property + @abstractmethod + def event_type(self) -> IntegrationWebhookEventType: + raise NotImplementedError + + @abstractmethod def _handle( self, integration: RpcIntegration, @@ -209,6 +218,10 @@ class InstallationEventWebhook: provider = "github" + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.INSTALLATION + def __call__(self, event: Mapping[str, Any], host: str | None = None) -> None: installation = event["installation"] @@ -284,6 +297,10 @@ def _handle_delete( class PushEventWebhook(Webhook): """https://developer.github.com/v3/activity/events/types/#pushevent""" + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.PUSH + def is_anonymous_email(self, email: str) -> bool: return email[-25:] == "@users.noreply.github.com" @@ -460,6 +477,10 @@ def _handle( class PullRequestEventWebhook(Webhook): """https://developer.github.com/v3/activity/events/types/#pullrequestevent""" + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.PULL_REQUEST + def is_anonymous_email(self, email: str) -> bool: return email[-25:] == "@users.noreply.github.com" @@ -591,13 +612,13 @@ class GitHubIntegrationsWebhookEndpoint(Endpoint): "POST": ApiPublishStatus.PRIVATE, } - _handlers: dict[str, Callable[[], Callable[[Any], Any]]] = { + _handlers: dict[str, type[Webhook] | type[InstallationEventWebhook]] = { "push": PushEventWebhook, "pull_request": PullRequestEventWebhook, "installation": InstallationEventWebhook, } - def get_handler(self, event_type: str) -> Callable[[], Callable[[Any], Any]] | None: + def get_handler(self, event_type: str) -> type[Webhook] | type[InstallationEventWebhook] | None: return self._handlers.get(event_type) def is_valid_signature(self, method: str, body: bytes, secret: str, signature: str) -> bool: @@ -673,5 +694,12 @@ def handle(self, request: HttpRequest) -> HttpResponse: logger.exception("Invalid JSON.") return HttpResponse(status=400) - handler()(event) + event_handler = handler() + + with IntegrationWebhookEvent( + interaction_type=event_handler.event_type, + domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT, + provider_key="github", + ).capture(): + event_handler(event) return HttpResponse(status=204) diff --git a/src/sentry/integrations/github_enterprise/integration.py b/src/sentry/integrations/github_enterprise/integration.py index 31bac578fb1da8..09d28c4ac88c48 100644 --- a/src/sentry/integrations/github_enterprise/integration.py +++ b/src/sentry/integrations/github_enterprise/integration.py @@ -9,7 +9,6 @@ from rest_framework.request import Request from sentry import http -from sentry.identity.github_enterprise import get_user_info from sentry.identity.pipeline import IdentityProviderPipeline from sentry.integrations.base import ( FeatureDescription, @@ -36,6 +35,21 @@ from .client import GitHubEnterpriseApiClient from .repository import GitHubEnterpriseRepositoryProvider + +def get_user_info(url, access_token): + with http.build_session() as session: + resp = session.get( + f"https://{url}/api/v3/user", + headers={ + "Accept": "application/vnd.github.machine-man-preview+json", + "Authorization": f"token {access_token}", + }, + verify=False, + ) + resp.raise_for_status() + return resp.json() + + DESCRIPTION = """ Connect your Sentry organization into your on-premises GitHub Enterprise instances. 
Take a step towards augmenting your sentry issues with commits from diff --git a/src/sentry/integrations/github_enterprise/webhook.py b/src/sentry/integrations/github_enterprise/webhook.py index 706544f1d9272a..230ab9e069434f 100644 --- a/src/sentry/integrations/github_enterprise/webhook.py +++ b/src/sentry/integrations/github_enterprise/webhook.py @@ -4,8 +4,6 @@ import hmac import logging import re -from collections.abc import Callable -from typing import Any import orjson import sentry_sdk @@ -18,12 +16,15 @@ from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.constants import ObjectStatus +from sentry.integrations.base import IntegrationDomain from sentry.integrations.github.webhook import ( InstallationEventWebhook, PullRequestEventWebhook, PushEventWebhook, + Webhook, get_github_external_id, ) +from sentry.integrations.utils.metrics import IntegrationWebhookEvent from sentry.integrations.utils.scope import clear_tags_and_context from sentry.utils import metrics from sentry.utils.sdk import Scope @@ -127,7 +128,7 @@ class GitHubEnterpriseWebhookBase(Endpoint): authentication_classes = () permission_classes = () - _handlers: dict[str, Callable[[], Callable[[Any], Any]]] = {} + _handlers: dict[str, type[InstallationEventWebhook] | type[Webhook]] = {} # https://developer.github.com/webhooks/ def get_handler(self, event_type): @@ -296,7 +297,14 @@ def handle(self, request: HttpRequest) -> HttpResponse: sentry_sdk.capture_exception(e) return HttpResponse(MALFORMED_SIGNATURE_ERROR, status=400) - handler()(event, host) + event_handler = handler() + with IntegrationWebhookEvent( + interaction_type=event_handler.event_type, + domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT, + provider_key="github-enterprise", + ).capture(): + event_handler(event, host) + return HttpResponse(status=204) diff --git a/src/sentry/integrations/gitlab/issues.py b/src/sentry/integrations/gitlab/issues.py index ac2c8dc48a14b6..4a9e2ce16d7bcd 100644 --- a/src/sentry/integrations/gitlab/issues.py +++ b/src/sentry/integrations/gitlab/issues.py @@ -7,9 +7,6 @@ from django.urls import reverse from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration -from sentry.integrations.source_code_management.metrics import ( - SourceCodeIssueIntegrationInteractionType, -) from sentry.models.group import Group from sentry.shared_integrations.exceptions import ApiError, ApiUnauthorized, IntegrationError from sentry.silo.base import all_silo_function @@ -80,32 +77,31 @@ def get_create_issue_config( ] def create_issue(self, data, **kwargs): - with self.record_event(SourceCodeIssueIntegrationInteractionType.CREATE_ISSUE).capture(): - client = self.get_client() - - project_id = data.get("project") - - if not project_id: - raise IntegrationError("project kwarg must be provided") - - try: - issue = client.create_issue( - project=project_id, - data={"title": data["title"], "description": data["description"]}, - ) - project = client.get_project(project_id) - except ApiError as e: - raise IntegrationError(self.message_from_error(e)) - - project_and_issue_iid = "{}#{}".format(project["path_with_namespace"], issue["iid"]) - return { - "key": project_and_issue_iid, - "title": issue["title"], - "description": issue["description"], - "url": issue["web_url"], - "project": project_id, - "metadata": {"display_name": project_and_issue_iid}, - } + client = self.get_client() + + project_id = data.get("project") + + if not project_id: + raise 
IntegrationError("project kwarg must be provided") + + try: + issue = client.create_issue( + project=project_id, + data={"title": data["title"], "description": data["description"]}, + ) + project = client.get_project(project_id) + except ApiError as e: + raise IntegrationError(self.message_from_error(e)) + + project_and_issue_iid = "{}#{}".format(project["path_with_namespace"], issue["iid"]) + return { + "key": project_and_issue_iid, + "title": issue["title"], + "description": issue["description"], + "url": issue["web_url"], + "project": project_id, + "metadata": {"display_name": project_and_issue_iid}, + } def after_link_issue(self, external_issue, **kwargs): data = kwargs["data"] diff --git a/src/sentry/integrations/gitlab/search.py b/src/sentry/integrations/gitlab/search.py index 45ef39b78a1496..5423110e0eef40 100644 --- a/src/sentry/integrations/gitlab/search.py +++ b/src/sentry/integrations/gitlab/search.py @@ -6,6 +6,7 @@ from sentry.integrations.gitlab.integration import GitlabIntegration from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import SCMIntegrationInteractionType from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.shared_integrations.exceptions import ApiError @@ -27,43 +28,51 @@ def installation_class(self): return GitlabIntegration def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - assert repo + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES + ).capture() as lifecycle: + assert repo - full_query: str | None = query + full_query: str | None = query - try: - iids = [int(query)] - full_query = None - except ValueError: - iids = None + try: + iids = [int(query)] + full_query = None + except ValueError: + iids = None - try: - response = installation.search_issues(query=full_query, project_id=repo, iids=iids) - except ApiError as e: - return Response({"detail": str(e)}, status=400) + try: + response = installation.search_issues(query=full_query, project_id=repo, iids=iids) + except ApiError as e: + lifecycle.record_failure(e) + return Response({"detail": str(e)}, status=400) - assert isinstance(response, list) - return Response( - [ - { - "label": "(#{}) {}".format(i["iid"], i["title"]), - "value": "{}#{}".format(i["project_id"], i["iid"]), - } - for i in response - ] - ) + assert isinstance(response, list) + return Response( + [ + { + "label": "(#{}) {}".format(i["iid"], i["title"]), + "value": "{}#{}".format(i["project_id"], i["iid"]), + } + for i in response + ] + ) def handle_search_repositories( self, integration: Integration, installation: T, query: str ) -> Response: - assert isinstance(installation, self.installation_class) - try: - response = installation.search_projects(query) - except ApiError as e: - return Response({"detail": str(e)}, status=400) - return Response( - [ - {"label": project["name_with_namespace"], "value": project["id"]} - for project in response - ] - ) + with self.record_event( + SCMIntegrationInteractionType.HANDLE_SEARCH_REPOSITORIES + ).capture() as lifecyle: + assert isinstance(installation, self.installation_class) + try: + response = installation.search_projects(query) + except ApiError as e: + lifecyle.record_failure(e) + return Response({"detail": str(e)}, status=400) + return Response( + [ + {"label": project["name_with_namespace"], "value": project["id"]} + for 
project in response + ] + ) diff --git a/src/sentry/integrations/gitlab/webhooks.py b/src/sentry/integrations/gitlab/webhooks.py index 9d24ce00dc3dc1..6d2f3cfd0f5c5f 100644 --- a/src/sentry/integrations/gitlab/webhooks.py +++ b/src/sentry/integrations/gitlab/webhooks.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +from abc import ABC, abstractmethod from collections.abc import Mapping from datetime import timezone from typing import Any @@ -16,8 +17,10 @@ from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import Endpoint, region_silo_endpoint +from sentry.integrations.base import IntegrationDomain from sentry.integrations.services.integration import integration_service from sentry.integrations.services.integration.model import RpcIntegration +from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType from sentry.integrations.utils.scope import clear_tags_and_context from sentry.models.commit import Commit from sentry.models.commitauthor import CommitAuthor @@ -33,7 +36,13 @@ GITHUB_WEBHOOK_SECRET_INVALID_ERROR = """Gitlab's webhook secret does not match. Refresh token (or re-install the integration) by following this https://docs.sentry.io/organization/integrations/integration-platform/public-integration/#refreshing-tokens.""" -class Webhook: +class Webhook(ABC): + @property + @abstractmethod + def event_type(self) -> IntegrationWebhookEventType: + raise NotImplementedError + + @abstractmethod def __call__( self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any] ): @@ -92,6 +101,10 @@ class MergeEventWebhook(Webhook): See https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#merge-request-events """ + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.PULL_REQUEST + def __call__( self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any] ): @@ -156,6 +169,10 @@ class PushEventWebhook(Webhook): See https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#push-events """ + @property + def event_type(self) -> IntegrationWebhookEventType: + return IntegrationWebhookEventType.PUSH + def __call__( self, integration: RpcIntegration, organization: RpcOrganization, event: Mapping[str, Any] ): @@ -244,7 +261,10 @@ class GitlabWebhookEndpoint(Endpoint, GitlabWebhookMixin): permission_classes = () provider = "gitlab" - _handlers = {"Push Hook": PushEventWebhook, "Merge Request Hook": MergeEventWebhook} + _handlers: dict[str, type[Webhook]] = { + "Push Hook": PushEventWebhook, + "Merge Request Hook": MergeEventWebhook, + } @method_decorator(csrf_exempt) def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse: @@ -326,5 +346,13 @@ def post(self, request: HttpRequest) -> HttpResponse: ) if org_context: organization = org_context.organization - handler()(integration, organization, event) + event_handler = handler() + + with IntegrationWebhookEvent( + interaction_type=event_handler.event_type, + domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT, + provider_key="gitlab", + ).capture(): + event_handler(integration, organization, event) + return HttpResponse(status=204) diff --git a/src/sentry/integrations/jira/utils/api.py b/src/sentry/integrations/jira/utils/api.py index cdc0f2b37e6a51..a3701ad2d7c5a9 100644 --- a/src/sentry/integrations/jira/utils/api.py +++ b/src/sentry/integrations/jira/utils/api.py @@ -13,6 +13,11 @@ 
from sentry.shared_integrations.exceptions import ApiError from ...mixins.issues import IssueSyncIntegration +from ...project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, + ProjectManagementHaltReason, +) from ..client import JiraCloudClient logger = logging.getLogger(__name__) @@ -75,26 +80,45 @@ def handle_assignee_change( sync_group_assignee_inbound(integration, email, issue_key, assign=True) -def handle_status_change(integration, data): - issue_key = data["issue"]["key"] - status_changed = any(item for item in data["changelog"]["items"] if item["field"] == "status") - log_context = {"issue_key": issue_key, "integration_id": integration.id} - - if not status_changed: - logger.info("jira.handle_status_change.unchanged", extra=log_context) - return - - try: - changelog = next(item for item in data["changelog"]["items"] if item["field"] == "status") - except StopIteration: - logger.info("jira.missing-changelog-status", extra=log_context) - return - - result = integration_service.organization_contexts(integration_id=integration.id) - for oi in result.organization_integrations: - install = integration.get_installation(organization_id=oi.organization_id) - if isinstance(install, IssueSyncIntegration): - install.sync_status_inbound(issue_key, {"changelog": changelog, "issue": data["issue"]}) +# TODO(Gabe): Consolidate this with VSTS's implementation, create DTO for status +# changes. +def handle_status_change(integration: RpcIntegration, data: Mapping[str, Any]) -> None: + with ProjectManagementEvent( + action_type=ProjectManagementActionType.INBOUND_STATUS_SYNC, integration=integration + ).capture() as lifecycle: + issue_key = data["issue"]["key"] + status_changed = any( + item for item in data["changelog"]["items"] if item["field"] == "status" + ) + log_context = {"issue_key": issue_key, "integration_id": integration.id} + + if not status_changed: + logger.info("jira.handle_status_change.unchanged", extra=log_context) + return + + try: + changelog = next( + item for item in data["changelog"]["items"] if item["field"] == "status" + ) + except StopIteration: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_INBOUND_MISSING_CHANGELOG_STATUS, extra=log_context + ) + logger.info("jira.missing-changelog-status", extra=log_context) + return + + result = integration_service.organization_contexts(integration_id=integration.id) + for oi in result.organization_integrations: + install = integration.get_installation(organization_id=oi.organization_id) + if isinstance(install, IssueSyncIntegration): + install.sync_status_inbound( + issue_key, {"changelog": changelog, "issue": data["issue"]} + ) + else: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_NON_SYNC_INTEGRATION_PROVIDED, + extra=log_context, + ) def handle_jira_api_error(error: ApiError, message: str = "") -> Mapping[str, str] | None: diff --git a/src/sentry/integrations/jira/webhooks/base.py b/src/sentry/integrations/jira/webhooks/base.py index 7676c2aa86ae1d..2df7f73b877169 100644 --- a/src/sentry/integrations/jira/webhooks/base.py +++ b/src/sentry/integrations/jira/webhooks/base.py @@ -37,7 +37,7 @@ class JiraWebhookBase(Endpoint, abc.ABC): def dispatch(self, request: Request, *args, **kwargs) -> Response: return super().dispatch(request, *args, **kwargs) - def handle_exception( + def handle_exception_with_details( self, request: Request, exc: Exception, @@ -108,7 +108,7 @@ def handle_exception( # This will log the error locally, capture the exception and send it to Sentry, and 
create a # generic 500/Internal Error response - return super().handle_exception(request, exc, handler_context, scope) + return super().handle_exception_with_details(request, exc, handler_context, scope) def get_token(self, request: Request) -> str: try: diff --git a/src/sentry/integrations/jira/webhooks/installed.py b/src/sentry/integrations/jira/webhooks/installed.py index 421ed574fd93c4..7b6b33f13089c6 100644 --- a/src/sentry/integrations/jira/webhooks/installed.py +++ b/src/sentry/integrations/jira/webhooks/installed.py @@ -11,6 +11,9 @@ from sentry.integrations.utils.atlassian_connect import authenticate_asymmetric_jwt, verify_claims from sentry.utils import jwt +from ...base import IntegrationDomain +from ...project_management.metrics import ProjectManagementFailuresReason +from ...utils.metrics import IntegrationPipelineViewEvent, IntegrationPipelineViewType from ..integration import JiraIntegrationProvider from .base import JiraWebhookBase @@ -26,28 +29,34 @@ class JiraSentryInstalledWebhook(JiraWebhookBase): """ def post(self, request: Request, *args, **kwargs) -> Response: - token = self.get_token(request) - - state = request.data - if not state: - return self.respond(status=status.HTTP_400_BAD_REQUEST) - - key_id = jwt.peek_header(token).get("kid") - if key_id: - decoded_claims = authenticate_asymmetric_jwt(token, key_id) - verify_claims(decoded_claims, request.path, request.GET, method="POST") - - data = JiraIntegrationProvider().build_integration(state) - integration = ensure_integration(self.provider, data) - - # Note: Unlike in all other Jira webhooks, we don't call `bind_org_context_from_integration` - # here, because at this point the integration hasn't yet been bound to an organization. The - # best we can do at this point is to record the integration's id. - sentry_sdk.set_tag("integration_id", integration.id) - - # Sync integration metadata from Jira. This must be executed *after* - # the integration has been installed on Jira as the access tokens will - # not work until then. - sync_metadata.apply_async(kwargs={"integration_id": integration.id}, countdown=10) - - return self.respond() + with IntegrationPipelineViewEvent( + interaction_type=IntegrationPipelineViewType.VERIFY_INSTALLATION, + domain=IntegrationDomain.PROJECT_MANAGEMENT, + provider_key=self.provider, + ).capture() as lifecycle: + token = self.get_token(request) + + state = request.data + if not state: + lifecycle.record_failure(ProjectManagementFailuresReason.INSTALLATION_STATE_MISSING) + return self.respond(status=status.HTTP_400_BAD_REQUEST) + + key_id = jwt.peek_header(token).get("kid") + if key_id: + decoded_claims = authenticate_asymmetric_jwt(token, key_id) + verify_claims(decoded_claims, request.path, request.GET, method="POST") + + data = JiraIntegrationProvider().build_integration(state) + integration = ensure_integration(self.provider, data) + + # Note: Unlike in all other Jira webhooks, we don't call `bind_org_context_from_integration` + # here, because at this point the integration hasn't yet been bound to an organization. The + # best we can do at this point is to record the integration's id. + sentry_sdk.set_tag("integration_id", integration.id) + + # Sync integration metadata from Jira. This must be executed *after* + # the integration has been installed on Jira as the access tokens will + # not work until then. 
+ sync_metadata.apply_async(kwargs={"integration_id": integration.id}, countdown=10) + + return self.respond() diff --git a/src/sentry/integrations/jira/webhooks/issue_updated.py b/src/sentry/integrations/jira/webhooks/issue_updated.py index 01b3202da3a789..c78025859d79bd 100644 --- a/src/sentry/integrations/jira/webhooks/issue_updated.py +++ b/src/sentry/integrations/jira/webhooks/issue_updated.py @@ -33,7 +33,7 @@ class JiraIssueUpdatedWebhook(JiraWebhookBase): Webhook hit by Jira whenever an issue is updated in Jira's database. """ - def handle_exception( + def handle_exception_with_details( self, request: Request, exc: Exception, @@ -45,7 +45,7 @@ def handle_exception( if response_option: return self.respond(response_option) - return super().handle_exception(request, exc, handler_context, scope) + return super().handle_exception_with_details(request, exc, handler_context, scope) def post(self, request: Request, *args, **kwargs) -> Response: token = self.get_token(request) diff --git a/src/sentry/integrations/jira_server/integration.py b/src/sentry/integrations/jira_server/integration.py index 66f3ee72e7f5c8..64cf65ab77d0d8 100644 --- a/src/sentry/integrations/jira_server/integration.py +++ b/src/sentry/integrations/jira_server/integration.py @@ -43,6 +43,7 @@ ) from sentry.silo.base import all_silo_function from sentry.users.models.identity import Identity +from sentry.users.models.user import User from sentry.users.services.user import RpcUser from sentry.users.services.user.service import user_service from sentry.utils.hashlib import sha1_text @@ -714,7 +715,7 @@ def get_projects(self, cached=True): return jira_projects @all_silo_function - def get_create_issue_config(self, group: Group | None, user: RpcUser, **kwargs): + def get_create_issue_config(self, group: Group | None, user: RpcUser | User, **kwargs): """ We use the `group` to get three things: organization_slug, project defaults, and default title and description. In the case where we're diff --git a/src/sentry/integrations/messaging/commands.py b/src/sentry/integrations/messaging/commands.py index 32968a56e8ef24..c88cba8cef384c 100644 --- a/src/sentry/integrations/messaging/commands.py +++ b/src/sentry/integrations/messaging/commands.py @@ -9,6 +9,7 @@ MessagingInteractionType, ) from sentry.integrations.messaging.spec import MessagingIntegrationSpec +from sentry.integrations.types import EventLifecycleOutcome, IntegrationResponse @dataclass(frozen=True, eq=True) @@ -101,6 +102,9 @@ def get_all_command_slugs(self) -> Iterable[CommandSlug]: R = TypeVar("R") # response +# Command handler type that receives lifecycle object +CommandHandler = Callable[[CommandInput], IntegrationResponse[R]] + class MessagingIntegrationCommandDispatcher(Generic[R], ABC): """The set of commands handled by one messaging integration.""" @@ -114,9 +118,17 @@ def integration_spec(self) -> MessagingIntegrationSpec: @abstractmethod def command_handlers( self, - ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], R]]]: + ) -> Iterable[tuple[MessagingIntegrationCommand, CommandHandler[R]]]: + """Return list of (command, handler) tuples. + + Each handler receives (command_input) and returns IntegrationResponse[R]. + """ raise NotImplementedError + """ + Handlers for bot commands which should wrap the EventLifecycle context. 
+ """ + def get_event(self, command: MessagingIntegrationCommand) -> MessagingInteractionEvent: return MessagingInteractionEvent( interaction_type=command.interaction_type, spec=self.integration_spec @@ -127,7 +139,7 @@ def dispatch(self, cmd_input: CommandInput) -> R: class CandidateHandler: command: MessagingIntegrationCommand slug: CommandSlug - callback: Callable[[CommandInput], R] + callback: CommandHandler[R] def parsing_order(self) -> int: # Sort by descending length of arg tokens. If one slug is a prefix of @@ -145,7 +157,16 @@ def parsing_order(self) -> int: for handler in candidate_handlers: if handler.slug.does_match(cmd_input): arg_input = cmd_input.adjust(handler.slug) - with self.get_event(handler.command).capture(assume_success=False): - return handler.callback(arg_input) + event = self.get_event(handler.command) + with event.capture(assume_success=False) as lifecycle: + response = handler.callback(arg_input) + # Record the appropriate lifecycle event based on the response + if response.interaction_result == EventLifecycleOutcome.HALTED: + lifecycle.record_halt(response.outcome_reason, response.context_data) + elif response.interaction_result == EventLifecycleOutcome.FAILURE: + lifecycle.record_failure(response.outcome_reason, response.context_data) + else: + lifecycle.record_success() + return response.response raise CommandNotMatchedError(f"{cmd_input=!r}", cmd_input) diff --git a/src/sentry/integrations/messaging/metrics.py b/src/sentry/integrations/messaging/metrics.py index 76978dfd12decf..4056ae665641e7 100644 --- a/src/sentry/integrations/messaging/metrics.py +++ b/src/sentry/integrations/messaging/metrics.py @@ -1,6 +1,6 @@ from collections.abc import Mapping from dataclasses import dataclass -from enum import Enum +from enum import StrEnum from typing import Any from sentry.integrations.base import IntegrationDomain @@ -12,7 +12,7 @@ from sentry.users.services.user import RpcUser -class MessagingInteractionType(Enum): +class MessagingInteractionType(StrEnum): """A way in which a user can interact with Sentry through a messaging app.""" # Direct interactions with the user @@ -34,6 +34,7 @@ class MessagingInteractionType(Enum): UNRESOLVE = "UNRESOLVE" IGNORE = "IGNORE" MARK_ONGOING = "MARK_ONGOING" + VIEW_SUBMISSION = "VIEW_SUBMISSION" # Automatic behaviors UNFURL_ISSUES = "UNFURL_ISSUES" @@ -42,9 +43,6 @@ class MessagingInteractionType(Enum): GET_PARENT_NOTIFICATION = "GET_PARENT_NOTIFICATION" - def __str__(self) -> str: - return self.value.lower() - @dataclass class MessagingInteractionEvent(IntegrationEventLifecycleMetric): @@ -71,3 +69,31 @@ def get_extras(self) -> Mapping[str, Any]: "user_id": (self.user.id if self.user else None), "organization_id": (self.organization.id if self.organization else None), } + + +class MessageCommandHaltReason(StrEnum): + """Common reasons why a messaging command may halt without success/failure.""" + + # Identity Linking + ALREADY_LINKED = "already_linked" + NOT_LINKED = "not_linked" + + # Team Linking + LINK_FROM_CHANNEL = "link_from_channel" + LINK_USER_FIRST = "link_user_first" + CHANNEL_ALREADY_LINKED = "channel_already_linked" + TEAM_NOT_LINKED = "team_not_linked" + INSUFFICIENT_ROLE = "insufficient_role" + + +class MessageCommandFailureReason(StrEnum): + """Common reasons why a messaging command may fail.""" + + MISSING_DATA = "missing_data" + INVALID_STATE = "invalid_state" + + +class MessageInteractionFailureReason(StrEnum): + """Common reasons why a messaging interaction may fail.""" + + MISSING_ACTION = "missing_action" 
diff --git a/src/sentry/integrations/messaging/spec.py b/src/sentry/integrations/messaging/spec.py index 8dfd20a14422a1..b4939159012295 100644 --- a/src/sentry/integrations/messaging/spec.py +++ b/src/sentry/integrations/messaging/spec.py @@ -11,8 +11,8 @@ from sentry.incidents.models.alert_rule import ActionHandlerFactory, AlertRuleTriggerAction from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.base import IntegrationProvider -from sentry.models.notificationaction import ActionService, ActionTarget from sentry.models.project import Project +from sentry.notifications.models.notificationaction import ActionService, ActionTarget from sentry.rules import rules from sentry.rules.actions import IntegrationEventAction diff --git a/src/sentry/integrations/mixins/issues.py b/src/sentry/integrations/mixins/issues.py index b59ffcf8bf9171..93d9579dea603b 100644 --- a/src/sentry/integrations/mixins/issues.py +++ b/src/sentry/integrations/mixins/issues.py @@ -200,7 +200,7 @@ def get_persisted_user_default_config_fields(self): """ return [] - def store_issue_last_defaults(self, project: Project, user: RpcUser, data): + def store_issue_last_defaults(self, project: Project, user: RpcUser | User, data): """ Stores the last used field defaults on a per-project basis. This accepts a dict of values that will be filtered to keys returned by diff --git a/src/sentry/integrations/msteams/spec.py b/src/sentry/integrations/msteams/spec.py index b617599c853100..07d7d75b527fd7 100644 --- a/src/sentry/integrations/msteams/spec.py +++ b/src/sentry/integrations/msteams/spec.py @@ -6,7 +6,7 @@ MessagingIdentityLinkViewSet, MessagingIntegrationSpec, ) -from sentry.models.notificationaction import ActionService +from sentry.notifications.models.notificationaction import ActionService from sentry.rules.actions import IntegrationEventAction PROVIDER = "msteams" diff --git a/src/sentry/integrations/msteams/webhook.py b/src/sentry/integrations/msteams/webhook.py index b5b95e42cdb6b6..c47087573fe4dc 100644 --- a/src/sentry/integrations/msteams/webhook.py +++ b/src/sentry/integrations/msteams/webhook.py @@ -24,12 +24,14 @@ from sentry.identity.services.identity.model import RpcIdentity from sentry.integrations.messaging import commands from sentry.integrations.messaging.commands import ( + CommandHandler, CommandInput, CommandNotMatchedError, MessagingIntegrationCommand, MessagingIntegrationCommandDispatcher, ) from sentry.integrations.messaging.metrics import ( + MessageCommandHaltReason, MessagingInteractionEvent, MessagingInteractionType, ) @@ -37,6 +39,7 @@ from sentry.integrations.msteams import parsing from sentry.integrations.msteams.spec import PROVIDER, MsTeamsMessagingSpec from sentry.integrations.services.integration import integration_service +from sentry.integrations.types import EventLifecycleOutcome, IntegrationResponse from sentry.models.activity import ActivityIntegration from sentry.models.apikey import ApiKey from sentry.models.group import Group @@ -652,26 +655,49 @@ def conversation_id(self) -> str: def teams_user_id(self) -> str: return self.data["from"]["id"] - @property - def command_handlers( - self, - ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], AdaptiveCard]]]: - yield commands.HELP, (lambda _: build_help_command_card()) - yield commands.LINK_IDENTITY, self.link_identity - yield commands.UNLINK_IDENTITY, self.unlink_identity + def help_handler(self, input: CommandInput) -> IntegrationResponse[AdaptiveCard]: + return 
IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=build_help_command_card(), + ) - def link_identity(self, _: CommandInput) -> AdaptiveCard: + def link_user_handler(self, input: CommandInput) -> IntegrationResponse[AdaptiveCard]: linked_identity = identity_service.get_identity( filter={"identity_ext_id": self.teams_user_id} ) has_linked_identity = linked_identity is not None + if has_linked_identity: - return build_already_linked_identity_command_card() + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=build_already_linked_identity_command_card(), + outcome_reason=str(MessageCommandHaltReason.ALREADY_LINKED), + context_data={ + "user_id": self.teams_user_id, + "identity_id": linked_identity.id if linked_identity else None, + }, + ) else: - return build_link_identity_command_card() + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=build_link_identity_command_card(), + ) - def unlink_identity(self, _: CommandInput) -> AdaptiveCard: + def unlink_user_handler(self, input: CommandInput) -> IntegrationResponse[AdaptiveCard]: unlink_url = build_unlinking_url( self.conversation_id, self.data["serviceUrl"], self.teams_user_id ) - return build_unlink_identity_card(unlink_url) + # TODO: check if the user is already unlinked + return IntegrationResponse( + response=build_unlink_identity_card(unlink_url), + interaction_result=EventLifecycleOutcome.SUCCESS, + ) + + @property + def command_handlers( + self, + ) -> Iterable[tuple[MessagingIntegrationCommand, CommandHandler[AdaptiveCard]]]: + + yield commands.HELP, self.help_handler + yield commands.LINK_IDENTITY, self.link_user_handler + yield commands.UNLINK_IDENTITY, self.unlink_user_handler diff --git a/src/sentry/integrations/on_call/metrics.py b/src/sentry/integrations/on_call/metrics.py index 76f1b203c203b8..11f61a2ae666a4 100644 --- a/src/sentry/integrations/on_call/metrics.py +++ b/src/sentry/integrations/on_call/metrics.py @@ -1,4 +1,4 @@ -from enum import Enum +from enum import Enum, StrEnum from attr import dataclass @@ -56,3 +56,13 @@ def get_integration_name(self) -> str: def get_interaction_type(self) -> str: return str(self.interaction_type) + + +class OnCallIntegrationsHaltReason(StrEnum): + """ + Reasons why an on-call integration method may halt without success/failure.
+ """ + + INVALID_TEAM = "invalid_team" + INVALID_SERVICE = "invalid_service" + INVALID_KEY = "invalid_key" diff --git a/src/sentry/integrations/on_call/spec.py b/src/sentry/integrations/on_call/spec.py index 130c537976e8d5..fd5cdef97deab0 100644 --- a/src/sentry/integrations/on_call/spec.py +++ b/src/sentry/integrations/on_call/spec.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from sentry.models.notificationaction import ActionService +from sentry.notifications.models.notificationaction import ActionService class OnCallSpec(ABC): diff --git a/src/sentry/integrations/opsgenie/actions/form.py b/src/sentry/integrations/opsgenie/actions/form.py index a6a29d9d208145..05bbb4bf6797e4 100644 --- a/src/sentry/integrations/opsgenie/actions/form.py +++ b/src/sentry/integrations/opsgenie/actions/form.py @@ -6,7 +6,7 @@ from django import forms from django.utils.translation import gettext_lazy as _ -from sentry.integrations.on_call.metrics import OnCallInteractionType +from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType from sentry.integrations.opsgenie.metrics import record_event from sentry.integrations.opsgenie.utils import get_team from sentry.integrations.services.integration import integration_service @@ -65,7 +65,7 @@ def _get_team_status( return VALID_TEAM def _validate_team(self, team_id: str | None, integration_id: int | None) -> None: - with record_event(OnCallInteractionType.VERIFY_TEAM).capture(): + with record_event(OnCallInteractionType.VERIFY_TEAM).capture() as lifecyle: params = { "account": dict(self.fields["account"].choices).get(integration_id), "team": dict(self.fields["team"].choices).get(team_id), @@ -78,6 +78,7 @@ def _validate_team(self, team_id: str | None, integration_id: int | None) -> Non organization_id=self.org_id, ) if integration is None or org_integration is None: + lifecyle.record_halt(OnCallIntegrationsHaltReason.INVALID_TEAM) raise forms.ValidationError( _("The Opsgenie integration does not exist."), code="invalid_integration", @@ -86,6 +87,7 @@ def _validate_team(self, team_id: str | None, integration_id: int | None) -> Non team_status = self._get_team_status(team_id=team_id, org_integration=org_integration) if team_status == INVALID_TEAM: + lifecyle.record_halt(OnCallIntegrationsHaltReason.INVALID_TEAM) raise forms.ValidationError( _('The team "%(team)s" does not belong to the %(account)s Opsgenie account.'), code="invalid_team", diff --git a/src/sentry/integrations/opsgenie/client.py b/src/sentry/integrations/opsgenie/client.py index a7bd3755418b10..b86df081bb51e4 100644 --- a/src/sentry/integrations/opsgenie/client.py +++ b/src/sentry/integrations/opsgenie/client.py @@ -107,12 +107,12 @@ def send_notification( notification_uuid=notification_uuid, ) else: - # if we're acknowledging the alert—meaning that the Sentry alert was resolved + # if we're closing the alert—meaning that the Sentry alert was resolved if data.get("identifier"): interaction_type = OnCallInteractionType.RESOLVE alias = data["identifier"] resp = self.post( - f"/alerts/{alias}/acknowledge", + f"/alerts/{alias}/close", data={}, params={"identifierType": "alias"}, headers=headers, diff --git a/src/sentry/integrations/opsgenie/integration.py b/src/sentry/integrations/opsgenie/integration.py index 8842f608956130..77e968740449cc 100644 --- a/src/sentry/integrations/opsgenie/integration.py +++ b/src/sentry/integrations/opsgenie/integration.py @@ -20,7 +20,7 @@ ) from sentry.integrations.models.integration import Integration from 
sentry.integrations.models.organization_integration import OrganizationIntegration -from sentry.integrations.on_call.metrics import OnCallInteractionType +from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType from sentry.integrations.opsgenie.metrics import record_event from sentry.integrations.opsgenie.tasks import migrate_opsgenie_plugin from sentry.organizations.services.organization import RpcOrganizationSummary @@ -183,7 +183,7 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None: team["id"] = str(self.org_integration.id) + "-" + team["team"] invalid_keys = [] - with record_event(OnCallInteractionType.VERIFY_KEYS).capture(): + with record_event(OnCallInteractionType.VERIFY_KEYS).capture() as lifecycle: for team in teams: # skip if team, key pair already exist in config if (team["team"], team["integration_key"]) in existing_team_key_pairs: @@ -213,6 +213,10 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None: raise if invalid_keys: + lifecycle.record_halt( + OnCallIntegrationsHaltReason.INVALID_KEY, + extra={"invalid_keys": invalid_keys, "integration_id": integration.id}, + ) raise ApiUnauthorized(f"Invalid integration key: {str(invalid_keys)}") return super().update_organization_config(data) diff --git a/src/sentry/integrations/pagerduty/actions/form.py b/src/sentry/integrations/pagerduty/actions/form.py index 3ac96179b54794..a6f468f7539d40 100644 --- a/src/sentry/integrations/pagerduty/actions/form.py +++ b/src/sentry/integrations/pagerduty/actions/form.py @@ -6,7 +6,7 @@ from django import forms from django.utils.translation import gettext_lazy as _ -from sentry.integrations.on_call.metrics import OnCallInteractionType +from sentry.integrations.on_call.metrics import OnCallIntegrationsHaltReason, OnCallInteractionType from sentry.integrations.pagerduty.metrics import record_event from sentry.integrations.services.integration import integration_service from sentry.integrations.types import ExternalProviders @@ -47,7 +47,7 @@ def __init__(self, *args, **kwargs): self.fields["service"].widget.choices = self.fields["service"].choices def _validate_service(self, service_id: int, integration_id: int) -> None: - with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture(): + with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture() as lifecycle: params = { "account": dict(self.fields["account"].choices).get(integration_id), "service": dict(self.fields["service"].choices).get(service_id), @@ -66,6 +66,7 @@ def _validate_service(self, service_id: int, integration_id: int) -> None: ): # We need to make sure that the service actually belongs to that integration, # meaning that it belongs under the appropriate account in PagerDuty. + lifecycle.record_halt(OnCallIntegrationsHaltReason.INVALID_SERVICE) raise forms.ValidationError( _( 'The service "%(service)s" has not been granted access in the %(account)s Pagerduty account.' 
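The Opsgenie and PagerDuty form hunks above share a halt-before-raise pattern: when validation fails for an expected reason (invalid team, service, or key), the halt is recorded on the lifecycle first and the existing forms.ValidationError is raised unchanged afterward, so the metric explains why the flow stopped without altering user-facing behavior. A small self-contained sketch of that ordering, with stand-in ValidationError and record_event helpers rather than the real Django and Sentry ones:

from contextlib import contextmanager


class ValidationError(Exception):
    """Stand-in for django.forms.ValidationError."""


class Lifecycle:
    def record_halt(self, reason: str, extra: dict | None = None) -> None:
        print(f"halt: {reason} extra={extra}")


@contextmanager
def record_event(interaction_type: str):
    print(f"start: {interaction_type}")
    yield Lifecycle()


def validate_service(service_is_visible: bool) -> None:
    with record_event("VALIDATE_SERVICE") as lifecycle:
        if not service_is_visible:
            # Record the halt before raising so the metric explains the stop;
            # the user-facing form error is unchanged.
            lifecycle.record_halt("invalid_service")
            raise ValidationError("The service has not been granted access.")


if __name__ == "__main__":
    validate_service(True)
    try:
        validate_service(False)
    except ValidationError as err:
        print("form error:", err)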
diff --git a/src/sentry/integrations/project_management/__init__.py b/src/sentry/integrations/project_management/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/integrations/project_management/metrics.py b/src/sentry/integrations/project_management/metrics.py new file mode 100644 index 00000000000000..782bb9c2f4021d --- /dev/null +++ b/src/sentry/integrations/project_management/metrics.py @@ -0,0 +1,45 @@ +from dataclasses import dataclass +from enum import StrEnum + +from sentry.integrations.base import IntegrationDomain +from sentry.integrations.models import Integration +from sentry.integrations.services.integration import RpcIntegration +from sentry.integrations.utils.metrics import IntegrationEventLifecycleMetric + + +class ProjectManagementActionType(StrEnum): + CREATE_EXTERNAL_ISSUE = "create_external_issue" + OUTBOUND_ASSIGNMENT_SYNC = "outbound_assignment_sync" + INBOUND_ASSIGNMENT_SYNC = "inbound_assignment_sync" + COMMENT_SYNC = "comment_sync" + OUTBOUND_STATUS_SYNC = "outbound_status_sync" + INBOUND_STATUS_SYNC = "inbound_status_sync" + LINK_EXTERNAL_ISSUE = "link_external_issue" + + def __str__(self): + return self.value.lower() + + +class ProjectManagementHaltReason(StrEnum): + SYNC_INBOUND_ASSIGNEE_NOT_FOUND = "inbound-assignee-not-found" + SYNC_NON_SYNC_INTEGRATION_PROVIDED = "sync-non-sync-integration-provided" + SYNC_INBOUND_MISSING_CHANGELOG_STATUS = "missing-changelog-status" + + +class ProjectManagementFailuresReason(StrEnum): + INSTALLATION_STATE_MISSING = "installation-state-missing" + + +@dataclass +class ProjectManagementEvent(IntegrationEventLifecycleMetric): + action_type: ProjectManagementActionType + integration: Integration | RpcIntegration + + def get_integration_name(self) -> str: + return self.integration.provider + + def get_integration_domain(self) -> IntegrationDomain: + return IntegrationDomain.PROJECT_MANAGEMENT + + def get_interaction_type(self) -> str: + return str(self.action_type) diff --git a/src/sentry/integrations/repository/issue_alert.py b/src/sentry/integrations/repository/issue_alert.py index 0160bbf7328e95..bc0d00b212b4f1 100644 --- a/src/sentry/integrations/repository/issue_alert.py +++ b/src/sentry/integrations/repository/issue_alert.py @@ -11,8 +11,8 @@ BaseNotificationMessage, NotificationMessageValidationError, ) -from sentry.models.notificationmessage import NotificationMessage from sentry.models.rulefirehistory import RuleFireHistory +from sentry.notifications.models.notificationmessage import NotificationMessage _default_logger: Logger = getLogger(__name__) diff --git a/src/sentry/integrations/repository/metric_alert.py b/src/sentry/integrations/repository/metric_alert.py index 70b3c1dfe5f8de..2213b3d07755b9 100644 --- a/src/sentry/integrations/repository/metric_alert.py +++ b/src/sentry/integrations/repository/metric_alert.py @@ -10,7 +10,7 @@ BaseNotificationMessage, NotificationMessageValidationError, ) -from sentry.models.notificationmessage import NotificationMessage +from sentry.notifications.models.notificationmessage import NotificationMessage _default_logger: Logger = getLogger(__name__) diff --git a/src/sentry/integrations/slack/notifications.py b/src/sentry/integrations/slack/notifications.py index 50cfb9e9d62b6e..16859e66ed41a7 100644 --- a/src/sentry/integrations/slack/notifications.py +++ b/src/sentry/integrations/slack/notifications.py @@ -19,6 +19,7 @@ from sentry.notifications.notifications.base import BaseNotification from sentry.notifications.notify import 
register_notification_provider from sentry.types.actor import Actor +from sentry.users.models.user import User from sentry.utils import metrics logger = logging.getLogger("sentry.notifications") @@ -46,7 +47,7 @@ def send_message(self, channel_id: str, message: str) -> None: @register_notification_provider(ExternalProviders.SLACK) def send_notification_as_slack( notification: BaseNotification, - recipients: Iterable[Actor], + recipients: Iterable[Actor | User], shared_context: Mapping[str, Any], extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None, ) -> None: diff --git a/src/sentry/integrations/slack/spec.py b/src/sentry/integrations/slack/spec.py index 0b41fbf48c5f4f..59441767cd258a 100644 --- a/src/sentry/integrations/slack/spec.py +++ b/src/sentry/integrations/slack/spec.py @@ -6,7 +6,7 @@ MessagingIdentityLinkViewSet, MessagingIntegrationSpec, ) -from sentry.models.notificationaction import ActionService +from sentry.notifications.models.notificationaction import ActionService from sentry.rules.actions import IntegrationEventAction diff --git a/src/sentry/integrations/slack/utils/notifications.py b/src/sentry/integrations/slack/utils/notifications.py index 5535275a3a4a09..2fce90fc3443db 100644 --- a/src/sentry/integrations/slack/utils/notifications.py +++ b/src/sentry/integrations/slack/utils/notifications.py @@ -140,6 +140,8 @@ def send_incident_alert_notification( "incident_id": incident.id, "incident_status": new_status, "attachments": attachments, + "channel_id": channel, + "channel_name": action.target_display, } _logger.info("slack.metric_alert.error", exc_info=True, extra=log_params) metrics.incr( diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py index f97a632a8b69b0..a4f74148431940 100644 --- a/src/sentry/integrations/slack/webhooks/action.py +++ b/src/sentry/integrations/slack/webhooks/action.py @@ -26,6 +26,7 @@ from sentry.auth.access import from_member from sentry.exceptions import UnableToAcceptMemberInvitationException from sentry.integrations.messaging.metrics import ( + MessageInteractionFailureReason, MessagingInteractionEvent, MessagingInteractionType, ) @@ -247,6 +248,17 @@ def _unpack_error_text(validation_error: serializers.ValidationError) -> str: return element detail = element + def record_event( + self, interaction_type: MessagingInteractionType, group: Group, request: Request + ) -> MessagingInteractionEvent: + user = request.user + return MessagingInteractionEvent( + interaction_type, + SlackMessagingSpec(), + user=(user if isinstance(user, User) else None), + organization=(group.project.organization if group else None), + ) + def validation_error( self, slack_request: SlackActionRequest, @@ -364,72 +376,85 @@ def _handle_group_actions( if slack_request.type == "view_submission": # TODO: if we use modals for something other than resolve and archive, this will need to be more specific - - # Masquerade a status action - selection = None - view = slack_request.data.get("view") - if view: - state = view.get("state") - if state: - values = state.get("values") - if values: - for value in values: - for val in values[value]: - selection = values[value][val]["selected_option"]["value"] - if selection: - break - - if not selection: - return self.respond() - - status_action = MessageAction(name="status", value=selection) - - try: - self.on_status(request, identity_user, group, status_action) - except client.ApiError as error: - return self.api_error(slack_request, group, identity_user, error, 
"status_dialog") - - view = View(**slack_request.data["view"]) - private_metadata = orjson.loads(view.private_metadata) - original_tags_from_request = set(private_metadata.get("tags", {})) - - blocks = SlackIssuesMessageBuilder( - group, - identity=identity, - actions=[status_action], - tags=original_tags_from_request, - rules=[rule] if rule else None, - issue_details=True, - skip_fallback=True, - ).build() - - # use the original response_url to update the link attachment - try: - webhook_client = WebhookClient(private_metadata["orig_response_url"]) - webhook_client.send( - blocks=blocks.get("blocks"), delete_original=False, replace_original=True - ) - metrics.incr( - SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC, - sample_rate=1.0, - tags={"type": "submit_modal"}, - ) - except SlackApiError as e: - metrics.incr( - SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC, - sample_rate=1.0, - tags={"type": "submit_modal"}, - ) - _logger.exception( - "slack.webhook.view_submission.response-error", - extra={ - "error": str(e), - "integration_id": slack_request.integration.id, - "organization_id": group.project.organization_id, - }, + with self.record_event( + MessagingInteractionType.VIEW_SUBMISSION, group, request + ).capture() as lifecycle: + + # Masquerade a status action + selection = None + view = slack_request.data.get("view") + if view: + state = view.get("state") + if state: + values = state.get("values") + if values: + for value in values: + for val in values[value]: + selection = values[value][val]["selected_option"]["value"] + if selection: + break + + if not selection: + lifecycle.record_failure(MessageInteractionFailureReason.MISSING_ACTION) + return self.respond() + + lifecycle.add_extra( + "selection", + selection, ) - return self.respond() + status_action = MessageAction(name="status", value=selection) + + try: + self.on_status(request, identity_user, group, status_action) + except client.ApiError as error: + lifecycle.record_failure(error) + return self.api_error( + slack_request, group, identity_user, error, "status_dialog" + ) + + view = View(**slack_request.data["view"]) + private_metadata = orjson.loads(view.private_metadata) + original_tags_from_request = set(private_metadata.get("tags", {})) + + blocks = SlackIssuesMessageBuilder( + group, + identity=identity, + actions=[status_action], + tags=original_tags_from_request, + rules=[rule] if rule else None, + issue_details=True, + skip_fallback=True, + ).build() + + # use the original response_url to update the link attachment + try: + webhook_client = WebhookClient(private_metadata["orig_response_url"]) + webhook_client.send( + blocks=blocks.get("blocks"), delete_original=False, replace_original=True + ) + metrics.incr( + SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC, + sample_rate=1.0, + tags={"type": "submit_modal"}, + ) + except SlackApiError as e: + lifecycle.record_failure(e) + metrics.incr( + SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC, + sample_rate=1.0, + tags={"type": "submit_modal"}, + ) + _logger.exception( + "slack.webhook.view_submission.response-error", + extra={ + "error": str(e), + "integration_id": slack_request.integration.id, + "organization_id": group.project.organization_id, + }, + ) + + return self.respond() # Usually we'll want to respond with the updated attachment including # the list of actions taken. However, when opening a dialog we do not @@ -437,32 +462,31 @@ def _handle_group_actions( # response_url later to update it. 
defer_attachment_update = False - def record_event(interaction_type: MessagingInteractionType) -> MessagingInteractionEvent: - user = request.user - return MessagingInteractionEvent( - interaction_type, - SlackMessagingSpec(), - user=(user if isinstance(user, User) else None), - organization=(group.project.organization if group else None), - ) - # Handle interaction actions for action in action_list: try: if action.name in ("status", "unresolved:ongoing"): - with record_event(MessagingInteractionType.STATUS).capture(): + with self.record_event( + MessagingInteractionType.STATUS, group, request + ).capture(): self.on_status(request, identity_user, group, action) elif ( action.name == "assign" ): # TODO: remove this as it is replaced by the options-load endpoint - with record_event(MessagingInteractionType.ASSIGN).capture(): + with self.record_event( + MessagingInteractionType.ASSIGN, group, request + ).capture(): self.on_assign(request, identity_user, group, action) elif action.name == "resolve_dialog": - with record_event(MessagingInteractionType.RESOLVE_DIALOG).capture(): + with self.record_event( + MessagingInteractionType.RESOLVE_DIALOG, group, request + ).capture(): _ResolveDialog().open_dialog(slack_request, group) defer_attachment_update = True elif action.name == "archive_dialog": - with record_event(MessagingInteractionType.ARCHIVE_DIALOG).capture(): + with self.record_event( + MessagingInteractionType.ARCHIVE_DIALOG, group, request + ).capture(): _ArchiveDialog().open_dialog(slack_request, group) defer_attachment_update = True except client.ApiError as error: diff --git a/src/sentry/integrations/slack/webhooks/base.py b/src/sentry/integrations/slack/webhooks/base.py index b0663cccebb8d6..fba29ed49d3b62 100644 --- a/src/sentry/integrations/slack/webhooks/base.py +++ b/src/sentry/integrations/slack/webhooks/base.py @@ -2,7 +2,7 @@ import abc import logging -from collections.abc import Callable, Iterable +from collections.abc import Iterable from dataclasses import dataclass from rest_framework import status @@ -11,11 +11,13 @@ from sentry.api.base import Endpoint from sentry.integrations.messaging import commands from sentry.integrations.messaging.commands import ( + CommandHandler, CommandInput, CommandNotMatchedError, MessagingIntegrationCommand, MessagingIntegrationCommandDispatcher, ) +from sentry.integrations.messaging.metrics import MessageCommandHaltReason from sentry.integrations.messaging.spec import MessagingIntegrationSpec from sentry.integrations.slack.message_builder.help import SlackHelpMessageBuilder from sentry.integrations.slack.metrics import ( @@ -24,6 +26,7 @@ ) from sentry.integrations.slack.requests.base import SlackDMRequest, SlackRequestError from sentry.integrations.slack.spec import SlackMessagingSpec +from sentry.integrations.types import EventLifecycleOutcome, IntegrationResponse from sentry.utils import metrics LINK_USER_MESSAGE = ( @@ -129,16 +132,106 @@ class SlackCommandDispatcher(MessagingIntegrationCommandDispatcher[Response]): endpoint: SlackDMEndpoint request: SlackDMRequest + # Define mapping of messages to halt reasons + @property + def TEAM_HALT_MAPPINGS(self) -> dict[str, MessageCommandHaltReason]: + from sentry.integrations.slack.webhooks.command import ( + CHANNEL_ALREADY_LINKED_MESSAGE, + INSUFFICIENT_ROLE_MESSAGE, + LINK_FROM_CHANNEL_MESSAGE, + LINK_USER_FIRST_MESSAGE, + TEAM_NOT_LINKED_MESSAGE, + ) + + return { + LINK_FROM_CHANNEL_MESSAGE: MessageCommandHaltReason.LINK_FROM_CHANNEL, + LINK_USER_FIRST_MESSAGE: 
MessageCommandHaltReason.LINK_USER_FIRST, + INSUFFICIENT_ROLE_MESSAGE: MessageCommandHaltReason.INSUFFICIENT_ROLE, + CHANNEL_ALREADY_LINKED_MESSAGE: MessageCommandHaltReason.CHANNEL_ALREADY_LINKED, + TEAM_NOT_LINKED_MESSAGE: MessageCommandHaltReason.TEAM_NOT_LINKED, + } + @property def integration_spec(self) -> MessagingIntegrationSpec: return SlackMessagingSpec() + def help_handler(self, input: CommandInput) -> IntegrationResponse[Response]: + response = self.endpoint.help(input.cmd_value) + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + ) + + def link_user_handler(self, input: CommandInput) -> IntegrationResponse[Response]: + response = self.endpoint.link_user(self.request) + if ALREADY_LINKED_MESSAGE.format(username=self.request.identity_str) in str(response.data): + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + outcome_reason=str(MessageCommandHaltReason.ALREADY_LINKED), + context_data={ + "email": self.request.identity_str, + }, + ) + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + ) + + def unlink_user_handler(self, input: CommandInput) -> IntegrationResponse[Response]: + response = self.endpoint.unlink_user(self.request) + if NOT_LINKED_MESSAGE in str(response.data): + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + outcome_reason=str(MessageCommandHaltReason.NOT_LINKED), + context_data={ + "email": self.request.identity_str, + }, + ) + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + ) + + def link_team_handler(self, input: CommandInput) -> IntegrationResponse[Response]: + response = self.endpoint.link_team(self.request) + + for message, reason in self.TEAM_HALT_MAPPINGS.items(): + if message in str(response.data): + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + outcome_reason=str(reason), + ) + + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + ) + + def unlink_team_handler(self, input: CommandInput) -> IntegrationResponse[Response]: + response = self.endpoint.unlink_team(self.request) + for message, reason in self.TEAM_HALT_MAPPINGS.items(): + if message in str(response.data): + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + outcome_reason=str(reason), + ) + + return IntegrationResponse( + interaction_result=EventLifecycleOutcome.SUCCESS, + response=response, + ) + @property def command_handlers( self, - ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], Response]]]: - yield commands.HELP, (lambda i: self.endpoint.help(i.cmd_value)) - yield commands.LINK_IDENTITY, (lambda i: self.endpoint.link_user(self.request)) - yield commands.UNLINK_IDENTITY, (lambda i: self.endpoint.unlink_user(self.request)) - yield commands.LINK_TEAM, (lambda i: self.endpoint.link_team(self.request)) - yield commands.UNLINK_TEAM, (lambda i: self.endpoint.unlink_team(self.request)) + ) -> Iterable[tuple[MessagingIntegrationCommand, CommandHandler[Response]]]: + + yield commands.HELP, self.help_handler + yield commands.LINK_IDENTITY, self.link_user_handler + yield commands.UNLINK_IDENTITY, self.unlink_user_handler + yield commands.LINK_TEAM, self.link_team_handler + yield commands.UNLINK_TEAM, self.unlink_team_handler diff --git 
a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py index 590431ef4d4c70..564a7c5eeb9c35 100644 --- a/src/sentry/integrations/source_code_management/commit_context.py +++ b/src/sentry/integrations/source_code_management/commit_context.py @@ -13,6 +13,11 @@ from sentry import analytics from sentry.auth.exceptions import IdentityNotValid from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig +from sentry.integrations.source_code_management.metrics import ( + CommitContextHaltReason, + CommitContextIntegrationInteractionEvent, + SCMIntegrationInteractionType, +) from sentry.locks import locks from sentry.models.commit import Commit from sentry.models.group import Group @@ -26,6 +31,7 @@ PullRequestCommit, ) from sentry.models.repository import Repository +from sentry.shared_integrations.exceptions import ApiRateLimitedError from sentry.users.models.identity import Identity from sentry.utils import metrics from sentry.utils.cache import cache @@ -94,16 +100,28 @@ def get_blame_for_files( files: list of FileBlameInfo objects """ - try: - client = self.get_client() - except Identity.DoesNotExist: - return [] - try: - response = client.get_blame_for_files(files, extra) - except IdentityNotValid: - return [] - - return response + with CommitContextIntegrationInteractionEvent( + interaction_type=SCMIntegrationInteractionType.GET_BLAME_FOR_FILES, + provider_key=self.integration_name, + ).capture() as lifecycle: + try: + client = self.get_client() + except Identity.DoesNotExist as e: + lifecycle.record_failure(e) + sentry_sdk.capture_exception(e) + return [] + try: + response = client.get_blame_for_files(files, extra) + except IdentityNotValid as e: + lifecycle.record_failure(e) + sentry_sdk.capture_exception(e) + return [] + # Swallow rate limited errors so we don't log them as exceptions + except ApiRateLimitedError as e: + sentry_sdk.capture_exception(e) + lifecycle.record_halt(e) + return [] + return response def get_commit_context_all_frames( self, files: Sequence[SourceLineInfo], extra: Mapping[str, Any] @@ -120,114 +138,137 @@ def queue_comment_task_if_needed( group_owner: GroupOwner, group_id: int, ) -> None: - if not OrganizationOption.objects.get_value( + with CommitContextIntegrationInteractionEvent( + interaction_type=SCMIntegrationInteractionType.QUEUE_COMMENT_TASK, + provider_key=self.integration_name, organization=project.organization, - key="sentry:github_pr_bot", - default=True, - ): - logger.info( - _pr_comment_log(integration_name=self.integration_name, suffix="disabled"), - extra={"organization_id": project.organization_id}, - ) - return - - repo_query = Repository.objects.filter(id=commit.repository_id).order_by("-date_added") - group = Group.objects.get_from_cache(id=group_id) - if not ( - group.level is not logging.INFO and repo_query.exists() - ): # Don't comment on info level issues - logger.info( - _pr_comment_log( - integration_name=self.integration_name, suffix="incorrect_repo_config" - ), - extra={"organization_id": project.organization_id}, - ) - return - - repo: Repository = repo_query.get() - - logger.info( - _pr_comment_log(integration_name=self.integration_name, suffix="queue_comment_check"), - extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, - ) - from sentry.integrations.github.tasks.pr_comment import github_comment_workflow + project=project, + commit=commit, + ).capture() as lifecycle: + if not 
OrganizationOption.objects.get_value( + organization=project.organization, + key="sentry:github_pr_bot", + default=True, + ): + # TODO: remove logger in favor of the log recorded in lifecycle.record_halt + logger.info( + _pr_comment_log(integration_name=self.integration_name, suffix="disabled"), + extra={"organization_id": project.organization_id}, + ) + lifecycle.record_halt(CommitContextHaltReason.PR_BOT_DISABLED) + return + + repo_query = Repository.objects.filter(id=commit.repository_id).order_by("-date_added") + group = Group.objects.get_from_cache(id=group_id) + if not ( + group.level is not logging.INFO and repo_query.exists() + ): # Don't comment on info level issues + logger.info( + _pr_comment_log( + integration_name=self.integration_name, suffix="incorrect_repo_config" + ), + extra={"organization_id": project.organization_id}, + ) + lifecycle.record_halt(CommitContextHaltReason.INCORRECT_REPO_CONFIG) + return - # client will raise an Exception if the request is not successful - try: - client = self.get_client() - merge_commit_sha = client.get_merge_commit_sha_from_commit( - repo=repo.name, sha=commit.key - ) - except Exception as e: - sentry_sdk.capture_exception(e) - return + repo: Repository = repo_query.get() + lifecycle.add_extra("repository_id", repo.id) - if merge_commit_sha is None: logger.info( _pr_comment_log( - integration_name=self.integration_name, - suffix="queue_comment_workflow.commit_not_in_default_branch", + integration_name=self.integration_name, suffix="queue_comment_check" ), - extra={ - "organization_id": commit.organization_id, - "repository_id": repo.id, - "commit_sha": commit.key, - }, + extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, ) - return + scope = sentry_sdk.Scope.get_isolation_scope() + scope.set_tag("queue_comment_check.merge_commit_sha", commit.key) + scope.set_tag("queue_comment_check.organization_id", commit.organization_id) + from sentry.integrations.github.tasks.pr_comment import github_comment_workflow + + # client will raise an Exception if the request is not successful + try: + client = self.get_client() + merge_commit_sha = client.get_merge_commit_sha_from_commit( + repo=repo.name, sha=commit.key + ) + except Exception as e: + sentry_sdk.capture_exception(e) + lifecycle.record_halt(e) + return - pr_query = PullRequest.objects.filter( - organization_id=commit.organization_id, - repository_id=commit.repository_id, - merge_commit_sha=merge_commit_sha, - ) - if not pr_query.exists(): - logger.info( - _pr_comment_log( - integration_name=self.integration_name, - suffix="queue_comment_workflow.missing_pr", - ), - extra={ - "organization_id": commit.organization_id, - "repository_id": repo.id, - "commit_sha": commit.key, - }, - ) - return + if merge_commit_sha is None: + logger.info( + _pr_comment_log( + integration_name=self.integration_name, + suffix="queue_comment_workflow.commit_not_in_default_branch", + ), + extra={ + "organization_id": commit.organization_id, + "repository_id": repo.id, + "commit_sha": commit.key, + }, + ) + lifecycle.record_halt(CommitContextHaltReason.COMMIT_NOT_IN_DEFAULT_BRANCH) + return - pr = pr_query.first() - assert pr is not None - # need to query explicitly for merged PR comments since we can have multiple comments per PR - merged_pr_comment_query = PullRequestComment.objects.filter( - pull_request_id=pr.id, comment_type=CommentType.MERGED_PR - ) - if pr.date_added >= datetime.now(tz=timezone.utc) - timedelta(days=PR_COMMENT_WINDOW) and ( - not merged_pr_comment_query.exists() 
- or group_owner.group_id not in merged_pr_comment_query[0].group_ids - ): - lock = locks.get( - _debounce_pr_comment_lock_key(pr.id), duration=10, name="queue_comment_task" + pr_query = PullRequest.objects.filter( + organization_id=commit.organization_id, + repository_id=commit.repository_id, + merge_commit_sha=merge_commit_sha, ) - with lock.acquire(): - cache_key = _debounce_pr_comment_cache_key(pullrequest_id=pr.id) - if cache.get(cache_key) is not None: - return - - # create PR commit row for suspect commit and PR - PullRequestCommit.objects.get_or_create(commit=commit, pull_request=pr) - + if not pr_query.exists(): logger.info( _pr_comment_log( - integration_name=self.integration_name, suffix="queue_comment_workflow" + integration_name=self.integration_name, + suffix="queue_comment_workflow.missing_pr", ), - extra={"pullrequest_id": pr.id, "project_id": group_owner.project_id}, + extra={ + "organization_id": commit.organization_id, + "repository_id": repo.id, + "commit_sha": commit.key, + }, ) + lifecycle.record_halt(CommitContextHaltReason.MISSING_PR) + return + + pr = pr_query.first() + lifecycle.add_extra("pull_request_id", pr.id if pr else None) + assert pr is not None + # need to query explicitly for merged PR comments since we can have multiple comments per PR + merged_pr_comment_query = PullRequestComment.objects.filter( + pull_request_id=pr.id, comment_type=CommentType.MERGED_PR + ) + if pr.date_added >= datetime.now(tz=timezone.utc) - timedelta( + days=PR_COMMENT_WINDOW + ) and ( + not merged_pr_comment_query.exists() + or group_owner.group_id not in merged_pr_comment_query[0].group_ids + ): + lock = locks.get( + _debounce_pr_comment_lock_key(pr.id), duration=10, name="queue_comment_task" + ) + with lock.acquire(): + cache_key = _debounce_pr_comment_cache_key(pullrequest_id=pr.id) + if cache.get(cache_key) is not None: + lifecycle.record_halt(CommitContextHaltReason.ALREADY_QUEUED) + return - cache.set(cache_key, True, PR_COMMENT_TASK_TTL) + # create PR commit row for suspect commit and PR + PullRequestCommit.objects.get_or_create(commit=commit, pull_request=pr) - github_comment_workflow.delay( - pullrequest_id=pr.id, project_id=group_owner.project_id - ) + logger.info( + _pr_comment_log( + integration_name=self.integration_name, suffix="queue_comment_workflow" + ), + extra={"pullrequest_id": pr.id, "project_id": group_owner.project_id}, + ) + + cache.set(cache_key, True, PR_COMMENT_TASK_TTL) + + github_comment_workflow.delay( + pullrequest_id=pr.id, project_id=group_owner.project_id + ) def create_or_update_comment( self, @@ -248,70 +289,81 @@ def create_or_update_comment( ) pr_comment = pr_comment_query[0] if pr_comment_query.exists() else None - # client will raise ApiError if the request is not successful - if pr_comment is None: - resp = client.create_comment( - repo=repo.name, - issue_id=str(pr_key), - data=( - { - "body": comment_body, - "actions": github_copilot_actions, - } - if github_copilot_actions - else {"body": comment_body} - ), - ) + interaction_type = ( + SCMIntegrationInteractionType.CREATE_COMMENT + if not pr_comment + else SCMIntegrationInteractionType.UPDATE_COMMENT + ) - current_time = django_timezone.now() - comment = PullRequestComment.objects.create( - external_id=resp.body["id"], - pull_request_id=pullrequest_id, - created_at=current_time, - updated_at=current_time, - group_ids=issue_list, - comment_type=comment_type, - ) - metrics.incr( - metrics_base.format(integration=self.integration_name, key="comment_created") - ) + with 
CommitContextIntegrationInteractionEvent( + interaction_type=interaction_type, + provider_key=self.integration_name, + repository=repo, + pull_request_id=pullrequest_id, + ).capture(): + if pr_comment is None: + resp = client.create_comment( + repo=repo.name, + issue_id=str(pr_key), + data=( + { + "body": comment_body, + "actions": github_copilot_actions, + } + if github_copilot_actions + else {"body": comment_body} + ), + ) - if comment_type == CommentType.OPEN_PR: - analytics.record( - "open_pr_comment.created", - comment_id=comment.id, - org_id=repo.organization_id, - pr_id=pullrequest_id, - language=(language or "not found"), + current_time = django_timezone.now() + comment = PullRequestComment.objects.create( + external_id=resp.body["id"], + pull_request_id=pullrequest_id, + created_at=current_time, + updated_at=current_time, + group_ids=issue_list, + comment_type=comment_type, ) - else: - resp = client.update_comment( - repo=repo.name, - issue_id=str(pr_key), - comment_id=pr_comment.external_id, - data=( - { - "body": comment_body, - "actions": github_copilot_actions, - } - if github_copilot_actions - else {"body": comment_body} - ), + metrics.incr( + metrics_base.format(integration=self.integration_name, key="comment_created") + ) + + if comment_type == CommentType.OPEN_PR: + analytics.record( + "open_pr_comment.created", + comment_id=comment.id, + org_id=repo.organization_id, + pr_id=pullrequest_id, + language=(language or "not found"), + ) + else: + resp = client.update_comment( + repo=repo.name, + issue_id=str(pr_key), + comment_id=pr_comment.external_id, + data=( + { + "body": comment_body, + "actions": github_copilot_actions, + } + if github_copilot_actions + else {"body": comment_body} + ), + ) + metrics.incr( + metrics_base.format(integration=self.integration_name, key="comment_updated") + ) + pr_comment.updated_at = django_timezone.now() + pr_comment.group_ids = issue_list + pr_comment.save() + + logger_event = metrics_base.format( + integration=self.integration_name, key="create_or_update_comment" ) - metrics.incr( - metrics_base.format(integration=self.integration_name, key="comment_updated") + logger.info( + logger_event, + extra={"new_comment": pr_comment is None, "pr_key": pr_key, "repo": repo.name}, ) - pr_comment.updated_at = django_timezone.now() - pr_comment.group_ids = issue_list - pr_comment.save() - - logger_event = metrics_base.format( - integration=self.integration_name, key="create_or_update_comment" - ) - logger.info( - logger_event, - extra={"new_comment": pr_comment is None, "pr_key": pr_key, "repo": repo.name}, - ) class CommitContextClient(ABC): diff --git a/src/sentry/integrations/source_code_management/issues.py b/src/sentry/integrations/source_code_management/issues.py index a1de561da34e40..04d7487f1e9090 100644 --- a/src/sentry/integrations/source_code_management/issues.py +++ b/src/sentry/integrations/source_code_management/issues.py @@ -6,8 +6,8 @@ from sentry.integrations.mixins.issues import IssueBasicIntegration from sentry.integrations.source_code_management.metrics import ( - SourceCodeIssueIntegrationInteractionEvent, - SourceCodeIssueIntegrationInteractionType, + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, ) from sentry.integrations.source_code_management.repository import BaseRepositoryIntegration from sentry.models.group import Group @@ -15,8 +15,8 @@ class SourceCodeIssueIntegration(IssueBasicIntegration, BaseRepositoryIntegration, ABC): - def record_event(self, event: SourceCodeIssueIntegrationInteractionType): - 
return SourceCodeIssueIntegrationInteractionEvent( + def record_event(self, event: SCMIntegrationInteractionType): + return SCMIntegrationInteractionEvent( interaction_type=event, provider_key=self.model.provider, organization=self.organization, @@ -27,9 +27,7 @@ def get_repository_choices(self, group: Group | None, params: Mapping[str, Any], """ Returns the default repository and a set/subset of repositories of associated with the installation """ - with self.record_event( - SourceCodeIssueIntegrationInteractionType.GET_REPOSITORY_CHOICES - ).capture(): + with self.record_event(SCMIntegrationInteractionType.GET_REPOSITORY_CHOICES).capture(): try: repos = self.get_repositories() except ApiError: diff --git a/src/sentry/integrations/source_code_management/metrics.py b/src/sentry/integrations/source_code_management/metrics.py index cf359c4f934319..77e1c6bc0b5834 100644 --- a/src/sentry/integrations/source_code_management/metrics.py +++ b/src/sentry/integrations/source_code_management/metrics.py @@ -1,5 +1,5 @@ from collections.abc import Mapping -from enum import Enum +from enum import StrEnum from typing import Any from attr import dataclass @@ -8,41 +8,52 @@ from sentry.integrations.models.organization_integration import OrganizationIntegration from sentry.integrations.services.integration import RpcOrganizationIntegration from sentry.integrations.utils.metrics import IntegrationEventLifecycleMetric +from sentry.models.commit import Commit from sentry.models.organization import Organization +from sentry.models.project import Project +from sentry.models.repository import Repository from sentry.organizations.services.organization import RpcOrganization -class RepositoryIntegrationInteractionType(Enum): +class SCMIntegrationInteractionType(StrEnum): """ - A RepositoryIntegration feature. + SCM integration features """ - GET_STACKTRACE_LINK = "GET_STACKTRACE_LINK" - GET_CODEOWNER_FILE = "GET_CODEOWNER_FILE" - CHECK_FILE = "CHECK_FILE" + # RepositoryIntegration + GET_STACKTRACE_LINK = "get_stacktrace_link" + GET_CODEOWNER_FILE = "get_codeowner_file" + CHECK_FILE = "check_file" - def __str__(self) -> str: - return self.value.lower() + # SourceCodeIssueIntegration (SCM only) + GET_REPOSITORY_CHOICES = "get_repository_choices" + # SourceCodeSearchEndpoint + HANDLE_SEARCH_ISSUES = "handle_search_issues" + HANDLE_SEARCH_REPOSITORIES = "handle_search_repositories" + GET = "get" -class SourceCodeIssueIntegrationInteractionType(Enum): - """ - A SourceCodeIssueIntegration feature. - """ + # CommitContextIntegration + GET_BLAME_FOR_FILES = "get_blame_for_files" + CREATE_COMMENT = "create_comment" + UPDATE_COMMENT = "update_comment" + QUEUE_COMMENT_TASK = "queue_comment_task" - GET_REPOSITORY_CHOICES = "GET_REPOSITORY_CHOICES" - CREATE_ISSUE = "CREATE_ISSUE" - SYNC_STATUS_OUTBOUND = "SYNC_STATUS_OUTBOUND" - SYNC_ASSIGNEE_OUTBOUND = "SYNC_ASSIGNEE_OUTBOUND" + # Tasks + LINK_ALL_REPOS = "link_all_repos" + + # GitHub only + DERIVE_CODEMAPPINGS = "derive_codemappings" + STUDENT_PACK = "student_pack" @dataclass -class RepositoryIntegrationInteractionEvent(IntegrationEventLifecycleMetric): +class SCMIntegrationInteractionEvent(IntegrationEventLifecycleMetric): """ - An instance to be recorded of a RepositoryIntegration feature call. + An instance to be recorded of an SCM integration feature call. 
""" - interaction_type: RepositoryIntegrationInteractionType + interaction_type: SCMIntegrationInteractionType provider_key: str # Optional attributes to populate extras @@ -66,29 +77,57 @@ def get_extras(self) -> Mapping[str, Any]: @dataclass -class SourceCodeIssueIntegrationInteractionEvent(IntegrationEventLifecycleMetric): +class CommitContextIntegrationInteractionEvent(SCMIntegrationInteractionEvent): """ - An instance to be recorded of a SourceCodeIssueIntegration feature call. + An instance to be recorded of a CommitContextIntegration feature call. """ - interaction_type: SourceCodeIssueIntegrationInteractionType - provider_key: str + project: Project | None = None + commit: Commit | None = None + repository: Repository | None = None + pull_request_id: int | None = None - # Optional attributes to populate extras - organization: Organization | RpcOrganization | None = None - org_integration: OrganizationIntegration | RpcOrganizationIntegration | None = None + def get_extras(self) -> Mapping[str, Any]: + parent_extras = super().get_extras() + return { + **parent_extras, + "project_id": (self.project.id if self.project else None), + "commit_id": (self.commit.id if self.commit else None), + "repository_id": (self.repository.id if self.repository else None), + "pull_request_id": self.pull_request_id, + } - def get_integration_domain(self) -> IntegrationDomain: - return IntegrationDomain.SOURCE_CODE_MANAGEMENT - def get_integration_name(self) -> str: - return self.provider_key +class CommitContextHaltReason(StrEnum): + """Common reasons why a commit context integration may halt without success/failure.""" - def get_interaction_type(self) -> str: - return str(self.interaction_type) + PR_BOT_DISABLED = "pr_bot_disabled" + INCORRECT_REPO_CONFIG = "incorrect_repo_config" + COMMIT_NOT_IN_DEFAULT_BRANCH = "commit_not_in_default_branch" + MISSING_PR = "missing_pr" + ALREADY_QUEUED = "already_queued" - def get_extras(self) -> Mapping[str, Any]: - return { - "organization_id": (self.organization.id if self.organization else None), - "org_integration_id": (self.org_integration.id if self.org_integration else None), - } + +class LinkAllReposHaltReason(StrEnum): + """ + Common reasons why a link all repos task may halt without success/failure. + """ + + MISSING_INTEGRATION = "missing_integration" + MISSING_ORGANIZATION = "missing_organization" + RATE_LIMITED = "rate_limited" + REPOSITORY_NOT_CREATED = "repository_not_created" + + +class SourceCodeSearchEndpointHaltReason(StrEnum): + """ + Reasons why a SourceCodeSearchEndpoint method (handle_search_issues, + handle_search_repositories, or get) may halt without success/failure. 
+ """ + + NO_ISSUE_TRACKER = "no_issue_tracker" + RATE_LIMITED = "rate_limited" + MISSING_REPOSITORY_OR_NO_ACCESS = "missing_repository_or_no_access" + MISSING_INTEGRATION = "missing_integration" + SERIALIZER_ERRORS = "serializer_errors" + MISSING_REPOSITORY_FIELD = "missing_repository_field" diff --git a/src/sentry/integrations/source_code_management/repository.py b/src/sentry/integrations/source_code_management/repository.py index ed1d097a5fbf84..bf737cdfda0062 100644 --- a/src/sentry/integrations/source_code_management/repository.py +++ b/src/sentry/integrations/source_code_management/repository.py @@ -10,8 +10,8 @@ from sentry.integrations.base import IntegrationInstallation from sentry.integrations.services.repository import RpcRepository from sentry.integrations.source_code_management.metrics import ( - RepositoryIntegrationInteractionEvent, - RepositoryIntegrationInteractionType, + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, ) from sentry.models.repository import Repository from sentry.shared_integrations.client.base import BaseApiResponseX @@ -97,8 +97,8 @@ def get_unmigratable_repositories(self) -> list[RpcRepository]: """ return [] - def record_event(self, event: RepositoryIntegrationInteractionType): - return RepositoryIntegrationInteractionEvent( + def record_event(self, event: SCMIntegrationInteractionType): + return SCMIntegrationInteractionEvent( interaction_type=event, provider_key=self.integration_name, organization=self.organization, @@ -118,7 +118,7 @@ def check_file(self, repo: Repository, filepath: str, branch: str | None = None) filepath: file from the stacktrace (string) branch: commitsha or default_branch (string) """ - with self.record_event(RepositoryIntegrationInteractionType.CHECK_FILE).capture(): + with self.record_event(SCMIntegrationInteractionType.CHECK_FILE).capture(): filepath = filepath.lstrip("/") try: client = self.get_client() @@ -153,7 +153,7 @@ def get_stacktrace_link( If no file was found return `None`, and re-raise for non-"Not Found" errors, like 403 "Account Suspended". """ - with self.record_event(RepositoryIntegrationInteractionType.GET_STACKTRACE_LINK).capture(): + with self.record_event(SCMIntegrationInteractionType.GET_STACKTRACE_LINK).capture(): scope = sentry_sdk.Scope.get_isolation_scope() scope.set_tag("stacktrace_link.tried_version", False) if version: @@ -182,7 +182,7 @@ def get_codeowner_file( * filepath - full path of the file i.e. 
CODEOWNERS, .github/CODEOWNERS, docs/CODEOWNERS * raw - the decoded raw contents of the codeowner file """ - with self.record_event(RepositoryIntegrationInteractionType.GET_CODEOWNER_FILE).capture(): + with self.record_event(SCMIntegrationInteractionType.GET_CODEOWNER_FILE).capture(): if self.codeowners_locations is None: raise NotImplementedError("Implement self.codeowners_locations to use this method.") diff --git a/src/sentry/integrations/source_code_management/search.py b/src/sentry/integrations/source_code_management/search.py index d6619aaa287291..c3d90350538c95 100644 --- a/src/sentry/integrations/source_code_management/search.py +++ b/src/sentry/integrations/source_code_management/search.py @@ -13,6 +13,11 @@ from sentry.integrations.api.bases.integration import IntegrationEndpoint from sentry.integrations.models.integration import Integration from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, + SourceCodeSearchEndpointHaltReason, +) from sentry.organizations.services.organization import RpcOrganization T = TypeVar("T", bound=SourceCodeIssueIntegration) @@ -55,6 +60,15 @@ def installation_class( def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: raise NotImplementedError + def record_event(self, event: SCMIntegrationInteractionType): + # XXX (mifu67): self.integration_provider is None for the GithubSharedSearchEndpoint, + # which is used by both GitHub and GitHub Enterprise. + provider_name = "github" if self.integration_provider is None else self.integration_provider + return SCMIntegrationInteractionEvent( + interaction_type=event, + provider_key=provider_name, + ) + # not used in VSTS def handle_search_repositories( self, integration: Integration, installation: T, query: str @@ -64,41 +78,50 @@ def handle_search_repositories( def get( self, request: Request, organization: RpcOrganization, integration_id: int, **kwds: Any ) -> Response: - integration_query = Q( - organizationintegration__organization_id=organization.id, id=integration_id - ) - - if self.integration_provider: - integration_query &= Q(provider=self.integration_provider) - try: - integration: Integration = Integration.objects.get(integration_query) - except Integration.DoesNotExist: - return Response(status=404) - - serializer = SourceCodeSearchSerializer(data=request.query_params) - if not serializer.is_valid(): - return self.respond(serializer.errors, status=400) - - field = serializer.validated_data["field"] - query = serializer.validated_data["query"] - - installation = integration.get_installation(organization.id) - if not isinstance(installation, self.installation_class): - raise NotFound(f"Integration by that id is not of type {self.integration_provider}.") - - if field == self.issue_field: - repo = None - - if self.repository_field: # only fetch repository - repo = request.GET.get(self.repository_field) - if repo is None: - return Response( - {"detail": f"{self.repository_field} is a required parameter"}, status=400 - ) - - return self.handle_search_issues(installation, query, repo) - - if self.repository_field and field == self.repository_field: - return self.handle_search_repositories(integration, installation, query) - - return Response({"detail": "Invalid field"}, status=400) + with self.record_event(SCMIntegrationInteractionType.GET).capture() as lifecycle: + integration_query = Q( + 
organizationintegration__organization_id=organization.id, id=integration_id + ) + + if self.integration_provider: + integration_query &= Q(provider=self.integration_provider) + try: + integration: Integration = Integration.objects.get(integration_query) + except Integration.DoesNotExist: + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.MISSING_INTEGRATION)) + return Response(status=404) + + serializer = SourceCodeSearchSerializer(data=request.query_params) + if not serializer.is_valid(): + lifecycle.record_halt(str(SourceCodeSearchEndpointHaltReason.SERIALIZER_ERRORS)) + return self.respond(serializer.errors, status=400) + + field = serializer.validated_data["field"] + query = serializer.validated_data["query"] + + installation = integration.get_installation(organization.id) + if not isinstance(installation, self.installation_class): + raise NotFound( + f"Integration by that id is not of type {self.integration_provider}." + ) + + if field == self.issue_field: + repo = None + + if self.repository_field: # only fetch repository + repo = request.GET.get(self.repository_field) + if repo is None: + lifecycle.record_halt( + str(SourceCodeSearchEndpointHaltReason.MISSING_REPOSITORY_FIELD) + ) + return Response( + {"detail": f"{self.repository_field} is a required parameter"}, + status=400, + ) + + return self.handle_search_issues(installation, query, repo) + + if self.repository_field and field == self.repository_field: + return self.handle_search_repositories(integration, installation, query) + + return Response({"detail": "Invalid field"}, status=400) diff --git a/src/sentry/integrations/tasks/sync_assignee_outbound.py b/src/sentry/integrations/tasks/sync_assignee_outbound.py index 78b24fe9273a28..749113771cbd15 100644 --- a/src/sentry/integrations/tasks/sync_assignee_outbound.py +++ b/src/sentry/integrations/tasks/sync_assignee_outbound.py @@ -4,6 +4,10 @@ from sentry.constants import ObjectStatus from sentry.integrations.models.external_issue import ExternalIssue from sentry.integrations.models.integration import Integration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.assignment_source import AssignmentSource from sentry.integrations.services.integration import integration_service from sentry.models.organization import Organization @@ -48,23 +52,28 @@ def sync_assignee_outbound( return installation = integration.get_installation(organization_id=external_issue.organization_id) - if not ( - hasattr(installation, "should_sync") and hasattr(installation, "sync_assignee_outbound") - ): - return - parsed_assignment_source = ( - AssignmentSource.from_dict(assignment_source_dict) if assignment_source_dict else None - ) - if installation.should_sync("outbound_assignee", parsed_assignment_source): - # Assume unassign if None. 
- user = user_service.get_user(user_id) if user_id else None - installation.sync_assignee_outbound( - external_issue, user, assign=assign, assignment_source=parsed_assignment_source - ) - analytics.record( - "integration.issue.assignee.synced", - provider=integration.provider, - id=integration.id, - organization_id=external_issue.organization_id, + with ProjectManagementEvent( + action_type=ProjectManagementActionType.OUTBOUND_ASSIGNMENT_SYNC, integration=integration + ).capture() as lifecycle: + lifecycle.add_extra("sync_task", "sync_assignee_outbound") + if not ( + hasattr(installation, "should_sync") and hasattr(installation, "sync_assignee_outbound") + ): + return + + parsed_assignment_source = ( + AssignmentSource.from_dict(assignment_source_dict) if assignment_source_dict else None ) + if installation.should_sync("outbound_assignee", parsed_assignment_source): + # Assume unassign if None. + user = user_service.get_user(user_id) if user_id else None + installation.sync_assignee_outbound( + external_issue, user, assign=assign, assignment_source=parsed_assignment_source + ) + analytics.record( + "integration.issue.assignee.synced", + provider=integration.provider, + id=integration.id, + organization_id=external_issue.organization_id, + ) diff --git a/src/sentry/integrations/tasks/sync_status_inbound.py b/src/sentry/integrations/tasks/sync_status_inbound.py index 729428d5118219..7337a657ced6b1 100644 --- a/src/sentry/integrations/tasks/sync_status_inbound.py +++ b/src/sentry/integrations/tasks/sync_status_inbound.py @@ -243,6 +243,7 @@ def sync_status_inbound( organization_id=organization_id, group_id=group.id, resolution_type="with_third_party_app", + provider=provider.key, issue_type=group.issue_type.slug, issue_category=group.issue_category.name.lower(), ) diff --git a/src/sentry/integrations/tasks/sync_status_outbound.py b/src/sentry/integrations/tasks/sync_status_outbound.py index 7aa1fb3afcc9d9..cf6dfe157a1ad3 100644 --- a/src/sentry/integrations/tasks/sync_status_outbound.py +++ b/src/sentry/integrations/tasks/sync_status_outbound.py @@ -2,6 +2,10 @@ from sentry.constants import ObjectStatus from sentry.integrations.models.external_issue import ExternalIssue from sentry.integrations.models.integration import Integration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.integration import integration_service from sentry.models.group import Group, GroupStatus from sentry.silo.base import SiloMode @@ -43,14 +47,19 @@ def sync_status_outbound(group_id: int, external_issue_id: int) -> bool | None: installation = integration.get_installation(organization_id=external_issue.organization_id) if not (hasattr(installation, "should_sync") and hasattr(installation, "sync_status_outbound")): return None - if installation.should_sync("outbound_status"): - installation.sync_status_outbound( - external_issue, group.status == GroupStatus.RESOLVED, group.project_id - ) - analytics.record( - "integration.issue.status.synced", - provider=integration.provider, - id=integration.id, - organization_id=external_issue.organization_id, - ) + + with ProjectManagementEvent( + action_type=ProjectManagementActionType.OUTBOUND_STATUS_SYNC, integration=integration + ).capture() as lifecycle: + lifecycle.add_extra("sync_task", "sync_status_outbound") + if installation.should_sync("outbound_status"): + installation.sync_status_outbound( + external_issue, group.status == GroupStatus.RESOLVED, group.project_id + ) 
+ analytics.record( + "integration.issue.status.synced", + provider=integration.provider, + id=integration.id, + organization_id=external_issue.organization_id, + ) return None diff --git a/src/sentry/integrations/types.py b/src/sentry/integrations/types.py index 6c71f7b516a721..cc3bb0b3324fd2 100644 --- a/src/sentry/integrations/types.py +++ b/src/sentry/integrations/types.py @@ -92,4 +92,5 @@ def __str__(self) -> str: class IntegrationResponse(Generic[T]): interaction_result: EventLifecycleOutcome response: T + outcome_reason: str | Exception | None = None context_data: dict | None = None diff --git a/src/sentry/integrations/utils/metrics.py b/src/sentry/integrations/utils/metrics.py index 1da2b33917ffbb..f28252d9c1db0d 100644 --- a/src/sentry/integrations/utils/metrics.py +++ b/src/sentry/integrations/utils/metrics.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections.abc import Mapping from dataclasses import dataclass -from enum import Enum +from enum import StrEnum from types import TracebackType from typing import Any, Self @@ -113,8 +113,12 @@ def add_extra(self, name: str, value: Any) -> None: """ self._extra[name] = value + def add_extras(self, extras: Mapping[str, int | str]) -> None: + """Add multiple values to logged "extra" data.""" + self._extra.update(extras) + def record_event( - self, outcome: EventLifecycleOutcome, exc: BaseException | None = None + self, outcome: EventLifecycleOutcome, outcome_reason: BaseException | str | None = None ) -> None: """Record a starting or halting event. @@ -128,28 +132,35 @@ def record_event( sample_rate = 1.0 metrics.incr(key, tags=tags, sample_rate=sample_rate) + extra = dict(self._extra) + extra.update(tags) + log_params: dict[str, Any] = { + "extra": extra, + } + + if isinstance(outcome_reason, BaseException): + log_params["exc_info"] = outcome_reason + elif isinstance(outcome_reason, str): + extra["outcome_reason"] = outcome_reason + if outcome == EventLifecycleOutcome.FAILURE: - extra = dict(self._extra) - extra.update(tags) - logger.error(key, extra=self._extra, exc_info=exc) + logger.error(key, **log_params) elif outcome == EventLifecycleOutcome.HALTED: - extra = dict(self._extra) - extra.update(tags) - logger.warning(key, extra=self._extra, exc_info=exc) + logger.warning(key, **log_params) @staticmethod def _report_flow_error(message) -> None: logger.error("EventLifecycle flow error: %s", message) def _terminate( - self, new_state: EventLifecycleOutcome, exc: BaseException | None = None + self, new_state: EventLifecycleOutcome, outcome_reason: BaseException | str | None = None ) -> None: if self._state is None: self._report_flow_error("The lifecycle has not yet been entered") if self._state != EventLifecycleOutcome.STARTED: self._report_flow_error("The lifecycle has already been exited") self._state = new_state - self.record_event(new_state, exc) + self.record_event(new_state, outcome_reason) def record_success(self) -> None: """Record that the event halted successfully. @@ -162,7 +173,7 @@ def record_success(self) -> None: self._terminate(EventLifecycleOutcome.SUCCESS) def record_failure( - self, exc: BaseException | None = None, extra: dict[str, Any] | None = None + self, failure_reason: BaseException | str | None = None, extra: dict[str, Any] | None = None ) -> None: """Record that the event halted in failure. Additional data may be passed to be logged. 
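# Editor's note, not part of the patch: the signature changes in this file let callers pass
# either an exception or a plain string as the halt/failure reason. A minimal,
# self-contained sketch of how the reason is folded into the log call (toy code, not the
# Sentry implementation):

import logging
from typing import Any

logger = logging.getLogger(__name__)


def log_outcome(
    key: str,
    outcome: str,
    outcome_reason: BaseException | str | None,
    extra: dict[str, Any],
) -> None:
    # Exceptions are attached as exc_info; string reasons land in the structured "extra" data.
    log_params: dict[str, Any] = {"extra": dict(extra)}
    if isinstance(outcome_reason, BaseException):
        log_params["exc_info"] = outcome_reason
    elif isinstance(outcome_reason, str):
        log_params["extra"]["outcome_reason"] = outcome_reason

    if outcome == "failure":
        logger.error(key, **log_params)
    elif outcome == "halted":
        logger.warning(key, **log_params)


# e.g. log_outcome("slack.webhook", "halted", "invalid_key", {"integration_id": 1})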
@@ -179,10 +190,10 @@ def record_failure( if extra: self._extra.update(extra) - self._terminate(EventLifecycleOutcome.FAILURE, exc) + self._terminate(EventLifecycleOutcome.FAILURE, failure_reason) def record_halt( - self, exc: BaseException | None = None, extra: dict[str, Any] | None = None + self, halt_reason: BaseException | str | None = None, extra: dict[str, Any] | None = None ) -> None: """Record that the event halted in an ambiguous state. @@ -200,7 +211,7 @@ def record_halt( if extra: self._extra.update(extra) - self._terminate(EventLifecycleOutcome.HALTED, exc) + self._terminate(EventLifecycleOutcome.HALTED, halt_reason) def __enter__(self) -> Self: if self._state is not None: @@ -234,30 +245,27 @@ def __exit__( ) -class IntegrationPipelineViewType(Enum): +class IntegrationPipelineViewType(StrEnum): """A specific step in an integration's pipeline that is not a static page.""" # IdentityProviderPipeline - IDENTITY_LOGIN = "IDENTITY_LOGIN" - IDENTITY_LINK = "IDENTITY_LINK" - TOKEN_EXCHANGE = "TOKEN_EXCHANGE" + IDENTITY_LOGIN = "identity_login" + IDENTITY_LINK = "identity_link" + TOKEN_EXCHANGE = "token_exchange" # GitHub - OAUTH_LOGIN = "OAUTH_LOGIN" - GITHUB_INSTALLATION = "GITHUB_INSTALLATION" + OAUTH_LOGIN = "oauth_loging" + GITHUB_INSTALLATION = "github_installation" # Bitbucket - VERIFY_INSTALLATION = "VERIFY_INSTALLATION" + VERIFY_INSTALLATION = "verify_installation" # Bitbucket Server # OAUTH_LOGIN = "OAUTH_LOGIN" - OAUTH_CALLBACK = "OAUTH_CALLBACK" + OAUTH_CALLBACK = "oauth_callback" # Azure DevOps - ACCOUNT_CONFIG = "ACCOUNT_CONFIG" - - def __str__(self) -> str: - return self.value.lower() + ACCOUNT_CONFIG = "account_config" @dataclass @@ -279,3 +287,31 @@ def get_integration_name(self) -> str: def get_interaction_type(self) -> str: return str(self.interaction_type) + + +class IntegrationWebhookEventType(StrEnum): + INSTALLATION = "installation" + PUSH = "push" + PULL_REQUEST = "pull_request" + INBOUND_SYNC = "inbound_sync" + + +@dataclass +class IntegrationWebhookEvent(IntegrationEventLifecycleMetric): + """An instance to be recorded of a webhook event.""" + + interaction_type: IntegrationWebhookEventType + domain: IntegrationDomain + provider_key: str + + def get_metrics_domain(self) -> str: + return "webhook" + + def get_integration_domain(self) -> IntegrationDomain: + return self.domain + + def get_integration_name(self) -> str: + return self.provider_key + + def get_interaction_type(self) -> str: + return str(self.interaction_type) diff --git a/src/sentry/integrations/utils/sync.py b/src/sentry/integrations/utils/sync.py index a672dc4daee464..86bb58330748ae 100644 --- a/src/sentry/integrations/utils/sync.py +++ b/src/sentry/integrations/utils/sync.py @@ -6,6 +6,11 @@ from sentry import features from sentry.integrations.models.integration import Integration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, + ProjectManagementHaltReason, +) from sentry.integrations.services.assignment_source import AssignmentSource from sentry.integrations.services.integration import integration_service from sentry.integrations.tasks.sync_assignee_outbound import sync_assignee_outbound @@ -77,48 +82,54 @@ def sync_group_assignee_inbound( logger = logging.getLogger(f"sentry.integrations.{integration.provider}") - orgs_with_sync_enabled = where_should_sync(integration, "inbound_assignee") - affected_groups = Group.objects.get_groups_by_external_issue( - integration, - orgs_with_sync_enabled, - external_issue_key, - ) - 
log_context = { - "integration_id": integration.id, - "email": email, - "issue_key": external_issue_key, - } - if not affected_groups: - logger.info("no-affected-groups", extra=log_context) - return [] - - if not assign: + with ProjectManagementEvent( + action_type=ProjectManagementActionType.INBOUND_ASSIGNMENT_SYNC, integration=integration + ).capture() as lifecycle: + orgs_with_sync_enabled = where_should_sync(integration, "inbound_assignee") + affected_groups = Group.objects.get_groups_by_external_issue( + integration, + orgs_with_sync_enabled, + external_issue_key, + ) + log_context = { + "integration_id": integration.id, + "email": email, + "issue_key": external_issue_key, + } + if not affected_groups: + logger.info("no-affected-groups", extra=log_context) + return [] + + if not assign: + for group in affected_groups: + GroupAssignee.objects.deassign( + group, + assignment_source=AssignmentSource.from_integration(integration), + ) + + return affected_groups + + users = user_service.get_many_by_email(emails=[email], is_verified=True) + users_by_id = {user.id: user for user in users} + projects_by_user = Project.objects.get_by_users(users) + + groups_assigned = [] for group in affected_groups: - GroupAssignee.objects.deassign( - group, - assignment_source=AssignmentSource.from_integration(integration), - ) - - return affected_groups - - users = user_service.get_many_by_email(emails=[email], is_verified=True) - users_by_id = {user.id: user for user in users} - projects_by_user = Project.objects.get_by_users(users) - - groups_assigned = [] - for group in affected_groups: - user_id = get_user_id(projects_by_user, group) - user = users_by_id.get(user_id) - if user: - GroupAssignee.objects.assign( - group, - user, - assignment_source=AssignmentSource.from_integration(integration), - ) - groups_assigned.append(group) - else: - logger.info("assignee-not-found-inbound", extra=log_context) - return groups_assigned + user_id = get_user_id(projects_by_user, group) + user = users_by_id.get(user_id) + if user: + GroupAssignee.objects.assign( + group, + user, + assignment_source=AssignmentSource.from_integration(integration), + ) + groups_assigned.append(group) + else: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_INBOUND_ASSIGNEE_NOT_FOUND, extra=log_context + ) + logger.info("inbound-assignee-not-found", extra=log_context) + return groups_assigned def sync_group_assignee_outbound( diff --git a/src/sentry/integrations/vsts/integration.py b/src/sentry/integrations/vsts/integration.py index b6b84563f5921b..8db5fe37936af1 100644 --- a/src/sentry/integrations/vsts/integration.py +++ b/src/sentry/integrations/vsts/integration.py @@ -20,6 +20,7 @@ from sentry.identity.vsts.provider import get_user_info from sentry.integrations.base import ( FeatureDescription, + IntegrationDomain, IntegrationFeatures, IntegrationMetadata, IntegrationProvider, @@ -31,6 +32,10 @@ from sentry.integrations.services.repository import RpcRepository, repository_service from sentry.integrations.source_code_management.repository import RepositoryIntegration from sentry.integrations.tasks.migrate_repo import migrate_repo +from sentry.integrations.utils.metrics import ( + IntegrationPipelineViewEvent, + IntegrationPipelineViewType, +) from sentry.integrations.vsts.issues import VstsIssuesSpec from sentry.models.apitoken import generate_token from sentry.models.repository import Repository @@ -631,43 +636,46 @@ def setup(self) -> None: class AccountConfigView(PipelineView): def dispatch(self, request: HttpRequest, 
pipeline: Pipeline) -> HttpResponseBase: - account_id = request.POST.get("account") - if account_id is not None: - state_accounts: Sequence[Mapping[str, Any]] | None = pipeline.fetch_state( - key="accounts" - ) - account = self.get_account_from_id(account_id, state_accounts or []) - if account is not None: - pipeline.bind_state("account", account) - return pipeline.next_step() - - state: Mapping[str, Any] | None = pipeline.fetch_state(key="identity") - access_token = (state or {}).get("data", {}).get("access_token") - user = get_user_info(access_token) - - accounts = self.get_accounts(access_token, user["uuid"]) - logger.info( - "vsts.get_accounts", - extra={ + with IntegrationPipelineViewEvent( + IntegrationPipelineViewType.ACCOUNT_CONFIG, + IntegrationDomain.SOURCE_CODE_MANAGEMENT, + VstsIntegrationProvider.key, + ).capture() as lifecycle: + account_id = request.POST.get("account") + if account_id is not None: + state_accounts: Sequence[Mapping[str, Any]] | None = pipeline.fetch_state( + key="accounts" + ) + account = self.get_account_from_id(account_id, state_accounts or []) + if account is not None: + pipeline.bind_state("account", account) + return pipeline.next_step() + + state: Mapping[str, Any] | None = pipeline.fetch_state(key="identity") + access_token = (state or {}).get("data", {}).get("access_token") + user = get_user_info(access_token) + + accounts = self.get_accounts(access_token, user["uuid"]) + extra = { "organization_id": pipeline.organization.id if pipeline.organization else None, "user_id": request.user.id, "accounts": accounts, - }, - ) - if not accounts or not accounts.get("value"): + } + if not accounts or not accounts.get("value"): + lifecycle.record_failure("no_accounts", extra=extra) + return render_to_response( + template="sentry/integrations/vsts-config.html", + context={"no_accounts": True}, + request=request, + ) + accounts = accounts["value"] + pipeline.bind_state("accounts", accounts) + account_form = AccountForm(accounts) return render_to_response( template="sentry/integrations/vsts-config.html", - context={"no_accounts": True}, + context={"form": account_form, "no_accounts": False}, request=request, ) - accounts = accounts["value"] - pipeline.bind_state("accounts", accounts) - account_form = AccountForm(accounts) - return render_to_response( - template="sentry/integrations/vsts-config.html", - context={"form": account_form, "no_accounts": False}, - request=request, - ) def get_account_from_id( self, account_id: int, accounts: Sequence[Mapping[str, Any]] diff --git a/src/sentry/integrations/vsts/issues.py b/src/sentry/integrations/vsts/issues.py index 61062507fbdfcf..df19746f290fd0 100644 --- a/src/sentry/integrations/vsts/issues.py +++ b/src/sentry/integrations/vsts/issues.py @@ -11,9 +11,6 @@ from sentry.integrations.mixins.issues import IssueSyncIntegration from sentry.integrations.services.integration import integration_service from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration -from sentry.integrations.source_code_management.metrics import ( - SourceCodeIssueIntegrationInteractionType, -) from sentry.models.activity import Activity from sentry.shared_integrations.exceptions import ApiError, ApiUnauthorized, IntegrationError from sentry.silo.base import all_silo_function @@ -171,36 +168,35 @@ def create_issue(self, data: Mapping[str, str], **kwargs: Any) -> Mapping[str, A """ Creates the issue on the remote service and returns an issue ID. 
""" - with self.record_event(SourceCodeIssueIntegrationInteractionType.CREATE_ISSUE).capture(): - project_id = data.get("project") - if project_id is None: - raise ValueError("Azure DevOps expects project") + project_id = data.get("project") + if project_id is None: + raise ValueError("Azure DevOps expects project") - client = self.get_client() + client = self.get_client() - title = data["title"] - description = data["description"] - item_type = data["work_item_type"] + title = data["title"] + description = data["description"] + item_type = data["work_item_type"] - try: - created_item = client.create_work_item( - project=project_id, - item_type=item_type, - title=title, - # Descriptions cannot easily be seen. So, a comment will be added as well. - description=markdown(description), - comment=markdown(description), - ) - except Exception as e: - self.raise_error(e) + try: + created_item = client.create_work_item( + project=project_id, + item_type=item_type, + title=title, + # Descriptions cannot easily be seen. So, a comment will be added as well. + description=markdown(description), + comment=markdown(description), + ) + except Exception as e: + self.raise_error(e) - project_name = created_item["fields"]["System.AreaPath"] - return { - "key": str(created_item["id"]), - "title": title, - "description": description, - "metadata": {"display_name": "{}#{}".format(project_name, created_item["id"])}, - } + project_name = created_item["fields"]["System.AreaPath"] + return { + "key": str(created_item["id"]), + "title": title, + "description": description, + "metadata": {"display_name": "{}#{}".format(project_name, created_item["id"])}, + } def get_issue(self, issue_id: int, **kwargs: Any) -> Mapping[str, Any]: client = self.get_client() @@ -223,110 +219,100 @@ def sync_assignee_outbound( assign: bool = True, **kwargs: Any, ) -> None: - with self.record_event( - SourceCodeIssueIntegrationInteractionType.SYNC_ASSIGNEE_OUTBOUND - ).capture() as lifecycle: - client = self.get_client() - assignee = None - - if user and assign is True: - sentry_emails = [email.lower() for email in user.emails] - continuation_token = None - while True: - vsts_users = client.get_users(self.model.name, continuation_token) - continuation_token = vsts_users.headers.get("X-MS-ContinuationToken") - for vsts_user in vsts_users["value"]: - vsts_email = vsts_user.get("mailAddress") - if vsts_email and vsts_email.lower() in sentry_emails: - assignee = vsts_user["mailAddress"] - break - - if not continuation_token: + client = self.get_client() + assignee = None + + if user and assign is True: + sentry_emails = [email.lower() for email in user.emails] + continuation_token = None + while True: + vsts_users = client.get_users(self.model.name, continuation_token) + continuation_token = vsts_users.headers.get("X-MS-ContinuationToken") + for vsts_user in vsts_users["value"]: + vsts_email = vsts_user.get("mailAddress") + if vsts_email and vsts_email.lower() in sentry_emails: + assignee = vsts_user["mailAddress"] break - if assignee is None: - # TODO(lb): Email people when this happens - self.logger.info( - "vsts.assignee-not-found", - extra={ - "integration_id": external_issue.integration_id, - "user_id": user.id, - "issue_key": external_issue.key, - }, - ) - lifecycle.record_halt() - return + if not continuation_token: + break - try: - client.update_work_item(external_issue.key, assigned_to=assignee) - except (ApiUnauthorized, ApiError): + if assignee is None: + # TODO(lb): Email people when this happens self.logger.info( - 
"vsts.failed-to-assign", + "vsts.assignee-not-found", extra={ "integration_id": external_issue.integration_id, - "user_id": user.id if user else None, + "user_id": user.id, "issue_key": external_issue.key, }, ) - lifecycle.record_halt() + return + + try: + client.update_work_item(external_issue.key, assigned_to=assignee) + except (ApiUnauthorized, ApiError): + self.logger.info( + "vsts.failed-to-assign", + extra={ + "integration_id": external_issue.integration_id, + "user_id": user.id if user else None, + "issue_key": external_issue.key, + }, + ) def sync_status_outbound( self, external_issue: "ExternalIssue", is_resolved: bool, project_id: int, **kwargs: Any ) -> None: - with self.record_event( - SourceCodeIssueIntegrationInteractionType.SYNC_STATUS_OUTBOUND - ).capture() as lifecycle: - client = self.get_client() - work_item = client.get_work_item(external_issue.key) - # For some reason, vsts doesn't include the project id - # in the work item response. - # TODO(jess): figure out if there's a better way to do this - vsts_project_name = work_item["fields"]["System.TeamProject"] - - vsts_projects = client.get_projects() - - vsts_project_id = None - for p in vsts_projects: - if p["name"] == vsts_project_name: - vsts_project_id = p["id"] - break - - integration_external_project = integration_service.get_integration_external_project( - organization_id=external_issue.organization_id, - integration_id=external_issue.integration_id, - external_id=vsts_project_id, + client = self.get_client() + work_item = client.get_work_item(external_issue.key) + # For some reason, vsts doesn't include the project id + # in the work item response. + # TODO(jess): figure out if there's a better way to do this + vsts_project_name = work_item["fields"]["System.TeamProject"] + + vsts_projects = client.get_projects() + + vsts_project_id = None + for p in vsts_projects: + if p["name"] == vsts_project_name: + vsts_project_id = p["id"] + break + + integration_external_project = integration_service.get_integration_external_project( + organization_id=external_issue.organization_id, + integration_id=external_issue.integration_id, + external_id=vsts_project_id, + ) + if integration_external_project is None: + self.logger.info( + "vsts.external-project-not-found", + extra={ + "integration_id": external_issue.integration_id, + "is_resolved": is_resolved, + "issue_key": external_issue.key, + }, ) - if integration_external_project is None: - self.logger.info( - "vsts.external-project-not-found", - extra={ - "integration_id": external_issue.integration_id, - "is_resolved": is_resolved, - "issue_key": external_issue.key, - }, - ) - lifecycle.record_halt() - return + return - status = ( - integration_external_project.resolved_status - if is_resolved - else integration_external_project.unresolved_status - ) + status = ( + integration_external_project.resolved_status + if is_resolved + else integration_external_project.unresolved_status + ) - try: - client.update_work_item(external_issue.key, state=status) - except (ApiUnauthorized, ApiError) as error: - self.logger.info( - "vsts.failed-to-change-status", - extra={ - "integration_id": external_issue.integration_id, - "is_resolved": is_resolved, - "issue_key": external_issue.key, - "exception": error, - }, - ) - lifecycle.record_halt() + try: + client.update_work_item(external_issue.key, state=status) + except (ApiUnauthorized, ApiError) as error: + self.logger.info( + "vsts.failed-to-change-status", + extra={ + "integration_id": external_issue.integration_id, + "is_resolved": 
is_resolved, + "issue_key": external_issue.key, + "exception": error, + }, + ) def get_resolve_sync_action(self, data: Mapping[str, Any]) -> ResolveSyncAction: done_states = self._get_done_statuses(data["project"]) diff --git a/src/sentry/integrations/vsts/search.py b/src/sentry/integrations/vsts/search.py index dfad6424b2ad7b..05b56eff862294 100644 --- a/src/sentry/integrations/vsts/search.py +++ b/src/sentry/integrations/vsts/search.py @@ -4,6 +4,7 @@ from sentry.api.base import control_silo_endpoint from sentry.integrations.source_code_management.issues import SourceCodeIssueIntegration +from sentry.integrations.source_code_management.metrics import SCMIntegrationInteractionType from sentry.integrations.source_code_management.search import SourceCodeSearchEndpoint from sentry.integrations.vsts.integration import VstsIntegration @@ -21,17 +22,18 @@ def installation_class(self): return VstsIntegration def handle_search_issues(self, installation: T, query: str, repo: str | None) -> Response: - if not query: - return Response([]) - - assert isinstance(installation, self.installation_class) - resp = installation.search_issues(query=query) - return Response( - [ - { - "label": f'({i["fields"]["system.id"]}) {i["fields"]["system.title"]}', - "value": i["fields"]["system.id"], - } - for i in resp.get("results", []) - ] - ) + with self.record_event(SCMIntegrationInteractionType.HANDLE_SEARCH_ISSUES).capture(): + if not query: + return Response([]) + + assert isinstance(installation, self.installation_class) + resp = installation.search_issues(query=query) + return Response( + [ + { + "label": f'({i["fields"]["system.id"]}) {i["fields"]["system.title"]}', + "value": i["fields"]["system.id"], + } + for i in resp.get("results", []) + ] + ) diff --git a/src/sentry/integrations/vsts/webhooks.py b/src/sentry/integrations/vsts/webhooks.py index d0d6ea877fd48f..7490129eddc01d 100644 --- a/src/sentry/integrations/vsts/webhooks.py +++ b/src/sentry/integrations/vsts/webhooks.py @@ -13,8 +13,15 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import Endpoint, region_silo_endpoint from sentry.constants import ObjectStatus +from sentry.integrations.base import IntegrationDomain from sentry.integrations.mixins.issues import IssueSyncIntegration +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, + ProjectManagementHaltReason, +) from sentry.integrations.services.integration import integration_service +from sentry.integrations.utils.metrics import IntegrationWebhookEvent, IntegrationWebhookEventType from sentry.integrations.utils.sync import sync_group_assignee_inbound from sentry.utils.email import parse_email @@ -67,7 +74,12 @@ def post(self, request: Request, *args: Any, **kwargs: Any) -> Response: if not check_webhook_secret(request, integration, event_type): return self.respond(status=status.HTTP_401_UNAUTHORIZED) - handle_updated_workitem(data, integration) + with IntegrationWebhookEvent( + interaction_type=IntegrationWebhookEventType.INBOUND_SYNC, + domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT, + provider_key="vsts", + ).capture(): + handle_updated_workitem(data, integration) return self.respond() @@ -122,31 +134,48 @@ def handle_assign_to( ) +# TODO(Gabe): Consolidate this with Jira's implementation, create DTO for status +# changes. 
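For orientation, the hunk below wraps the status-change handler in the lifecycle-capture pattern that this change set introduces. A minimal sketch of that pattern, using the ProjectManagementEvent names imported elsewhere in this diff and a hypothetical halt condition (not code from this change), looks roughly like the following; per the signature change near the top of this section, record_halt now accepts either an exception or a plain string reason, which is forwarded to _terminate() alongside the HALTED outcome:

from sentry.integrations.project_management.metrics import (
    ProjectManagementActionType,
    ProjectManagementEvent,
    ProjectManagementHaltReason,
)

def sketch_inbound_sync(integration, saw_non_sync_installation: bool) -> None:
    # "integration" and "saw_non_sync_installation" are illustrative placeholders,
    # not names from this diff.
    with ProjectManagementEvent(
        action_type=ProjectManagementActionType.INBOUND_STATUS_SYNC,
        integration=integration,
    ).capture() as lifecycle:
        if saw_non_sync_installation:
            # A string (or StrEnum) reason is recorded with the HALTED outcome
            # instead of an exception object.
            lifecycle.record_halt(
                ProjectManagementHaltReason.SYNC_NON_SYNC_INTEGRATION_PROVIDED,
                extra={"integration_id": integration.id},
            )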
def handle_status_change( integration: RpcIntegration, external_issue_key: str, status_change: Mapping[str, str] | None, project: str | None, ) -> None: - if status_change is None: - return - - org_integrations = integration_service.get_organization_integrations( - integration_id=integration.id - ) + with ProjectManagementEvent( + action_type=ProjectManagementActionType.INBOUND_STATUS_SYNC, integration=integration + ).capture() as lifecycle: + if status_change is None: + return + + org_integrations = integration_service.get_organization_integrations( + integration_id=integration.id + ) - for org_integration in org_integrations: - installation = integration.get_installation(organization_id=org_integration.organization_id) - if isinstance(installation, IssueSyncIntegration): - installation.sync_status_inbound( - external_issue_key, - { - "new_state": status_change["newValue"], - # old_state is None when the issue is New - "old_state": status_change.get("oldValue"), - "project": project, - }, + logging_context = { + "org_integration_ids": [oi.id for oi in org_integrations], + "integration_id": integration.id, + "status_change": status_change, + } + for org_integration in org_integrations: + installation = integration.get_installation( + organization_id=org_integration.organization_id ) + if isinstance(installation, IssueSyncIntegration): + installation.sync_status_inbound( + external_issue_key, + { + "new_state": status_change["newValue"], + # old_state is None when the issue is New + "old_state": status_change.get("oldValue"), + "project": project, + }, + ) + else: + lifecycle.record_halt( + ProjectManagementHaltReason.SYNC_NON_SYNC_INTEGRATION_PROVIDED, + extra=logging_context, + ) def handle_updated_workitem(data: Mapping[str, Any], integration: RpcIntegration) -> None: diff --git a/src/sentry/interfaces/contexts.py b/src/sentry/interfaces/contexts.py index 161a1f292365c6..070789529747c0 100644 --- a/src/sentry/interfaces/contexts.py +++ b/src/sentry/interfaces/contexts.py @@ -146,6 +146,28 @@ def change_type(self, value: int | float | list | dict) -> Any: return value +# NOTE: +# If you are adding a new context to tag mapping which creates a tag out of an interpolation +# of multiple context fields, you will most likely have to add the same mapping creation in Relay, +# which should be added directly to the context payload itself, and you should reflect this here. +# +# Current examples of this include the `os`, `runtime` and `browser` fields of their respective context. +# +# Example: +# Suppose you have a new context named "my_context" which has fields: +# - "field_1" +# - "field_2" +# +# And you want to create a tag named "field_3" which is equal to "{field_1}-{field_2}". +# +# If you do this here, on demand metrics will stop working because if a user filters by "field_3" and +# we generate a metrics extraction specification for it, Relay won't know what "field_3" means, it will +# only know "field_1" and "field_2" from the context. +# +# To solve this, you should materialize "field_3" during event normalization in Relay and directly express +# the mapping in Sentry as "field_3" is equal to "field_3" (which was added by Relay during normalization). 
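As a rough sketch of the note above (using the hypothetical "my_context" type from the example, not a context that exists in this change), a mapping that only echoes back a field Relay has already materialized could look like the following; it reuses the ContextType base and the @contexttype decorator defined in this module:

@contexttype
class MyContextType(ContextType):
    type = "my_context"
    # "field_3" is assumed to be materialized onto the context payload by Relay
    # during normalization, so the mapping simply reads it back as a tag.
    # Interpolating "{field_1}-{field_2}" here instead would break on demand
    # metrics, because Relay cannot derive "field_3" from an extraction spec.
    context_to_tag_mapping = {"field_3": "{field_3}"}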
+ + # TODO(dcramer): contexts need to document/describe expected (optional) fields @contexttype class DefaultContextType(ContextType): @@ -168,20 +190,20 @@ class DeviceContextType(ContextType): @contexttype class RuntimeContextType(ContextType): type = "runtime" - context_to_tag_mapping = {"": "{name} {version}", "name": "{name}"} + context_to_tag_mapping = {"": "{runtime}", "name": "{name}"} @contexttype class BrowserContextType(ContextType): type = "browser" - context_to_tag_mapping = {"": "{name} {version}", "name": "{name}"} + context_to_tag_mapping = {"": "{browser}", "name": "{name}"} # viewport @contexttype class OsContextType(ContextType): type = "os" - context_to_tag_mapping = {"": "{name} {version}", "name": "{name}", "rooted": "{rooted}"} + context_to_tag_mapping = {"": "{os}", "name": "{name}", "rooted": "{rooted}"} # build, rooted diff --git a/src/sentry/interfaces/user.py b/src/sentry/interfaces/user.py index 0923ddcc6ae1bc..814daac8d598b9 100644 --- a/src/sentry/interfaces/user.py +++ b/src/sentry/interfaces/user.py @@ -15,6 +15,7 @@ class EventUserApiContext(TypedDict, total=False): username: str | None ip_address: str | None name: str | None + geo: dict[str, str] | None data: dict[str, Any] | None @@ -69,6 +70,7 @@ def get_api_context(self, is_public=False, platform=None) -> EventUserApiContext "username": self.username, "ip_address": self.ip_address, "name": self.name, + "geo": self.geo.to_json() if self.geo is not None else None, "data": self.data, } @@ -80,6 +82,7 @@ def get_api_meta(self, meta, is_public=False, platform=None): "username": meta.get("username"), "ip_address": meta.get("ip_address"), "name": meta.get("name"), + "geo": meta.get("geo"), "data": meta.get("data"), } diff --git a/src/sentry/issues/endpoints/group_hashes.py b/src/sentry/issues/endpoints/group_hashes.py index 696a304bdf4dfd..73c5104ec47d8d 100644 --- a/src/sentry/issues/endpoints/group_hashes.py +++ b/src/sentry/issues/endpoints/group_hashes.py @@ -8,7 +8,7 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases import GroupEndpoint from sentry.api.paginator import GenericOffsetPaginator -from sentry.api.serializers import EventSerializer, serialize +from sentry.api.serializers import EventSerializer, SimpleEventSerializer, serialize from sentry.models.grouphash import GroupHash from sentry.tasks.unmerge import unmerge from sentry.utils import metrics @@ -31,8 +31,10 @@ def get(self, request: Request, group) -> Response: checksums used to aggregate individual events. :pparam string issue_id: the ID of the issue to retrieve. + :pparam bool full: If this is set to true, the event payload will include the full event body, including the stacktrace. 
:auth: required """ + full = request.GET.get("full", True) data_fn = partial( lambda *args, **kwargs: raw_query(*args, **kwargs)["data"], @@ -47,7 +49,9 @@ def get(self, request: Request, group) -> Response: tenant_ids={"organization_id": group.project.organization_id}, ) - handle_results = partial(self.__handle_results, group.project_id, group.id, request.user) + handle_results = partial( + self.__handle_results, group.project_id, group.id, request.user, full + ) return self.paginate( request=request, @@ -90,13 +94,16 @@ def put(self, request: Request, group) -> Response: return Response(status=202) - def __handle_results(self, project_id, group_id, user, results): - return [self.__handle_result(user, project_id, group_id, result) for result in results] + def __handle_results(self, project_id, group_id, user, full, results): + return [ + self.__handle_result(user, project_id, group_id, full, result) for result in results + ] - def __handle_result(self, user, project_id, group_id, result): + def __handle_result(self, user, project_id, group_id, full, result): event = eventstore.backend.get_event_by_id(project_id, result["event_id"]) + serializer = EventSerializer if full else SimpleEventSerializer return { "id": result["primary_hash"], - "latestEvent": serialize(event, user, EventSerializer()), + "latestEvent": serialize(event, user, serializer()), } diff --git a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py index 55cdda74aa4f19..b51e57e3f8b760 100644 --- a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py +++ b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py @@ -18,6 +18,7 @@ from sentry.seer.similarity.similar_issues import get_similarity_data_from_seer from sentry.seer.similarity.types import SeerSimilarIssueData, SimilarIssuesEmbeddingsRequest from sentry.seer.similarity.utils import ( + TooManyOnlySystemFramesException, event_content_has_stacktrace, get_stacktrace_string, killswitch_enabled, @@ -74,7 +75,7 @@ def get_formatted_results( return [(serialized_groups[group_id], group_data[group_id]) for group_id in group_data] - def get(self, request: Request, group) -> Response: + def get(self, request: Request, group: Group) -> Response: if killswitch_enabled(group.project.id): return Response([]) @@ -82,9 +83,16 @@ def get(self, request: Request, group) -> Response: stacktrace_string = "" if latest_event and event_content_has_stacktrace(latest_event): grouping_info = get_grouping_info(None, project=group.project, event=latest_event) - stacktrace_string = get_stacktrace_string(grouping_info) - - if stacktrace_string == "" or not latest_event: + try: + stacktrace_string = get_stacktrace_string( + grouping_info, platform=latest_event.platform + ) + except TooManyOnlySystemFramesException: + pass + except Exception: + logger.exception("Unexpected exception in stacktrace string formatting") + + if not stacktrace_string or not latest_event: return Response([]) # No exception, stacktrace or in-app frames, or event similar_issues_params: SimilarIssuesEmbeddingsRequest = { diff --git a/src/sentry/issues/endpoints/organization_group_search_views.py b/src/sentry/issues/endpoints/organization_group_search_views.py index e0f9f123bc2764..99d46dab86c745 100644 --- a/src/sentry/issues/endpoints/organization_group_search_views.py +++ b/src/sentry/issues/endpoints/organization_group_search_views.py @@ -118,7 +118,6 @@ def put(self, request: Request, organization: Organization) -> Response: def 
bulk_update_views( org: Organization, user_id: int, views: list[GroupSearchViewValidatorResponse] ) -> None: - existing_view_ids = [view["id"] for view in views if "id" in view] _delete_missing_views(org, user_id, view_ids_to_keep=existing_view_ids) @@ -140,7 +139,7 @@ def _update_existing_view( org: Organization, user_id: int, view: GroupSearchViewValidatorResponse, position: int ) -> None: try: - GroupSearchView.objects.get(id=view["id"]).update( + GroupSearchView.objects.get(id=view["id"], user_id=user_id).update( name=view["name"], query=view["query"], query_sort=view["querySort"], diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index bd5670ddc2883c..7810e9f66ba712 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -22,7 +22,7 @@ from sentry.models.organization import Organization from sentry.models.project import Project from sentry.users.models.user import User - from sentry.workflow_engine.processors.detector import DetectorHandler + from sentry.workflow_engine.handlers.detector import DetectorHandler from sentry.workflow_engine.endpoints.validators import BaseGroupTypeDetectorValidator import logging diff --git a/src/sentry/issues/ignored.py b/src/sentry/issues/ignored.py index 439159e06260e7..33e20a49424dbd 100644 --- a/src/sentry/issues/ignored.py +++ b/src/sentry/issues/ignored.py @@ -6,6 +6,7 @@ from datetime import datetime, timedelta from typing import Any, TypedDict +from django.contrib.auth.models import AnonymousUser from django.utils import timezone from sentry.db.postgres.transactions import in_test_hide_transaction_boundary @@ -86,11 +87,10 @@ def handle_archived_until_escalating( def handle_ignored( - group_ids: Sequence[Group], group_list: Sequence[Group], status_details: dict[str, Any], - acting_user: User | None, - user: User | RpcUser, + acting_user: User | RpcUser | AnonymousUser, + user: User | RpcUser | AnonymousUser, ) -> IgnoredStatusDetails: """ Handle issues that are ignored and create a snooze for them. @@ -100,7 +100,7 @@ def handle_ignored( Returns: a dict with the statusDetails for ignore conditions. 
""" metrics.incr("group.ignored", skip_internal=True) - for group in group_ids: + for group in group_list: remove_group_from_inbox(group, action=GroupInboxRemoveAction.IGNORED, user=acting_user) new_status_details: IgnoredStatusDetails = {} @@ -153,6 +153,6 @@ def handle_ignored( actor=serialized_user[0] if serialized_user else None, ) else: - GroupSnooze.objects.filter(group__in=group_ids).delete() + GroupSnooze.objects.filter(group__in=[group.id for group in group_list]).delete() return new_status_details diff --git a/src/sentry/issues/merge.py b/src/sentry/issues/merge.py index 314d6fdaedc513..fdd13d5113f1f0 100644 --- a/src/sentry/issues/merge.py +++ b/src/sentry/issues/merge.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Sequence +from collections.abc import Mapping, Sequence from typing import TypedDict from uuid import uuid4 @@ -23,7 +23,7 @@ class MergedGroup(TypedDict): def handle_merge( group_list: Sequence[Group], - project_lookup: dict[int, Project], + project_lookup: Mapping[int, Project], acting_user: User | None, ) -> MergedGroup: """ diff --git a/src/sentry/issues/status_change.py b/src/sentry/issues/status_change.py index cfda68a6ef8384..25c5679235df63 100644 --- a/src/sentry/issues/status_change.py +++ b/src/sentry/issues/status_change.py @@ -1,8 +1,8 @@ from __future__ import annotations import logging -from collections import defaultdict, namedtuple -from collections.abc import Sequence +from collections import defaultdict +from collections.abc import Mapping, Sequence from datetime import datetime, timedelta, timezone from typing import Any @@ -25,13 +25,12 @@ from sentry.utils import json logger = logging.getLogger(__name__) -ActivityInfo = namedtuple("ActivityInfo", ("activity_type", "activity_data")) def infer_substatus( new_status: int | None, new_substatus: int | None, - status_details: dict[str, Any], + status_details: Mapping[str, Any], group_list: Sequence[Group], ) -> int | None: if new_substatus is not None: @@ -67,19 +66,17 @@ def infer_substatus( def handle_status_update( group_list: Sequence[Group], projects: Sequence[Project], - project_lookup: dict[int, Project], + project_lookup: Mapping[int, Project], new_status: int, new_substatus: int | None, is_bulk: bool, status_details: dict[str, Any], acting_user: User | None, sender: Any, -) -> ActivityInfo: +) -> None: """ Update the status for a list of groups and create entries for Activity and GroupHistory. This currently handles unresolving or ignoring groups. - - Returns a tuple of (activity_type, activity_data) for the activity that was created. 
""" activity_data = {} activity_type = ( @@ -173,5 +170,3 @@ def handle_status_update( created=False, update_fields=["status", "substatus"], ) - - return ActivityInfo(activity_type, activity_data) diff --git a/src/sentry/issues/update_inbox.py b/src/sentry/issues/update_inbox.py index 18421e9458e79a..698c1f8c6ff40c 100644 --- a/src/sentry/issues/update_inbox.py +++ b/src/sentry/issues/update_inbox.py @@ -1,5 +1,6 @@ from __future__ import annotations +from collections.abc import Mapping, Sequence from typing import Any from sentry.issues.ongoing import bulk_transition_group_to_ongoing @@ -18,8 +19,8 @@ def update_inbox( in_inbox: bool, - group_list: list[Group], - project_lookup: dict[int, Project], + group_list: Sequence[Group], + project_lookup: Mapping[int, Project], acting_user: User | None, http_referrer: str, sender: Any, diff --git a/src/sentry/lang/java/processing.py b/src/sentry/lang/java/processing.py index 136f95064805e3..2da820bf0dcea8 100644 --- a/src/sentry/lang/java/processing.py +++ b/src/sentry/lang/java/processing.py @@ -7,7 +7,7 @@ from sentry.attachments import CachedAttachment, attachment_cache from sentry.ingest.consumer.processors import CACHE_TIMEOUT -from sentry.lang.java.utils import get_jvm_images, get_proguard_images +from sentry.lang.java.utils import JAVA_PLATFORMS, get_jvm_images, get_proguard_images from sentry.lang.native.error import SymbolicationFailed, write_error from sentry.lang.native.symbolicator import Symbolicator from sentry.models.eventerror import EventError @@ -85,11 +85,11 @@ def _handles_frame(frame: dict[str, Any], platform: str) -> bool: return ( "function" in frame and "module" in frame - and (frame.get("platform", None) or platform) == "java" + and (frame.get("platform", None) or platform) in JAVA_PLATFORMS ) -FRAME_FIELDS = ("abs_path", "lineno", "function", "module", "filename", "in_app") +FRAME_FIELDS = ("platform", "abs_path", "lineno", "function", "module", "filename", "in_app") def _normalize_frame(raw_frame: dict[str, Any], index: int) -> dict[str, Any]: @@ -287,6 +287,7 @@ def process_jvm_stacktraces(symbolicator: Symbolicator, data: Any) -> Any: release_package = _get_release_package(symbolicator.project, data.get("release")) metrics.incr("process.java.symbolicate.request") response = symbolicator.process_jvm( + platform=data.get("platform"), exceptions=[ {"module": exc["module"], "type": exc["type"]} for exc in processable_exceptions ], diff --git a/src/sentry/lang/java/utils.py b/src/sentry/lang/java/utils.py index 705ca103b8c7cc..8fb98d52d6ff08 100644 --- a/src/sentry/lang/java/utils.py +++ b/src/sentry/lang/java/utils.py @@ -11,9 +11,19 @@ from sentry.lang.java.proguard import open_proguard_mapper from sentry.models.debugfile import ProjectDebugFile from sentry.models.project import Project +from sentry.stacktraces.processing import StacktraceInfo +from sentry.utils import metrics from sentry.utils.cache import cache_key_for_event from sentry.utils.safe import get_path +# Platform values that should mark an event +# or frame as being Java for the purposes +# of symbolication. +# +# Strictly speaking, this should probably include +# "android" too—at least we use it in profiling. 
+JAVA_PLATFORMS = ("java",) + def is_valid_proguard_image(image): return bool(image) and image.get("type") == "proguard" and image.get("uuid") is not None @@ -107,10 +117,24 @@ def deobfuscation_template(data, map_type, deobfuscation_fn): attachment_cache.set(cache_key, attachments=new_attachments, timeout=CACHE_TIMEOUT) -def is_jvm_event(data: Any) -> bool: - """Returns whether `data` is a JVM event, based on its images.""" +def is_jvm_event(data: Any, stacktraces: list[StacktraceInfo]) -> bool: + """Returns whether `data` is a JVM event, based on its platform, + the supplied stacktraces, and its images.""" + + platform = data.get("platform") + + if platform in JAVA_PLATFORMS: + return True + + for stacktrace in stacktraces: + # The platforms of a stacktrace are exactly the platforms of its frames + # so this is tantamount to checking if any frame has a Java platform. + if any(x in JAVA_PLATFORMS for x in stacktrace.platforms): + return True # check if there are any JVM or Proguard images + # TODO: Can this actually happen if the event platform + # is not "java"? images = get_path( data, "debug_meta", @@ -118,4 +142,10 @@ def is_jvm_event(data: Any) -> bool: filter=lambda x: is_valid_jvm_image(x) or is_valid_proguard_image(x), default=(), ) - return bool(images) + + if images: + metrics.incr("process.java.symbolicate.missing_platform", tags={"platform": platform}) + + return True + + return False diff --git a/src/sentry/lang/javascript/processing.py b/src/sentry/lang/javascript/processing.py index 57def72b0dbe9f..3c815e6774f5d2 100644 --- a/src/sentry/lang/javascript/processing.py +++ b/src/sentry/lang/javascript/processing.py @@ -3,6 +3,7 @@ from typing import Any from sentry.debug_files.artifact_bundles import maybe_renew_artifact_bundles_from_processing +from sentry.lang.javascript.utils import JAVASCRIPT_PLATFORMS from sentry.lang.native.error import SymbolicationFailed, write_error from sentry.lang.native.symbolicator import Symbolicator from sentry.models.eventerror import EventError @@ -171,6 +172,11 @@ def map_symbolicator_process_js_errors(errors): def _handles_frame(frame, data): abs_path = frame.get("abs_path") + platform = frame.get("platform") or data.get("platform", "unknown") + + # Skip non-JS frames + if platform not in JAVASCRIPT_PLATFORMS: + return False # Skip frames without an `abs_path` or line number if not abs_path or not frame.get("lineno"): @@ -181,7 +187,7 @@ def _handles_frame(frame, data): return False # Skip builtin node modules - if _is_built_in(abs_path, data.get("platform")): + if _is_built_in(abs_path, platform): return False return True @@ -205,7 +211,7 @@ def _normalize_nonhandled_frame(frame, data): return frame -FRAME_FIELDS = ("abs_path", "lineno", "colno", "function") +FRAME_FIELDS = ("platform", "abs_path", "lineno", "colno", "function") def _normalize_frame(raw_frame: Any) -> dict: @@ -240,6 +246,7 @@ def process_js_stacktraces(symbolicator: Symbolicator, data: Any) -> Any: metrics.incr("process.javascript.symbolicate.request") response = symbolicator.process_js( + platform=data.get("platform"), stacktraces=stacktraces, modules=modules, release=data.get("release"), diff --git a/src/sentry/lang/javascript/utils.py b/src/sentry/lang/javascript/utils.py index 6c7e5c4db1cf13..a17f6f30007cd7 100644 --- a/src/sentry/lang/javascript/utils.py +++ b/src/sentry/lang/javascript/utils.py @@ -11,6 +11,12 @@ LINES_OF_CONTEXT = 5 +# Platform values that should mark an event +# or frame as being JavaScript for the purposes +# of symbolication. 
+JAVASCRIPT_PLATFORMS = ("javascript", "node") + + def get_source_context( source: list[bytes], lineno: int, context=LINES_OF_CONTEXT ) -> tuple[list[bytes] | None, bytes | None, list[bytes] | None]: @@ -127,11 +133,13 @@ def is_js_event(data: Any, stacktraces: list[StacktraceInfo]) -> bool: """Returns whether `data` is a JS event, based on its platform and the supplied stacktraces.""" - if data.get("platform") in ("javascript", "node"): + if data.get("platform") in JAVASCRIPT_PLATFORMS: return True for stacktrace in stacktraces: - if any(x in ("javascript", "node") for x in stacktrace.platforms): + # The platforms of a stacktrace are exactly the platforms of its frames + # so this is tantamount to checking if any frame has a JS platform. + if any(x in JAVASCRIPT_PLATFORMS for x in stacktrace.platforms): return True return False diff --git a/src/sentry/lang/native/processing.py b/src/sentry/lang/native/processing.py index 7b8b9d931f0c60..022b30d4198780 100644 --- a/src/sentry/lang/native/processing.py +++ b/src/sentry/lang/native/processing.py @@ -285,7 +285,7 @@ def process_minidump(symbolicator: Symbolicator, data: Any) -> Any: return metrics.incr("process.native.symbolicate.request") - response = symbolicator.process_minidump(minidump.data) + response = symbolicator.process_minidump(data.get("platform"), minidump.data) if _handle_response_status(data, response): _merge_full_response(data, response) @@ -308,7 +308,7 @@ def process_applecrashreport(symbolicator: Symbolicator, data: Any) -> Any: return metrics.incr("process.native.symbolicate.request") - response = symbolicator.process_applecrashreport(report.data) + response = symbolicator.process_applecrashreport(data.get("platform"), report.data) if _handle_response_status(data, response): _merge_full_response(data, response) @@ -423,7 +423,9 @@ def process_native_stacktraces(symbolicator: Symbolicator, data: Any) -> Any: signal = signal_from_data(data) metrics.incr("process.native.symbolicate.request") - response = symbolicator.process_payload(stacktraces=stacktraces, modules=modules, signal=signal) + response = symbolicator.process_payload( + platform=data.get("platform"), stacktraces=stacktraces, modules=modules, signal=signal + ) if not _handle_response_status(data, response): return data diff --git a/src/sentry/lang/native/symbolicator.py b/src/sentry/lang/native/symbolicator.py index 7a256e0891b65c..505c60b913b21c 100644 --- a/src/sentry/lang/native/symbolicator.py +++ b/src/sentry/lang/native/symbolicator.py @@ -156,10 +156,11 @@ def _process(self, task_name: str, path: str, **kwargs): # Otherwise, we are done processing, yay return json_response - def process_minidump(self, minidump): + def process_minidump(self, platform, minidump): (sources, process_response) = sources_for_symbolication(self.project) scraping_config = get_scraping_config(self.project) data = { + "platform": orjson.dumps(platform).decode(), "sources": orjson.dumps(sources).decode(), "scraping": orjson.dumps(scraping_config).decode(), "options": '{"dif_candidates": true}', @@ -173,10 +174,11 @@ def process_minidump(self, minidump): ) return process_response(res) - def process_applecrashreport(self, report): + def process_applecrashreport(self, platform, report): (sources, process_response) = sources_for_symbolication(self.project) scraping_config = get_scraping_config(self.project) data = { + "platform": orjson.dumps(platform).decode(), "sources": orjson.dumps(sources).decode(), "scraping": orjson.dumps(scraping_config).decode(), "options": '{"dif_candidates": 
true}', @@ -190,10 +192,13 @@ def process_applecrashreport(self, report): ) return process_response(res) - def process_payload(self, stacktraces, modules, signal=None, apply_source_context=True): + def process_payload( + self, platform, stacktraces, modules, signal=None, apply_source_context=True + ): (sources, process_response) = sources_for_symbolication(self.project) scraping_config = get_scraping_config(self.project) json = { + "platform": platform, "sources": sources, "options": { "dif_candidates": True, @@ -210,11 +215,12 @@ def process_payload(self, stacktraces, modules, signal=None, apply_source_contex res = self._process("symbolicate_stacktraces", "symbolicate", json=json) return process_response(res) - def process_js(self, stacktraces, modules, release, dist, apply_source_context=True): + def process_js(self, platform, stacktraces, modules, release, dist, apply_source_context=True): source = get_internal_artifact_lookup_source(self.project) scraping_config = get_scraping_config(self.project) json = { + "platform": platform, "source": source, "stacktraces": stacktraces, "modules": modules, @@ -231,6 +237,7 @@ def process_js(self, stacktraces, modules, release, dist, apply_source_context=T def process_jvm( self, + platform, exceptions, stacktraces, modules, @@ -242,6 +249,7 @@ def process_jvm( Process a JVM event by remapping its frames and exceptions with ProGuard. + :param platform: The event's platform. This should be either unset or "java". :param exceptions: The event's exceptions. These must contain a `type` and a `module`. :param stacktraces: The event's stacktraces. Frames must contain a `function` and a `module`. :param modules: ProGuard modules and source bundles. They must contain a `uuid` and have a @@ -253,6 +261,7 @@ def process_jvm( source = get_internal_source(self.project) json = { + "platform": platform, "sources": [source], "exceptions": exceptions, "stacktraces": stacktraces, diff --git a/src/sentry/management/commands/makemigrations.py b/src/sentry/management/commands/makemigrations.py index 500e35b5e7ceff..e701094a4efea2 100644 --- a/src/sentry/management/commands/makemigrations.py +++ b/src/sentry/management/commands/makemigrations.py @@ -71,7 +71,7 @@ def handle(self, *app_labels, **options): if options.get("check_changes"): validate(migrations_filepath, latest_migration_by_app) else: - result = "\n".join( + result = "\n\n".join( f"{app_label}: {name}" for app_label, name in sorted(latest_migration_by_app.items()) ) diff --git a/src/sentry/middleware/customer_domain.py b/src/sentry/middleware/customer_domain.py index 8a81a14c2e753a..bc0de539a98136 100644 --- a/src/sentry/middleware/customer_domain.py +++ b/src/sentry/middleware/customer_domain.py @@ -74,8 +74,7 @@ def _resolve_redirect_url(request, activeorg): path = reverse(result.url_name or result.func, kwargs=kwargs) qs = _query_string(request) - redirect_url = f"{redirect_url}{path}{qs}" - return redirect_url + return f"{redirect_url}{path}{qs}" class CustomerDomainMiddleware: diff --git a/src/sentry/middleware/flag.py b/src/sentry/middleware/flag.py deleted file mode 100644 index 49d0e486d42c35..00000000000000 --- a/src/sentry/middleware/flag.py +++ /dev/null @@ -1,15 +0,0 @@ -from collections.abc import Callable - -from django.http.request import HttpRequest -from django.http.response import HttpResponseBase - -from sentry.utils.flag import initialize_flag_manager - - -class FlagMiddleware: - def __init__(self, get_response: Callable[[HttpRequest], HttpResponseBase]): - self.get_response = 
get_response - - def __call__(self, request: HttpRequest) -> HttpResponseBase: - initialize_flag_manager() - return self.get_response(request) diff --git a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py index 5592f4a25a27a5..16ef15658ea34a 100644 --- a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py +++ b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py @@ -54,6 +54,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + replaces = [ ("sentry", "0001_squashed_0200_release_indices"), ("sentry", "0201_semver_package"), diff --git a/src/sentry/migrations/0490_add_is_test_to_org.py b/src/sentry/migrations/0490_add_is_test_to_org.py index 5ea600ad4cf024..398394ba1409cb 100644 --- a/src/sentry/migrations/0490_add_is_test_to_org.py +++ b/src/sentry/migrations/0490_add_is_test_to_org.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0489_index_checkin_timeout"), ] diff --git a/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py b/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py index a239fb98237803..4e30ad2812ad4c 100644 --- a/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py +++ b/src/sentry/migrations/0491_remove_orgmemmap_unique_constraints.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0490_add_is_test_to_org"), ] diff --git a/src/sentry/migrations/0505_debugfile_date_accessed.py b/src/sentry/migrations/0505_debugfile_date_accessed.py index c39b0755a95d94..ff60d15fe6bbad 100644 --- a/src/sentry/migrations/0505_debugfile_date_accessed.py +++ b/src/sentry/migrations/0505_debugfile_date_accessed.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0504_add_artifact_bundle_index"), ] diff --git a/src/sentry/migrations/0526_pr_comment_type_column.py b/src/sentry/migrations/0526_pr_comment_type_column.py index 3bfa0f8575e774..1791094fdecd7d 100644 --- a/src/sentry/migrations/0526_pr_comment_type_column.py +++ b/src/sentry/migrations/0526_pr_comment_type_column.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0525_add_next_checkin_latest"), ] diff --git a/src/sentry/migrations/0535_add_created_date_to_outbox_model.py b/src/sentry/migrations/0535_add_created_date_to_outbox_model.py index 179daa393d1816..abff5766528167 100644 --- a/src/sentry/migrations/0535_add_created_date_to_outbox_model.py +++ b/src/sentry/migrations/0535_add_created_date_to_outbox_model.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0534_add_notification_uuid_to_rule_fire_history"), ] diff --git a/src/sentry/migrations/0544_remove_groupsubscription_columns.py b/src/sentry/migrations/0544_remove_groupsubscription_columns.py index 10f92624209879..bc6ad90444e2d2 100644 --- a/src/sentry/migrations/0544_remove_groupsubscription_columns.py +++ b/src/sentry/migrations/0544_remove_groupsubscription_columns.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0543_add_team_id_to_groupsubscription"), ] diff --git a/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py b/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py index b24ed739818100..71a4ebafc20939 100644 --- a/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py +++ b/src/sentry/migrations/0545_add_last_verified_auth_ident_replica.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0544_remove_groupsubscription_columns"), ] diff --git a/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py b/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py index 324603794f7859..6e2ae095940622 100644 --- a/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py +++ b/src/sentry/migrations/0548_add_is_unclaimed_boolean_to_user.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0547_add_commitfilechange_language_column"), ] diff --git a/src/sentry/migrations/0549_re_add_groupsubscription_columns.py b/src/sentry/migrations/0549_re_add_groupsubscription_columns.py index f124f9337c4f8f..4468a981c603bf 100644 --- a/src/sentry/migrations/0549_re_add_groupsubscription_columns.py +++ b/src/sentry/migrations/0549_re_add_groupsubscription_columns.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = True + allow_run_sql = True + dependencies = [ ("sentry", "0548_add_is_unclaimed_boolean_to_user"), ] diff --git a/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py b/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py index e249bd0d688fd4..3185e6373b831d 100644 --- a/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py +++ b/src/sentry/migrations/0556_organizationmapping_replicate_require_2fa.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0555_set_neglectedrule_email_date_columns_nullable"), ] diff --git a/src/sentry/migrations/0564_commitfilechange_delete_language_column.py b/src/sentry/migrations/0564_commitfilechange_delete_language_column.py index c29268441b68d4..9a5cc11a0d9ee6 100644 --- a/src/sentry/migrations/0564_commitfilechange_delete_language_column.py +++ b/src/sentry/migrations/0564_commitfilechange_delete_language_column.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0563_commitfilechange_drop_language_column"), ] diff --git a/src/sentry/migrations/0570_repository_add_languages_column.py b/src/sentry/migrations/0570_repository_add_languages_column.py index 8d61069af4bbdb..9b7cd95383156f 100644 --- a/src/sentry/migrations/0570_repository_add_languages_column.py +++ b/src/sentry/migrations/0570_repository_add_languages_column.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0569_dashboard_widgets_indicator"), ] diff --git a/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py b/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py index 8d87de590a53d8..5641c4e1d47a1d 100644 --- a/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py +++ b/src/sentry/migrations/0583_add_early_adopter_to_organization_mapping.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0582_add_status_indexes_checkins"), ] diff --git a/src/sentry/migrations/0590_add_metadata_to_sentry_app.py b/src/sentry/migrations/0590_add_metadata_to_sentry_app.py index 70467066f7a0b4..17c605b63b648b 100644 --- a/src/sentry/migrations/0590_add_metadata_to_sentry_app.py +++ b/src/sentry/migrations/0590_add_metadata_to_sentry_app.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0589_add_commit_date_added_indices"), ] diff --git a/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py b/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py index 9504b752d61bef..d4702555a72981 100644 --- a/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py +++ b/src/sentry/migrations/0591_remove_relocation_hybrid_cloud_foreign_keys.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0590_add_metadata_to_sentry_app"), ] diff --git a/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py b/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py index f184baa6f83908..6ce84ad9f211e1 100644 --- a/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py +++ b/src/sentry/migrations/0592_delete_relocation_hybrid_cloud_foreign_keys.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0591_remove_relocation_hybrid_cloud_foreign_keys"), ] diff --git a/src/sentry/migrations/0607_drop_externalactor_actorid.py b/src/sentry/migrations/0607_drop_externalactor_actorid.py index 6f7bf3fd118686..6052d52bb1aa2f 100644 --- a/src/sentry/migrations/0607_drop_externalactor_actorid.py +++ b/src/sentry/migrations/0607_drop_externalactor_actorid.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0606_update_user_to_optional_organization_slug_reservation"), ] diff --git a/src/sentry/migrations/0610_remove_notification_setting_table.py b/src/sentry/migrations/0610_remove_notification_setting_table.py index 47dfb155e353ac..e88cf1a990ec60 100644 --- a/src/sentry/migrations/0610_remove_notification_setting_table.py +++ b/src/sentry/migrations/0610_remove_notification_setting_table.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0609_remove_notification_setting_model"), ] diff --git a/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py b/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py index fe25887609f43f..040d95c6bfcce5 100644 --- a/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py +++ b/src/sentry/migrations/0617_monitor_boolean_fields_muted_disabled.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0616_drop_event_user_id_from_userreport_table_step_1"), ] diff --git a/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py b/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py index 0a2d77ed6ca01d..60c5c7d3c100c6 100644 --- a/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py +++ b/src/sentry/migrations/0618_drop_event_user_id_from_userreport_table_step_2.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0617_monitor_boolean_fields_muted_disabled"), ] diff --git a/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py b/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py index 9d36e7039b1533..28437b53203c1b 100644 --- a/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py +++ b/src/sentry/migrations/0624_add_is_muted_monitorenvironment.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0623_increase_regression_fingerprint_length"), ] diff --git a/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py b/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py index fdad116dc7ee45..6dffdce9d51f16 100644 --- a/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py +++ b/src/sentry/migrations/0631_add_priority_columns_to_groupedmessage.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0630_better_monitor_latest_index"), ] diff --git a/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py b/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py index f0639df01f5e08..25c185d4151429 100644 --- a/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py +++ b/src/sentry/migrations/0633_add_priority_locked_at_to_groupedmessage.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0632_apitoken_backfill_last_chars"), ] diff --git a/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py b/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py index 2c7a45b552bbf6..c06a425026ca2f 100644 --- a/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py +++ b/src/sentry/migrations/0637_remove_pr_comment_pr_id_constraint.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0636_monitor_incident_env_resolving_index"), ] diff --git a/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py b/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py index dc5e9246bdb970..b759ff0f296f58 100644 --- a/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py +++ b/src/sentry/migrations/0638_add_date_added_to_dashboard_widget_on_demand.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0637_remove_pr_comment_pr_id_constraint"), ] diff --git a/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py b/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py index 1c6498a4817afc..09068d38850701 100644 --- a/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py +++ b/src/sentry/migrations/0643_add_date_modified_col_dashboard_widget_query.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0642_index_together_release"), ] diff --git a/src/sentry/migrations/0651_enable_activated_alert_rules.py b/src/sentry/migrations/0651_enable_activated_alert_rules.py index 60408e2a6f4779..4e1d2640b9edc3 100644 --- a/src/sentry/migrations/0651_enable_activated_alert_rules.py +++ b/src/sentry/migrations/0651_enable_activated_alert_rules.py @@ -23,6 +23,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0650_create_sentryshot"), ] diff --git a/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py b/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py index d1d5dc69bbc3d6..31fbdf83276a70 100644 --- a/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py +++ b/src/sentry/migrations/0657_add_status_column_for_alert_rule_trigger_action.py @@ -19,6 +19,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0656_add_discover_dataset_split_dashboard"), ] diff --git a/src/sentry/migrations/0658_projectkey_usecase.py b/src/sentry/migrations/0658_projectkey_usecase.py index 721f564704bce8..84964091d4aa0d 100644 --- a/src/sentry/migrations/0658_projectkey_usecase.py +++ b/src/sentry/migrations/0658_projectkey_usecase.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0657_add_status_column_for_alert_rule_trigger_action"), ] diff --git a/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py index 70ec590c959b64..670b5d9aa5bb6f 100644 --- a/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py +++ b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0662_monitor_drop_last_state_change"), ] diff --git a/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py b/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py index dbef628771474b..56e9a741d8020a 100644 --- a/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py +++ b/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. 
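The comment block these files share spells out when `is_post_deployment` should be flipped to `True` (large data migrations, index builds on big tables), and a few migrations further down in this diff do exactly that. A minimal sketch of that case, with hypothetical model and index names:

```python
from django.db import migrations, models

from sentry.new_migrations.migrations import CheckedMigration


class Migration(CheckedMigration):
    # Not applied automatically with the deploy; run manually afterwards via
    # https://develop.sentry.dev/database-migrations/#migration-deployment
    is_post_deployment = True

    dependencies = [
        ("sentry", "0xxx_previous_migration"),  # hypothetical
    ]

    operations = [
        # Index creation on a large table: safe to do after the code ships,
        # and better done outside the deploy so it does not block it.
        migrations.AddIndex(
            model_name="examplemodel",  # hypothetical
            index=models.Index(
                fields=["status", "date_added"], name="example_status_added_idx"
            ),
        ),
    ]
```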
is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0664_create_new_broken_monitor_detection_table"), ] diff --git a/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py b/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py index d45b47230af02f..6f6fb4a07250d5 100644 --- a/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py +++ b/src/sentry/migrations/0678_add_is_hidden_dashboard_widget_query.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0677_unpickle_project_options_again"), ] diff --git a/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py b/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py index c6a78343712ecf..1bf094a1582093 100644 --- a/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py +++ b/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = True + allow_run_sql = True + dependencies = [ ("sentry", "0681_unpickle_authenticator_again"), ] diff --git a/src/sentry/migrations/0689_drop_config_from_cron_checkin.py b/src/sentry/migrations/0689_drop_config_from_cron_checkin.py index 724035f46302cf..e2165fe3b044b8 100644 --- a/src/sentry/migrations/0689_drop_config_from_cron_checkin.py +++ b/src/sentry/migrations/0689_drop_config_from_cron_checkin.py @@ -18,6 +18,8 @@ class Migration(CheckedMigration): # change, it's completely safe to run the operation after the code has deployed. is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0688_add_project_flag_high_priority_alerts"), ] diff --git a/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py b/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py index 6f09d09c5c55af..837d74f0dd3f44 100644 --- a/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py +++ b/src/sentry/migrations/0697_remove_monitor_owner_actor_id_db.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0696_remove_monitor_owner_actor_id"), ] diff --git a/src/sentry/migrations/0700_drop_fileid_controlavatar.py b/src/sentry/migrations/0700_drop_fileid_controlavatar.py index 0df209ea2d5de8..2e0607e567795c 100644 --- a/src/sentry/migrations/0700_drop_fileid_controlavatar.py +++ b/src/sentry/migrations/0700_drop_fileid_controlavatar.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0699_update_monitor_owner_team_id_cascsade"), ] diff --git a/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py b/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py index ff7a700e1aa93b..084f37f4732178 100644 --- a/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py +++ b/src/sentry/migrations/0707_alert_rule_activations_incidents_fk.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0706_grouphistory_userteam_backfill"), ] diff --git a/src/sentry/migrations/0709_alertrule_remove_owner_state.py b/src/sentry/migrations/0709_alertrule_remove_owner_state.py index 4eb13999d671c1..8123ffc5f80607 
100644 --- a/src/sentry/migrations/0709_alertrule_remove_owner_state.py +++ b/src/sentry/migrations/0709_alertrule_remove_owner_state.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0708_rule_remove_owner_state"), ] diff --git a/src/sentry/migrations/0710_grouphistory_remove_actor_state.py b/src/sentry/migrations/0710_grouphistory_remove_actor_state.py index b011bcbde62c45..59e4bdb5806122 100644 --- a/src/sentry/migrations/0710_grouphistory_remove_actor_state.py +++ b/src/sentry/migrations/0710_grouphistory_remove_actor_state.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0709_alertrule_remove_owner_state"), ] diff --git a/src/sentry/migrations/0713_team_remove_actor_state.py b/src/sentry/migrations/0713_team_remove_actor_state.py index 84a80d3fe1d029..f4c773750cfd7f 100644 --- a/src/sentry/migrations/0713_team_remove_actor_state.py +++ b/src/sentry/migrations/0713_team_remove_actor_state.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0712_create_tombstone_compound_indexes"), ] diff --git a/src/sentry/migrations/0714_drop_project_team_avatar.py b/src/sentry/migrations/0714_drop_project_team_avatar.py index 73e0438f7c58bb..7e3a5b6623364f 100644 --- a/src/sentry/migrations/0714_drop_project_team_avatar.py +++ b/src/sentry/migrations/0714_drop_project_team_avatar.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0713_team_remove_actor_state"), ] diff --git a/src/sentry/migrations/0715_remove_actormodel_constraints.py b/src/sentry/migrations/0715_remove_actormodel_constraints.py index cd87515e7ed464..9b9eb125d0a456 100644 --- a/src/sentry/migrations/0715_remove_actormodel_constraints.py +++ b/src/sentry/migrations/0715_remove_actormodel_constraints.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0714_drop_project_team_avatar"), ] diff --git a/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py b/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py index 59ca6933a64ed6..718648afa84e14 100644 --- a/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py +++ b/src/sentry/migrations/0719_querysubscription_timebox_column_deletion_db.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0718_delete_timebox_columns"), ] diff --git a/src/sentry/migrations/0720_remove_actor_columns.py b/src/sentry/migrations/0720_remove_actor_columns.py index 6b098b9ebddbcb..add6252e7b4805 100644 --- a/src/sentry/migrations/0720_remove_actor_columns.py +++ b/src/sentry/migrations/0720_remove_actor_columns.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0719_querysubscription_timebox_column_deletion_db"), ] diff --git a/src/sentry/migrations/0722_drop_sentryfunctions.py b/src/sentry/migrations/0722_drop_sentryfunctions.py index 8e0bcd9ece8416..bf0357c3fb98d3 100644 --- a/src/sentry/migrations/0722_drop_sentryfunctions.py +++ b/src/sentry/migrations/0722_drop_sentryfunctions.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = 
False + allow_run_sql = True + dependencies = [ ("sentry", "0721_delete_sentryfunctions"), ] diff --git a/src/sentry/migrations/0724_discover_saved_query_dataset.py b/src/sentry/migrations/0724_discover_saved_query_dataset.py index c8439d7e2261d2..0dbf432449a084 100644 --- a/src/sentry/migrations/0724_discover_saved_query_dataset.py +++ b/src/sentry/migrations/0724_discover_saved_query_dataset.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0723_project_template_models"), ] diff --git a/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py b/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py index 40120a0f0db007..eecb7007bcafa9 100644 --- a/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py +++ b/src/sentry/migrations/0725_create_sentry_groupsearchview_table.py @@ -26,6 +26,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0724_discover_saved_query_dataset"), ] diff --git a/src/sentry/migrations/0733_relocation_provenance.py b/src/sentry/migrations/0733_relocation_provenance.py index 9600cfa7b7f2c8..72626606c5ca6e 100644 --- a/src/sentry/migrations/0733_relocation_provenance.py +++ b/src/sentry/migrations/0733_relocation_provenance.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0732_add_span_attribute_extraction_rules"), ] diff --git a/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py b/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py index d3d14ff0930995..02ea18a44e3410 100644 --- a/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py +++ b/src/sentry/migrations/0737_add_discover_saved_query_dataset_source.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0736_rm_reprocessing_step2"), ] diff --git a/src/sentry/migrations/0738_rm_reprocessing_step3.py b/src/sentry/migrations/0738_rm_reprocessing_step3.py index ed23fac78edb47..de7cde8e7701bb 100644 --- a/src/sentry/migrations/0738_rm_reprocessing_step3.py +++ b/src/sentry/migrations/0738_rm_reprocessing_step3.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0737_add_discover_saved_query_dataset_source"), ] diff --git a/src/sentry/migrations/0741_metric_alert_anomaly_detection.py b/src/sentry/migrations/0741_metric_alert_anomaly_detection.py index 3e8d7f3aeda955..86eb1ba26927a5 100644 --- a/src/sentry/migrations/0741_metric_alert_anomaly_detection.py +++ b/src/sentry/migrations/0741_metric_alert_anomaly_detection.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0740_one_relocation_file_kind_per_relocation"), ] diff --git a/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py b/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py index 47159802485e5d..de09cf3ff0abee 100644 --- a/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py +++ b/src/sentry/migrations/0744_add_dataset_source_field_to_dashboards.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0743_backfill_broken_monitor_notification_setting_option"), 
] diff --git a/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py b/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py index 0c577fe5630a9e..9dad268ac936f7 100644 --- a/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py +++ b/src/sentry/migrations/0746_add_bitflags_to_hybrid_cloud.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0745_add_prevent_superuser_access_bitflag"), ] diff --git a/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py b/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py index fcb366e642b1df..3eb750ff9d496b 100644 --- a/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py +++ b/src/sentry/migrations/0750_disable_member_invite_in_hybrid_cloud.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0749_disable_member_invite"), ] diff --git a/src/sentry/migrations/0757_add_scopes_to_apiapplication.py b/src/sentry/migrations/0757_add_scopes_to_apiapplication.py index e0851898bf17ab..074c9ce950dc69 100644 --- a/src/sentry/migrations/0757_add_scopes_to_apiapplication.py +++ b/src/sentry/migrations/0757_add_scopes_to_apiapplication.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0756_grouprelease_represented_in_django"), ] diff --git a/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py b/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py index d778dbf7def58d..aba74fb7c586e3 100644 --- a/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py +++ b/src/sentry/migrations/0759_remove_spanattributeextraction_tables.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0758_remove_spanattributeextraction_models"), ] diff --git a/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py b/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py index 3d62e30b52a786..5383ebc03a650c 100644 --- a/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py +++ b/src/sentry/migrations/0760_remove_appstore_connect_integration_tables.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0759_remove_spanattributeextraction_tables"), ] diff --git a/src/sentry/migrations/0776_drop_group_score_in_database.py b/src/sentry/migrations/0776_drop_group_score_in_database.py index 53ed5f75b6f0d2..2a9f36dd7e98f8 100644 --- a/src/sentry/migrations/0776_drop_group_score_in_database.py +++ b/src/sentry/migrations/0776_drop_group_score_in_database.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = True + allow_run_sql = True + dependencies = [ ("sentry", "0775_add_dashboard_permissions_model"), ] diff --git a/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py b/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py index 32912ed3a81cfc..bae928a544df7d 100644 --- a/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py +++ b/src/sentry/migrations/0785_add_new_field_to_dashboard_permissions.py @@ -23,6 +23,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0784_remove_broadcasts_cta_column"), 
] diff --git a/src/sentry/migrations/0786_drop_broadcasts_cta_column.py b/src/sentry/migrations/0786_drop_broadcasts_cta_column.py index fd14d73769726a..c8cac50728a022 100644 --- a/src/sentry/migrations/0786_drop_broadcasts_cta_column.py +++ b/src/sentry/migrations/0786_drop_broadcasts_cta_column.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0785_add_new_field_to_dashboard_permissions"), ] diff --git a/src/sentry/migrations/0790_delete_dashboard_perms_col.py b/src/sentry/migrations/0790_delete_dashboard_perms_col.py new file mode 100644 index 00000000000000..38e17ab6fda1da --- /dev/null +++ b/src/sentry/migrations/0790_delete_dashboard_perms_col.py @@ -0,0 +1,44 @@ +# Generated by Django 5.1.1 on 2024-11-14 16:45 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + allow_run_sql = True + + dependencies = [ + ("sentry", "0789_add_unique_constraint_to_rollbackorganization"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + """ + ALTER TABLE "sentry_dashboardpermissions" DROP COLUMN "is_creator_only_editable"; + """, + reverse_sql=""" + ALTER TABLE "sentry_dashboardpermissions" ADD COLUMN "is_creator_only_editable" bool NULL; + """, + hints={"tables": ["sentry_dashboardpermissions"]}, + ) + ], + state_operations=[], + ) + ] diff --git a/src/sentry/migrations/0791_add_hashing_metadata_to_grouphash_metadata.py b/src/sentry/migrations/0791_add_hashing_metadata_to_grouphash_metadata.py new file mode 100644 index 00000000000000..dd11284266f888 --- /dev/null +++ b/src/sentry/migrations/0791_add_hashing_metadata_to_grouphash_metadata.py @@ -0,0 +1,34 @@ +# Generated by Django 5.1.1 on 2024-11-14 22:09 + +from django.db import migrations + +import sentry.db.models.fields.jsonfield +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. 
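Migration 0790 above drops `is_creator_only_editable` with raw SQL inside `SeparateDatabaseAndState` and leaves `state_operations` empty, presumably because the field had already been removed from Django's model state in an earlier migration not shown here. A sketch of the two phases of that pattern, as operation objects only; the model and column names are copied from 0790, the phase-one operation is an assumption about what the earlier migration looked like:

```python
from django.db import migrations

# Phase 1 (an earlier migration, presumably): remove the field from Django's
# model state so application code stops referencing it, but keep the column.
state_only_phase = migrations.SeparateDatabaseAndState(
    state_operations=[
        migrations.RemoveField(
            model_name="dashboardpermissions",
            name="is_creator_only_editable",
        ),
    ],
    database_operations=[],
)

# Phase 2 (what 0790 does): drop the real column with SQL; there is no state
# change left to make, hence the empty state_operations list.
database_phase = migrations.SeparateDatabaseAndState(
    database_operations=[
        migrations.RunSQL(
            'ALTER TABLE "sentry_dashboardpermissions" DROP COLUMN "is_creator_only_editable";',
            reverse_sql=(
                'ALTER TABLE "sentry_dashboardpermissions" '
                'ADD COLUMN "is_creator_only_editable" bool NULL;'
            ),
            hints={"tables": ["sentry_dashboardpermissions"]},
        ),
    ],
    state_operations=[],
)
```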
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0790_delete_dashboard_perms_col"), + ] + + operations = [ + migrations.AddField( + model_name="grouphashmetadata", + name="hashing_metadata", + field=sentry.db.models.fields.jsonfield.JSONField(null=True), + ), + ] diff --git a/src/sentry/migrations/0792_add_unique_index_apiauthorization.py b/src/sentry/migrations/0792_add_unique_index_apiauthorization.py new file mode 100644 index 00000000000000..905f98324f24c6 --- /dev/null +++ b/src/sentry/migrations/0792_add_unique_index_apiauthorization.py @@ -0,0 +1,48 @@ +# Generated by Django 5.1.1 on 2024-11-19 17:37 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0791_add_hashing_metadata_to_grouphash_metadata"), + ] + + operations = [ + migrations.AddConstraint( + model_name="apiauthorization", + constraint=models.UniqueConstraint( + condition=models.Q(("organization_id__isnull", True)), + fields=("user", "application"), + name="apiauthorization_user_app", + ), + ), + migrations.AddConstraint( + model_name="apiauthorization", + constraint=models.UniqueConstraint( + condition=models.Q(("organization_id__isnull", False)), + fields=("user", "application", "organization_id"), + name="apiauthorization_user_app_org", + ), + ), + migrations.AlterUniqueTogether( + name="apiauthorization", + unique_together=set(), + ), + ] diff --git a/src/sentry/migrations/0793_remove_db_constraint_alert_rule_exclusion.py b/src/sentry/migrations/0793_remove_db_constraint_alert_rule_exclusion.py new file mode 100644 index 00000000000000..8c673571a87f4d --- /dev/null +++ b/src/sentry/migrations/0793_remove_db_constraint_alert_rule_exclusion.py @@ -0,0 +1,59 @@ +# Generated by Django 5.1.1 on 2024-11-22 17:43 + +import django.db.models.deletion +from django.db import migrations + +import sentry.db.models.fields.foreignkey +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. 
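Migration 0792 above replaces the old `unique_together` on `ApiAuthorization` with two conditional `UniqueConstraint`s, so that org-scoped and org-less authorizations are deduplicated separately; the matching model change appears further down in this diff. A simplified model-level sketch of the same idea, using a stand-in model and constraint names:

```python
from django.db import models
from django.db.models import Q


class ApiAuthorizationSketch(models.Model):
    user_id = models.BigIntegerField()
    application_id = models.BigIntegerField()
    organization_id = models.BigIntegerField(null=True)

    class Meta:
        app_label = "example"  # stand-in app
        constraints = [
            # At most one org-less authorization per (user, application).
            models.UniqueConstraint(
                fields=["user_id", "application_id"],
                condition=Q(organization_id__isnull=True),
                name="sketch_user_app",
            ),
            # At most one authorization per (user, application, organization).
            models.UniqueConstraint(
                fields=["user_id", "application_id", "organization_id"],
                condition=Q(organization_id__isnull=False),
                name="sketch_user_app_org",
            ),
        ]
```

A plain `unique_together` could not express this: keying on (user, application) alone would forbid the same pair across different organizations, while including the organization column would let org-less rows duplicate freely, since SQL unique indexes treat NULLs as distinct.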
+ # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0792_add_unique_index_apiauthorization"), + ] + + operations = [ + migrations.AlterField( + model_name="alertruleexcludedprojects", + name="alert_rule", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.alertrule", + ), + ), + migrations.AlterField( + model_name="alertruletriggerexclusion", + name="alert_rule_trigger", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + on_delete=django.db.models.deletion.CASCADE, + related_name="exclusions", + to="sentry.alertruletrigger", + ), + ), + migrations.AlterField( + model_name="alertruletriggerexclusion", + name="query_subscription", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.querysubscription", + ), + ), + ] diff --git a/src/sentry/migrations/0794_rm_excluded_included_projects_alertrule.py b/src/sentry/migrations/0794_rm_excluded_included_projects_alertrule.py new file mode 100644 index 00000000000000..0add9ea8dd47c6 --- /dev/null +++ b/src/sentry/migrations/0794_rm_excluded_included_projects_alertrule.py @@ -0,0 +1,45 @@ +# Generated by Django 5.1.1 on 2024-11-22 19:12 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
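Migration 0793 above switches several foreign keys to `db_constraint=False`: the ORM keeps treating them as relations (attribute access, joins, Python-level cascades), but the database no longer enforces referential integrity, which is commonly done before the tables involved are dropped; these exclusion models are in fact deleted a few migrations later in this diff. A plain-Django sketch of the difference, with hypothetical stand-in models:

```python
from django.db import models


class AlertRuleSketch(models.Model):
    class Meta:
        app_label = "example"  # stand-in app


class ExclusionSketch(models.Model):
    # Still a relation to the ORM: .alert_rule access, joins, and on_delete
    # all work in Python. But no FOREIGN KEY constraint exists in the
    # database, so either side can later be dropped without the database
    # rejecting the change.
    alert_rule = models.ForeignKey(
        AlertRuleSketch,
        on_delete=models.CASCADE,
        db_constraint=False,
        db_index=False,
    )

    class Meta:
        app_label = "example"
```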
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0793_remove_db_constraint_alert_rule_exclusion"), + ] + + operations = [ + SafeRemoveField( + model_name="alertrule", + name="excluded_projects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + migrations.AlterField( + model_name="alertrule", + name="include_all_projects", + field=models.BooleanField(default=False, null=True), + ), + SafeRemoveField( + model_name="alertrule", + name="include_all_projects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/src/sentry/migrations/0795_drop_included_excluded_projects.py b/src/sentry/migrations/0795_drop_included_excluded_projects.py new file mode 100644 index 00000000000000..772097d94199f3 --- /dev/null +++ b/src/sentry/migrations/0795_drop_included_excluded_projects.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-11-25 17:33 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.fields import SafeRemoveField +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0794_rm_excluded_included_projects_alertrule"), + ] + + operations = [ + SafeRemoveField( + model_name="alertrule", name="excluded_projects", deletion_action=DeletionAction.DELETE + ), + SafeRemoveField( + model_name="alertrule", + name="include_all_projects", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/src/sentry/migrations/0796_rm_excluded_projects_triggers.py b/src/sentry/migrations/0796_rm_excluded_projects_triggers.py new file mode 100644 index 00000000000000..710cea5d0cc5f7 --- /dev/null +++ b/src/sentry/migrations/0796_rm_excluded_projects_triggers.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-11-25 20:06 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. 
Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0795_drop_included_excluded_projects"), + ] + + operations = [ + SafeDeleteModel( + name="AlertRuleExcludedProjects", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + SafeDeleteModel( + name="AlertRuleTriggerExclusion", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/src/sentry/migrations/0797_drop_excluded_project_triggers.py b/src/sentry/migrations/0797_drop_excluded_project_triggers.py new file mode 100644 index 00000000000000..d410dcf3729a51 --- /dev/null +++ b/src/sentry/migrations/0797_drop_excluded_project_triggers.py @@ -0,0 +1,30 @@ +# Generated by Django 5.1.1 on 2024-11-26 18:34 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0796_rm_excluded_projects_triggers"), + ] + + operations = [ + SafeDeleteModel(name="AlertRuleExcludedProjects", deletion_action=DeletionAction.DELETE), + SafeDeleteModel(name="AlertRuleTriggerExclusion", deletion_action=DeletionAction.DELETE), + ] diff --git a/src/sentry/migrations/0798_add_favorite_dashboard_col.py b/src/sentry/migrations/0798_add_favorite_dashboard_col.py new file mode 100644 index 00000000000000..ab274624653ed0 --- /dev/null +++ b/src/sentry/migrations/0798_add_favorite_dashboard_col.py @@ -0,0 +1,61 @@ +# Generated by Django 5.1.1 on 2024-12-01 08:26 + +import django.db.models.deletion +from django.db import migrations, models + +import sentry.db.models.fields.bounded +import sentry.db.models.fields.foreignkey +import sentry.db.models.fields.hybrid_cloud_foreign_key +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. 
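Migrations 0794 through 0797 above remove fields and models in two explicit steps: a `MOVE_TO_PENDING` operation in one migration, then a `DELETE` in a later one. That appears to be the framework's guard against dropping schema that deployed code may still reference: the first step retires the field or model at the Django level, and the second performs the actual drop once nothing uses it. A sketch of the field variant with hypothetical names; the model variant uses `SafeDeleteModel` the same way:

```python
from sentry.new_migrations.migrations import CheckedMigration
from sentry.new_migrations.monkey.fields import SafeRemoveField
from sentry.new_migrations.monkey.state import DeletionAction


class Migration(CheckedMigration):
    is_post_deployment = False

    dependencies = [
        ("sentry", "0xxx_previous_migration"),  # hypothetical
    ]

    operations = [
        # Step 1: mark the column as pending deletion (no destructive change yet).
        SafeRemoveField(
            model_name="examplemodel",  # hypothetical
            name="legacy_column",  # hypothetical
            deletion_action=DeletionAction.MOVE_TO_PENDING,
        ),
        # Step 2 lives in a follow-up migration, mirroring 0795 above:
        # SafeRemoveField(model_name="examplemodel", name="legacy_column",
        #                 deletion_action=DeletionAction.DELETE)
    ]
```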
+ # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0797_drop_excluded_project_triggers"), + ] + + operations = [ + migrations.CreateModel( + name="DashboardFavoriteUser", + fields=[ + ( + "id", + sentry.db.models.fields.bounded.BoundedBigAutoField( + primary_key=True, serialize=False + ), + ), + ("date_updated", models.DateTimeField(auto_now=True)), + ("date_added", models.DateTimeField(auto_now_add=True)), + ( + "user_id", + sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, on_delete="CASCADE" + ), + ), + ( + "dashboard", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="sentry.dashboard" + ), + ), + ], + options={ + "db_table": "sentry_dashboardfavoriteuser", + "unique_together": {("user_id", "dashboard")}, + }, + ), + ] diff --git a/src/sentry/migrations/0799_cron_incident_index.py b/src/sentry/migrations/0799_cron_incident_index.py new file mode 100644 index 00000000000000..73f4d4c8972715 --- /dev/null +++ b/src/sentry/migrations/0799_cron_incident_index.py @@ -0,0 +1,34 @@ +# Generated by Django 5.1.1 on 2024-11-27 19:11 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
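Migration 0798 above introduces the `DashboardFavoriteUser` join table; the model itself is added to `src/sentry/models/dashboard.py` later in this diff. Its `user_id` column is a `HybridCloudForeignKey`, that is, a plain integer reference to a user living in another silo, with the `"CASCADE"` behaviour apparently handled by the application rather than by a database constraint. A small illustrative query against the new table; the helper function is hypothetical, not part of the diff:

```python
from sentry.models.dashboard import DashboardFavoriteUser


def favorite_dashboard_ids(user_id: int) -> list[int]:
    # user_id is a plain integer column (HybridCloudForeignKey), so this is
    # an ordinary indexed lookup with no join to a user table.
    return list(
        DashboardFavoriteUser.objects.filter(user_id=user_id).values_list(
            "dashboard_id", flat=True
        )
    )
```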
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = True + + dependencies = [ + ("sentry", "0798_add_favorite_dashboard_col"), + ] + + operations = [ + migrations.AddIndex( + model_name="monitorcheckin", + index=models.Index( + fields=["status", "date_added"], name="sentry_moni_status_dd2d85_idx" + ), + ), + ] diff --git a/src/sentry/migrations/0800_rm_incidentseen_incidentsubscription.py b/src/sentry/migrations/0800_rm_incidentseen_incidentsubscription.py new file mode 100644 index 00000000000000..5a77729743455f --- /dev/null +++ b/src/sentry/migrations/0800_rm_incidentseen_incidentsubscription.py @@ -0,0 +1,72 @@ +# Generated by Django 5.1.1 on 2024-12-03 18:48 +import django +from django.db import migrations + +import sentry +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0799_cron_incident_index"), + ] + + operations = [ + migrations.AlterField( + model_name="incidentseen", + name="incident", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.incident", + ), + ), + migrations.AlterField( + model_name="incidentseen", + name="user_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=False, null=True, on_delete="CASCADE" + ), + ), + migrations.AlterField( + model_name="incidentsubscription", + name="incident", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + db_constraint=False, + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to="sentry.incident", + ), + ), + migrations.AlterField( + model_name="incidentsubscription", + name="user_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="CASCADE" + ), + ), + SafeDeleteModel( + name="IncidentSeen", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + SafeDeleteModel( + name="IncidentSubscription", + deletion_action=DeletionAction.MOVE_TO_PENDING, + ), + ] diff --git a/src/sentry/migrations/0801_drop_incidentseen_incidentsubscription.py b/src/sentry/migrations/0801_drop_incidentseen_incidentsubscription.py new file mode 100644 index 00000000000000..c53fd1533be6b5 --- /dev/null +++ b/src/sentry/migrations/0801_drop_incidentseen_incidentsubscription.py @@ -0,0 +1,36 @@ +# Generated by Django 5.1.1 on 2024-12-03 21:47 + +from sentry.new_migrations.migrations import CheckedMigration +from sentry.new_migrations.monkey.models import SafeDeleteModel +from sentry.new_migrations.monkey.state import DeletionAction + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0800_rm_incidentseen_incidentsubscription"), + ] + + operations = [ + SafeDeleteModel( + name="IncidentSeen", + deletion_action=DeletionAction.DELETE, + ), + SafeDeleteModel( + name="IncidentSubscription", + deletion_action=DeletionAction.DELETE, + ), + ] diff --git a/src/sentry/migrations/0802_remove_grouping_auto_update_option.py b/src/sentry/migrations/0802_remove_grouping_auto_update_option.py new file mode 100644 index 00000000000000..9cd59ae9ceb493 --- /dev/null +++ b/src/sentry/migrations/0802_remove_grouping_auto_update_option.py @@ -0,0 +1,44 @@ +# Generated by Django 5.1.1 on 2024-12-04 17:57 + +from django.apps.registry import Apps +from django.db import migrations +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + +from sentry.new_migrations.migrations import CheckedMigration + + +def remove_grouping_auto_update_option( + apps: Apps, _schema_editor: BaseDatabaseSchemaEditor +) -> None: + ProjectOption = apps.get_model("sentry", "ProjectOption") + + for option in ProjectOption.objects.filter(key="sentry:grouping_auto_update"): + option.delete() + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
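The helper defined just above for migration 0802 fetches `ProjectOption` through `apps.get_model(...)` rather than importing the model directly: data migrations have to use the historical model so they see the schema as it exists at that point in the migration graph, not whatever the class looks like in current code. The `RunPython` wiring follows in the diff; a condensed sketch of the whole pattern:

```python
from django.apps.registry import Apps
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor

from sentry.new_migrations.migrations import CheckedMigration


def remove_grouping_auto_update_option(
    apps: Apps, _schema_editor: BaseDatabaseSchemaEditor
) -> None:
    # Historical model, matching the schema at this migration, rather than a
    # direct `from sentry.models import ProjectOption` which may have drifted.
    ProjectOption = apps.get_model("sentry", "ProjectOption")
    for option in ProjectOption.objects.filter(key="sentry:grouping_auto_update"):
        option.delete()


class Migration(CheckedMigration):
    is_post_deployment = False

    dependencies = [
        ("sentry", "0801_drop_incidentseen_incidentsubscription"),
    ]

    operations = [
        migrations.RunPython(
            remove_grouping_auto_update_option,
            migrations.RunPython.noop,  # deleting an option is not reversed
            hints={"tables": ["sentry_projectoptions"]},
        ),
    ]
```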
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0801_drop_incidentseen_incidentsubscription"), + ] + + operations = [ + migrations.RunPython( + remove_grouping_auto_update_option, + migrations.RunPython.noop, + hints={"tables": ["sentry_projectoptions"]}, + ), + ] diff --git a/src/sentry/models/__init__.py b/src/sentry/models/__init__.py index 1a18f4dc34799e..61a973fed1e262 100644 --- a/src/sentry/models/__init__.py +++ b/src/sentry/models/__init__.py @@ -60,8 +60,6 @@ from .grouptombstone import * # NOQA from .importchunk import * # NOQA from .latestreporeleaseenvironment import * # NOQA -from .notificationmessage import * # NOQA -from .notificationsettingoption import * # NOQA from .notificationsettingprovider import * # NOQA from .options import * # NOQA from .organization import * # NOQA diff --git a/src/sentry/models/apiauthorization.py b/src/sentry/models/apiauthorization.py index 512b5bb85f5145..91a6ffe600fd77 100644 --- a/src/sentry/models/apiauthorization.py +++ b/src/sentry/models/apiauthorization.py @@ -1,4 +1,5 @@ from django.db import models +from django.db.models import Q from django.utils import timezone from sentry.backup.scopes import RelocationScope @@ -35,7 +36,18 @@ class ApiAuthorization(Model, HasApiScopes): class Meta: app_label = "sentry" db_table = "sentry_apiauthorization" - unique_together = (("user", "application"),) + constraints = [ + models.UniqueConstraint( + fields=["user", "application"], + name="apiauthorization_user_app", + condition=Q(organization_id__isnull=True), + ), + models.UniqueConstraint( + fields=["user", "application", "organization_id"], + name="apiauthorization_user_app_org", + condition=Q(organization_id__isnull=False), + ), + ] __repr__ = sane_repr("user_id", "application_id") diff --git a/src/sentry/models/counter.py b/src/sentry/models/counter.py index e42a303f214244..5a967fedb8dc69 100644 --- a/src/sentry/models/counter.py +++ b/src/sentry/models/counter.py @@ -45,8 +45,7 @@ def increment_project_counter(project, delta=1, using="default"): # To prevent the statement_timeout leaking into the session we need to use # set local which can be used only within a transaction with transaction.atomic(using=using): - cur = connections[using].cursor() - try: + with connections[using].cursor() as cur: statement_timeout = None if settings.SENTRY_PROJECT_COUNTER_STATEMENT_TIMEOUT: # WARNING: This is not a proper fix and should be removed once @@ -84,9 +83,6 @@ def increment_project_counter(project, delta=1, using="default"): return project_counter - finally: - cur.close() - # this must be idempotent because it seems to execute twice # (at least during test runs) diff --git a/src/sentry/models/dashboard.py b/src/sentry/models/dashboard.py index d4d2ee7c38a753..21939517fb9bde 100644 --- a/src/sentry/models/dashboard.py +++ b/src/sentry/models/dashboard.py @@ -9,6 +9,7 @@ from sentry import features from sentry.backup.scopes import RelocationScope from sentry.db.models import FlexibleForeignKey, Model, region_silo_model, sane_repr +from sentry.db.models.base import DefaultFieldsModel from sentry.db.models.fields.bounded import BoundedBigIntegerField from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.fields.jsonfield import JSONField @@ -29,6 +30,19 @@ class Meta: unique_together = (("project", "dashboard"),) +@region_silo_model +class 
DashboardFavoriteUser(DefaultFieldsModel): + __relocation_scope__ = RelocationScope.Organization + + user_id = HybridCloudForeignKey("sentry.User", on_delete="CASCADE") + dashboard = FlexibleForeignKey("sentry.Dashboard", on_delete=models.CASCADE) + + class Meta: + app_label = "sentry" + db_table = "sentry_dashboardfavoriteuser" + unique_together = (("user_id", "dashboard"),) + + @region_silo_model class Dashboard(Model): """ @@ -55,6 +69,30 @@ class Meta: __repr__ = sane_repr("organization", "title") + @property + def favorited_by(self): + user_ids = DashboardFavoriteUser.objects.filter(dashboard=self).values_list( + "user_id", flat=True + ) + return user_ids + + @favorited_by.setter + def favorited_by(self, user_ids): + from django.db import router, transaction + + existing_user_ids = DashboardFavoriteUser.objects.filter(dashboard=self).values_list( + "user_id", flat=True + ) + with transaction.atomic(using=router.db_for_write(DashboardFavoriteUser)): + newly_favourited = [ + DashboardFavoriteUser(dashboard=self, user_id=user_id) + for user_id in set(user_ids) - set(existing_user_ids) + ] + DashboardFavoriteUser.objects.filter( + dashboard=self, user_id__in=set(existing_user_ids) - set(user_ids) + ).delete() + DashboardFavoriteUser.objects.bulk_create(newly_favourited) + @staticmethod def get_prebuilt_list(organization, user, title_query=None): query = list( @@ -156,6 +194,8 @@ def get_prebuilt_dashboards(organization, user) -> list[dict[str, Any]]: "title": "General", "dateCreated": "", "createdBy": "", + "permissions": {"isEditableByEveryone": True, "teamsWithEditAccess": []}, + "isFavorited": False, "widgets": [ { "title": "Number of Errors", diff --git a/src/sentry/models/debugfile.py b/src/sentry/models/debugfile.py index ea0d64793ac8b8..c7572ddccfeb48 100644 --- a/src/sentry/models/debugfile.py +++ b/src/sentry/models/debugfile.py @@ -338,7 +338,10 @@ def create_dif_from_id( return dif, True -def _analyze_progard_filename(filename: str) -> str | None: +def _analyze_progard_filename(filename: str | None) -> str | None: + if filename is None: + return None + match = _proguard_file_re.search(filename) if match is None: return None @@ -474,9 +477,9 @@ def detect_dif_from_path( :raises BadDif: If the file is not a valid DIF. """ - # proguard files (proguard/UUID.txt) or + # Proguard files have a path or a name like (proguard/UUID.txt) or # (proguard/mapping-UUID.txt). 
- proguard_id = _analyze_progard_filename(path) + proguard_id = _analyze_progard_filename(path) or _analyze_progard_filename(name) if proguard_id is not None: data = {"features": ["mapping"]} return [ diff --git a/src/sentry/models/groupassignee.py b/src/sentry/models/groupassignee.py index eeea257073f4a5..b4479012ecb2a9 100644 --- a/src/sentry/models/groupassignee.py +++ b/src/sentry/models/groupassignee.py @@ -33,7 +33,10 @@ class GroupAssigneeManager(BaseManager["GroupAssignee"]): def get_assigned_to_data( - self, assigned_to: Team | RpcUser, assignee_type: str, extra: dict[str, str] | None = None + self, + assigned_to: Team | RpcUser | User, + assignee_type: str, + extra: dict[str, str] | None = None, ) -> dict[str, Any]: data = { "assignee": str(assigned_to.id), @@ -45,7 +48,7 @@ def get_assigned_to_data( return data - def get_assignee_data(self, assigned_to: Team | RpcUser) -> tuple[str, str, str]: + def get_assignee_data(self, assigned_to: Team | RpcUser | User) -> tuple[str, str, str]: from sentry.models.team import Team from sentry.users.models.user import User from sentry.users.services.user import RpcUser @@ -130,7 +133,7 @@ def remove_old_assignees( def assign( self, group: Group, - assigned_to: Team | RpcUser, + assigned_to: Team | RpcUser | User, acting_user: User | None = None, create_only: bool = False, extra: dict[str, str] | None = None, diff --git a/src/sentry/models/grouphashmetadata.py b/src/sentry/models/grouphashmetadata.py index 0f6d3bfb90d0fb..f8ad100527d748 100644 --- a/src/sentry/models/grouphashmetadata.py +++ b/src/sentry/models/grouphashmetadata.py @@ -5,6 +5,8 @@ from sentry.db.models import Model, region_silo_model from sentry.db.models.base import sane_repr from sentry.db.models.fields.foreignkey import FlexibleForeignKey +from sentry.db.models.fields.jsonfield import JSONField +from sentry.types.grouphash_metadata import HashingMetadata # The overall grouping method used @@ -55,7 +57,15 @@ class GroupHashMetadata(Model): # Most recent config to produce this hash latest_grouping_config = models.CharField(null=True) # The primary grouping method (message, stacktrace, fingerprint, etc.) - hash_basis = models.CharField(choices=HashBasis, null=True) + hash_basis: models.Field[HashBasis | None, HashBasis | None] = models.CharField( + choices=HashBasis, null=True + ) + # Metadata about the inputs to the hashing process and the hashing process itself (what + # fingerprinting rules were matched? did we parameterize the message? etc.). For the specific + # data stored, see the class definitions of the `HashingMetadata` subtypes. 
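The explicit `models.Field[HashBasis | None, HashBasis | None]` annotation on `hash_basis` above, and the matching one on the `hashing_metadata` field that follows, looks like a type-checker aid: under django-stubs, the two parameters are the type accepted when assigning to the attribute and the type returned when reading it, which a bare `models.CharField(...)` declaration cannot express for a narrower alias such as `HashBasis`. A minimal illustration under that assumption, with a hypothetical model; the `from __future__` import keeps the annotation out of runtime evaluation:

```python
from __future__ import annotations

from django.db import models


class ExampleRow(models.Model):
    # First type parameter: what may be assigned to row.payload.
    # Second type parameter: what row.payload evaluates to when read.
    payload: models.Field[dict | None, dict | None] = models.JSONField(null=True)

    class Meta:
        app_label = "example"  # stand-in app
```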
+ hashing_metadata: models.Field[HashingMetadata | None, HashingMetadata | None] = JSONField( + null=True + ) # SEER diff --git a/src/sentry/models/groupinbox.py b/src/sentry/models/groupinbox.py index 9f91b1218b79ae..a4272cb1ad2cdf 100644 --- a/src/sentry/models/groupinbox.py +++ b/src/sentry/models/groupinbox.py @@ -9,6 +9,7 @@ from sentry.backup.scopes import RelocationScope from sentry.db.models import FlexibleForeignKey, JSONField, Model, region_silo_model from sentry.models.activity import Activity +from sentry.models.group import Group from sentry.models.grouphistory import ( GroupHistoryStatus, bulk_record_group_history, @@ -93,7 +94,7 @@ def add_group_to_inbox(group, reason, reason_details=None): return group_inbox -def remove_group_from_inbox(group, action=None, user=None, referrer=None): +def remove_group_from_inbox(group: Group, action=None, user=None, referrer=None): try: group_inbox = GroupInbox.objects.get(group=group) group_inbox.delete() diff --git a/src/sentry/models/notificationaction.py b/src/sentry/models/notificationaction.py index 87f46b3cdb4920..2cf637182639ab 100644 --- a/src/sentry/models/notificationaction.py +++ b/src/sentry/models/notificationaction.py @@ -1,322 +1,17 @@ -from __future__ import annotations - -import logging -from abc import ABCMeta, abstractmethod -from collections.abc import Mapping, MutableMapping -from enum import IntEnum -from typing import TYPE_CHECKING, Any, TypeVar - -from django.db import models - -from sentry.backup.scopes import RelocationScope -from sentry.db.models import FlexibleForeignKey, Model, sane_repr -from sentry.db.models.base import region_silo_model -from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey -from sentry.integrations.services.integration import RpcIntegration -from sentry.integrations.types import ExternalProviders -from sentry.models.organization import Organization - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from sentry.api.serializers.rest_framework.notification_action import ( - NotificationActionInputData, - ) - - -class FlexibleIntEnum(IntEnum): - @classmethod - def as_choices(cls) -> tuple[tuple[int, str], ...]: - raise NotImplementedError - - @classmethod - def get_name(cls, value: int) -> str | None: - return dict(cls.as_choices()).get(value) - - @classmethod - def get_value(cls, name: str) -> int | None: - invert_choices = {v: k for k, v in cls.as_choices()} - return invert_choices.get(name) - - -class ActionService(FlexibleIntEnum): - """ - The available services to fire action notifications - """ - - EMAIL = 0 - PAGERDUTY = 1 - SLACK = 2 - MSTEAMS = 3 - SENTRY_APP = 4 - SENTRY_NOTIFICATION = 5 # Use personal notification platform (src/sentry/notifications) - OPSGENIE = 6 - DISCORD = 7 - - @classmethod - def as_choices(cls) -> tuple[tuple[int, str], ...]: - assert ExternalProviders.EMAIL.name is not None - assert ExternalProviders.PAGERDUTY.name is not None - assert ExternalProviders.SLACK.name is not None - assert ExternalProviders.MSTEAMS.name is not None - assert ExternalProviders.OPSGENIE.name is not None - assert ExternalProviders.DISCORD.name is not None - return ( - (cls.EMAIL.value, ExternalProviders.EMAIL.name), - (cls.PAGERDUTY.value, ExternalProviders.PAGERDUTY.name), - (cls.SLACK.value, ExternalProviders.SLACK.name), - (cls.MSTEAMS.value, ExternalProviders.MSTEAMS.name), - (cls.SENTRY_APP.value, "sentry_app"), - (cls.SENTRY_NOTIFICATION.value, "sentry_notification"), - (cls.OPSGENIE.value, ExternalProviders.OPSGENIE.name), - 
(cls.DISCORD.value, ExternalProviders.DISCORD.name), - ) - - -class ActionTarget(FlexibleIntEnum): - """ - Explains the contents of target_identifier - """ - - # The target_identifier is a direct reference used by the service (e.g. email address, slack channel id) - SPECIFIC = 0 - # The target_identifier is an id from the User model in Sentry - USER = 1 - # The target_identifier is an id from the Team model in Sentry - TEAM = 2 - # The target_identifier is an id from the SentryApp model in Sentry - SENTRY_APP = 3 - - @classmethod - def as_choices(cls) -> tuple[tuple[int, str], ...]: - return ( - (cls.SPECIFIC.value, "specific"), - (cls.USER.value, "user"), - (cls.TEAM.value, "team"), - (cls.SENTRY_APP.value, "sentry_app"), - ) - - -class ActionTrigger(FlexibleIntEnum): - """ - The possible sources of action notifications. - Items prefixed with 'GS_' have registrations in getsentry. - """ - - AUDIT_LOG = 0 - GS_SPIKE_PROTECTION = 100 - - @classmethod - def as_choices(cls) -> tuple[tuple[int, str], ...]: - return ( - (cls.AUDIT_LOG.value, "audit-log"), - (cls.GS_SPIKE_PROTECTION.value, "spike-protection"), - ) - - -class ActionRegistration(metaclass=ABCMeta): - def __init__(self, action: NotificationAction): - self.action = action - - @abstractmethod - def fire(self, data: Any) -> None: - """ - Handles delivering the message via the service from the action and specified data. - """ - - @classmethod - def validate_action(cls, data: NotificationActionInputData) -> None: - """ - Optional function to provide increased validation when saving incoming NotificationActions. See NotificationActionSerializer. - - :param data: The input data sent to the API before updating/creating NotificationActions - :raises serializers.ValidationError: Indicates that the incoming action would apply to this registration but is not valid. - """ - - @classmethod - def serialize_available( - cls, organization: Organization, integrations: list[RpcIntegration] | None = None - ) -> list[Any]: - """ - Optional class method to serialize this registration's available actions to an organization. See NotificationActionsAvailableEndpoint. - - :param organization: The relevant organization which will receive the serialized available action in their response. - :param integrations: A list of integrations which are set up for the organization. - """ - return [] - - -ActionRegistrationT = TypeVar("ActionRegistrationT", bound=ActionRegistration) - - -class AbstractNotificationAction(Model): - """ - Abstract model meant to retroactively create a contract for notification actions - (e.g. metric alerts, spike protection, etc.) - """ - - integration_id = HybridCloudForeignKey( - "sentry.Integration", blank=True, null=True, on_delete="CASCADE" - ) - sentry_app_id = HybridCloudForeignKey( - "sentry.SentryApp", blank=True, null=True, on_delete="CASCADE" - ) - - # The type of service which will receive the action notification (e.g. slack, pagerduty, etc.) - type = models.SmallIntegerField(choices=ActionService.as_choices()) - # The type of target which the service uses for routing (e.g. user, team) - target_type = models.SmallIntegerField(choices=ActionTarget.as_choices()) - # Identifier of the target for the given service (e.g. slack channel id, pagerdutyservice id) - target_identifier = models.TextField(null=True) - # User-friendly name of the target (e.g. 
#slack-channel, pagerduty-service-name) - target_display = models.TextField(null=True) - - @property - def service_type(self) -> int: - """ - Used for disambiguity of self.type - """ - return self.type - - class Meta: - abstract = True - - -@region_silo_model -class NotificationActionProject(Model): - __relocation_scope__ = {RelocationScope.Global, RelocationScope.Organization} - - project = FlexibleForeignKey("sentry.Project") - action = FlexibleForeignKey("sentry.NotificationAction") - - class Meta: - app_label = "sentry" - db_table = "sentry_notificationactionproject" - - def get_relocation_scope(self) -> RelocationScope: - action = NotificationAction.objects.get(id=self.action_id) - return action.get_relocation_scope() - - -@region_silo_model -class NotificationAction(AbstractNotificationAction): - """ - Generic notification action model to programmatically route depending on the trigger (or source) for the notification - """ - - __relocation_scope__ = {RelocationScope.Global, RelocationScope.Organization} - __repr__ = sane_repr("id", "trigger_type", "service_type", "target_display") - - _trigger_types: tuple[tuple[int, str], ...] = ActionTrigger.as_choices() - _registry: MutableMapping[str, type[ActionRegistration]] = {} - - organization = FlexibleForeignKey("sentry.Organization") - projects = models.ManyToManyField("sentry.Project", through=NotificationActionProject) - - # The type of trigger which controls when the actions will go off (e.g. 'spike-protection') - trigger_type = models.SmallIntegerField(choices=_trigger_types) - - class Meta: - app_label = "sentry" - db_table = "sentry_notificationaction" - - @classmethod - def register_action(cls, trigger_type: int, service_type: int, target_type: int): - """ - Register a new trigger/service/target combination for NotificationActions. - For example, allowing audit-logs (trigger) to fire actions to slack (service) channels (target) - - :param trigger_type: The registered trigger_type integer value saved to the database - :param service_type: The service_type integer value which must exist on ActionService - :param target_type: The target_type integer value which must exist on ActionTarget - :param registration: A subclass of `ActionRegistration`. - """ - - def inner(registration: type[ActionRegistrationT]) -> type[ActionRegistrationT]: - if trigger_type not in dict(ActionTrigger.as_choices()): - raise AttributeError( - f"Trigger type of {trigger_type} is not registered. Modify ActionTrigger." - ) - - if service_type not in dict(ActionService.as_choices()): - raise AttributeError( - f"Service type of {service_type} is not registered. Modify ActionService." - ) - - if target_type not in dict(ActionTarget.as_choices()): - raise AttributeError( - f"Target type of {target_type} is not registered. Modify ActionTarget." - ) - key = cls.get_registry_key(trigger_type, service_type, target_type) - if cls._registry.get(key) is not None: - raise AttributeError( - f"Existing registration found for trigger:{trigger_type}, service:{service_type}, target:{target_type}." 
- ) - - cls._registry[key] = registration - return registration - - return inner - - @classmethod - def get_trigger_types(cls): - return cls._trigger_types - - @classmethod - def get_trigger_text(self, trigger_type: int) -> str: - return dict(NotificationAction.get_trigger_types())[trigger_type] - - @classmethod - def get_registry_key(self, trigger_type: int, service_type: int, target_type: int) -> str: - return f"{trigger_type}:{service_type}:{target_type}" - - @classmethod - def get_registry(cls) -> Mapping[str, type[ActionRegistration]]: - return cls._registry - - @classmethod - def get_registration( - cls, trigger_type: int, service_type: int, target_type: int - ) -> type[ActionRegistration] | None: - key = cls.get_registry_key(trigger_type, service_type, target_type) - return cls._registry.get(key) - - def get_audit_log_data(self) -> dict[str, str]: - """ - Returns audit log data for NOTIFICATION_ACTION_ADD, NOTIFICATION_ACTION_EDIT - and NOTIFICATION_ACTION_REMOVE events - """ - return {"trigger": NotificationAction.get_trigger_text(self.trigger_type)} - - def fire(self, *args, **kwargs): - registration = NotificationAction.get_registration( - self.trigger_type, self.service_type, self.target_type - ) - if registration: - logger.info( - "fire_action", - extra={ - "action_id": self.id, - "trigger": NotificationAction.get_trigger_text(self.trigger_type), - "service": ActionService.get_name(self.service_type), - "target": ActionTarget.get_name(self.target_type), - }, - ) - return registration(action=self).fire(*args, **kwargs) - else: - logger.error( - "missing_registration", - extra={ - "id": self.id, - "service_type": self.service_type, - "trigger_type": self.trigger_type, - "target_type": self.target_type, - }, - ) - - def get_relocation_scope(self) -> RelocationScope: - if self.integration_id is not None or self.sentry_app_id is not None: - # TODO(getsentry/team-ospo#188): this should be extension scope once that gets added. 
- return RelocationScope.Global - - return RelocationScope.Organization +from sentry.notifications.models.notificationaction import ( + ActionRegistration, + ActionService, + ActionTarget, + ActionTrigger, + NotificationAction, + NotificationActionProject, +) + +__all__ = ( + "NotificationActionProject", + "NotificationAction", + "ActionService", + "ActionTrigger", + "ActionTarget", + "ActionRegistration", +) diff --git a/src/sentry/models/notificationsettingoption.py b/src/sentry/models/notificationsettingoption.py index 4cf79644821205..af3d50ab806eff 100644 --- a/src/sentry/models/notificationsettingoption.py +++ b/src/sentry/models/notificationsettingoption.py @@ -1,40 +1,3 @@ -from django.db import models +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption -from sentry.backup.scopes import RelocationScope -from sentry.db.models import control_silo_model, sane_repr - -from .notificationsettingbase import NotificationSettingBase - - -@control_silo_model -class NotificationSettingOption(NotificationSettingBase): - __relocation_scope__ = RelocationScope.Excluded - - class Meta: - app_label = "sentry" - db_table = "sentry_notificationsettingoption" - unique_together = ( - ( - "scope_type", - "scope_identifier", - "user_id", - "team_id", - "type", - ), - ) - constraints = [ - models.CheckConstraint( - condition=models.Q(team_id__isnull=False, user_id__isnull=True) - | models.Q(team_id__isnull=True, user_id__isnull=False), - name="notification_setting_option_team_or_user_check", - ) - ] - - __repr__ = sane_repr( - "scope_type", - "scope_identifier", - "type", - "user_id", - "team_id", - "value", - ) +__all__ = ("NotificationSettingOption",) diff --git a/src/sentry/models/notificationsettingprovider.py b/src/sentry/models/notificationsettingprovider.py index e977e9ef54de13..ddd1e4ef765499 100644 --- a/src/sentry/models/notificationsettingprovider.py +++ b/src/sentry/models/notificationsettingprovider.py @@ -1,44 +1,3 @@ -from django.db import models +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider -from sentry.backup.scopes import RelocationScope -from sentry.db.models import control_silo_model, sane_repr - -from .notificationsettingbase import NotificationSettingBase - - -@control_silo_model -class NotificationSettingProvider(NotificationSettingBase): - __relocation_scope__ = RelocationScope.Excluded - - provider = models.CharField(max_length=32, null=False) - - class Meta: - app_label = "sentry" - db_table = "sentry_notificationsettingprovider" - unique_together = ( - ( - "scope_type", - "scope_identifier", - "user_id", - "team_id", - "provider", - "type", - ), - ) - constraints = [ - models.CheckConstraint( - condition=models.Q(team_id__isnull=False, user_id__isnull=True) - | models.Q(team_id__isnull=True, user_id__isnull=False), - name="notification_setting_provider_team_or_user_check", - ) - ] - - __repr__ = sane_repr( - "scope_type", - "scope_identifier", - "user_id", - "team_id", - "provider", - "type", - "value", - ) +__all__ = ("NotificationSettingProvider",) diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py index 94d33fee57ce42..0bbe6e3c0aa298 100644 --- a/src/sentry/models/release.py +++ b/src/sentry/models/release.py @@ -98,7 +98,7 @@ def get_queryset(self) -> ReleaseQuerySet: def annotate_prerelease_column(self): return self.get_queryset().annotate_prerelease_column() - def filter_to_semver(self): + def filter_to_semver(self) -> ReleaseQuerySet: return 
self.get_queryset().filter_to_semver() def filter_by_semver_build( diff --git a/src/sentry/models/relocation.py b/src/sentry/models/relocation.py index 897e3530cce3d1..e563ae7c03d58b 100644 --- a/src/sentry/models/relocation.py +++ b/src/sentry/models/relocation.py @@ -90,6 +90,14 @@ class Provenance(IntEnum): def get_choices(cls) -> list[tuple[int, str]]: return [(key.value, key.name) for key in cls] + def __str__(self): + if self.name == "SELF_HOSTED": + return "self-hosted" + elif self.name == "SAAS_TO_SAAS": + return "saas-to-saas" + else: + raise ValueError("Cannot extract a filename from `RelocationFile.Kind.UNKNOWN`.") + # The user that requested this relocation - if the request was made by an admin on behalf of a # user, this will be different from `owner`. Otherwise, they are identical. creator_id = BoundedBigIntegerField() diff --git a/src/sentry/monitors/clock_tasks/check_missed.py b/src/sentry/monitors/clock_tasks/check_missed.py index 0da1afc85995bd..5936ddc5e6afee 100644 --- a/src/sentry/monitors/clock_tasks/check_missed.py +++ b/src/sentry/monitors/clock_tasks/check_missed.py @@ -161,4 +161,9 @@ def mark_environment_missing(monitor_environment_id: int, ts: datetime): monitor.schedule, ) - mark_failed(checkin, failed_at=most_recent_expected_ts) + mark_failed( + checkin, + failed_at=most_recent_expected_ts, + received=ts, + clock_tick=ts, + ) diff --git a/src/sentry/monitors/clock_tasks/check_timeout.py b/src/sentry/monitors/clock_tasks/check_timeout.py index 019be6a4cdb329..7e8a770c9ab231 100644 --- a/src/sentry/monitors/clock_tasks/check_timeout.py +++ b/src/sentry/monitors/clock_tasks/check_timeout.py @@ -121,4 +121,9 @@ def mark_checkin_timeout(checkin_id: int, ts: datetime) -> None: monitor.schedule, ) - mark_failed(checkin, failed_at=most_recent_expected_ts) + mark_failed( + checkin, + failed_at=most_recent_expected_ts, + received=ts, + clock_tick=ts, + ) diff --git a/src/sentry/monitors/clock_tasks/mark_unknown.py b/src/sentry/monitors/clock_tasks/mark_unknown.py index 4fb7356fea23bb..a0061664e2d558 100644 --- a/src/sentry/monitors/clock_tasks/mark_unknown.py +++ b/src/sentry/monitors/clock_tasks/mark_unknown.py @@ -25,7 +25,7 @@ def dispatch_mark_unknown(ts: datetime): Given a clock tick timestamp datetime which was processed where an anomaly had been detected in the volume of check-ins that have been processed, determine monitors that are in-progress that can no longer be known to - complete as data loss has likely occured. + complete as data loss has likely occurred. This will dispatch MarkUnknown messages into monitors-clock-tasks. 
""" @@ -33,9 +33,9 @@ def dispatch_mark_unknown(ts: datetime): MonitorCheckIn.objects.filter( status=CheckInStatus.IN_PROGRESS, date_added__lte=ts, - ).values( - "id", "monitor_environment_id" - )[:CHECKINS_LIMIT] + ) + .values("id", "monitor_environment_id") + .order_by("-date_added")[:CHECKINS_LIMIT] ) metrics.gauge( diff --git a/src/sentry/monitors/consumers/clock_tick_consumer.py b/src/sentry/monitors/consumers/clock_tick_consumer.py index b81bd9749d1b1f..9886b6d2359019 100644 --- a/src/sentry/monitors/consumers/clock_tick_consumer.py +++ b/src/sentry/monitors/consumers/clock_tick_consumer.py @@ -12,14 +12,12 @@ from sentry_kafka_schemas.codecs import Codec from sentry_kafka_schemas.schema_types.monitors_clock_tick_v1 import ClockTick +from sentry import options from sentry.conf.types.kafka_definition import Topic, get_topic_codec from sentry.monitors.clock_tasks.check_missed import dispatch_check_missing from sentry.monitors.clock_tasks.check_timeout import dispatch_check_timeout -from sentry.monitors.system_incidents import ( - make_clock_tick_decision, - record_clock_tick_volume_metric, -) -from sentry.utils import metrics +from sentry.monitors.clock_tasks.mark_unknown import dispatch_mark_unknown +from sentry.monitors.system_incidents import process_clock_tick_for_system_incidents logger = logging.getLogger(__name__) @@ -33,31 +31,29 @@ def process_clock_tick(message: Message[KafkaPayload | FilteredPayload]): wrapper: ClockTick = MONITORS_CLOCK_TICK_CODEC.decode(message.payload.value) ts = datetime.fromtimestamp(wrapper["ts"], tz=timezone.utc) - record_clock_tick_volume_metric(ts) - try: - result = make_clock_tick_decision(ts) - - metrics.incr( - "monitors.tasks.clock_tick.tick_decision", - tags={"decision": result.decision}, - sample_rate=1.0, - ) - if result.transition: - metrics.incr( - "monitors.tasks.clock_tick.tick_transition", - tags={"transition": result.transition}, - sample_rate=1.0, - ) - except Exception: - logger.exception("sentry.tasks.clock_tick.clock_tick_decision_failed") - logger.info( "process_clock_tick", extra={"reference_datetime": str(ts)}, ) + try: + incident_result = process_clock_tick_for_system_incidents(ts) + except Exception: + incident_result = None + logger.exception("failed_process_clock_tick_for_system_incidents") + dispatch_check_missing(ts) - dispatch_check_timeout(ts) + + use_decision = options.get("crons.system_incidents.use_decisions") + + # During a systems incident we do NOT mark timeouts since it's possible + # we'll have lost the completing check-in. Instead we mark ALL in-progress + # check-ins as UNKNOWN. Should these check-ins recieve completing check-ins + # they will be properly updated, even after being marked as UNKNOWN. 
+ if use_decision and incident_result and incident_result.decision.is_incident(): + dispatch_mark_unknown(ts) + else: + dispatch_check_timeout(ts) class MonitorClockTickStrategyFactory(ProcessingStrategyFactory[KafkaPayload]): diff --git a/src/sentry/monitors/consumers/incident_occurrences_consumer.py b/src/sentry/monitors/consumers/incident_occurrences_consumer.py index 3a8799e9ae5589..73e546b57da8c5 100644 --- a/src/sentry/monitors/consumers/incident_occurrences_consumer.py +++ b/src/sentry/monitors/consumers/incident_occurrences_consumer.py @@ -2,16 +2,27 @@ import logging from collections.abc import Mapping +from datetime import UTC, datetime +from typing import TypeGuard +import sentry_sdk from arroyo.backends.kafka.consumer import KafkaPayload +from arroyo.processing.strategies import MessageRejected from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory from arroyo.processing.strategies.commit import CommitOffsets from arroyo.processing.strategies.run_task import RunTask from arroyo.types import BrokerValue, Commit, FilteredPayload, Message, Partition +from cachetools.func import ttl_cache from sentry_kafka_schemas.codecs import Codec from sentry_kafka_schemas.schema_types.monitors_incident_occurrences_v1 import IncidentOccurrence +from sentry_sdk.tracing import Span, Transaction +from sentry import options from sentry.conf.types.kafka_definition import Topic, get_topic_codec +from sentry.monitors.logic.incident_occurrence import send_incident_occurrence +from sentry.monitors.models import CheckInStatus, MonitorCheckIn, MonitorIncident +from sentry.monitors.system_incidents import TickAnomalyDecision, get_clock_tick_decision +from sentry.utils import metrics logger = logging.getLogger(__name__) @@ -20,12 +31,106 @@ ) -def process_incident_occurrence(message: Message[KafkaPayload | FilteredPayload]): +@ttl_cache(ttl=5) +def memoized_tick_decision(tick: datetime) -> TickAnomalyDecision | None: + """ + Memoized version of get_clock_tick_decision. Used in + process_incident_occurrence to avoid stampeding calls when waiting for a + tick decision to resolve. + """ + return get_clock_tick_decision(tick) + + +def _process_incident_occurrence( + message: Message[KafkaPayload | FilteredPayload], txn: Transaction | Span +): + """ + Process a incident occurrence message. This will immediately dispatch an + issue occurrence via send_incident_occurrence. + """ assert not isinstance(message.payload, FilteredPayload) assert isinstance(message.value, BrokerValue) - # wrapper: IncidentOccurrence = MONITORS_INCIDENT_OCCURRENCES.decode(message.payload.value) - # TODO(epurkhiser): Do something with issue occurrence + wrapper: IncidentOccurrence = MONITORS_INCIDENT_OCCURRENCES.decode(message.payload.value) + clock_tick = datetime.fromtimestamp(wrapper["clock_tick_ts"], UTC) + + # May be used as a killswitch if system incident decisions become incorrect + # for any reason + use_decision = options.get("crons.system_incidents.use_decisions") + tick_decision = memoized_tick_decision(clock_tick) + + if use_decision and tick_decision and tick_decision.is_pending(): + # The decision is pending resolution. We need to stop consuming until + # the tick decision is resolved so we can know if it's OK to dispatch the + # incident occurrence, or if we should drop the occurrence and mark the + # associated check-ins as UNKNOWN due to a system incident. 
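# Illustrative sketch (not from this patch): the `memoized_tick_decision` helper
# above uses `cachetools.func.ttl_cache` to collapse repeated lookups for the same
# clock tick while a decision is pending. A minimal, self-contained example of the
# same pattern; the `lookups` counter stands in for the real redis-backed
# `get_clock_tick_decision` call.
from datetime import datetime
from cachetools.func import ttl_cache

lookups = 0

@ttl_cache(ttl=5)
def cached_decision(tick: datetime) -> str:
    global lookups
    lookups += 1
    return "normal"  # stand-in result; the real helper returns a TickAnomalyDecision

tick = datetime(2024, 12, 1, 12, 0)
cached_decision(tick)
cached_decision(tick)
assert lookups == 1  # the second call within the 5 second TTL never re-queries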
+ txn.set_tag("result", "delayed") + + # XXX(epurkhiser): MessageRejected tells arroyo that we can't process + # this message right now and it should try again + raise MessageRejected() + + try: + incident = MonitorIncident.objects.get(id=int(wrapper["incident_id"])) + except MonitorIncident.DoesNotExist: + logger.exception("missing_incident") + return + + # previous_checkin_ids includes the failed_checkin_id + checkins = MonitorCheckIn.objects.filter(id__in=wrapper["previous_checkin_ids"]) + checkins_map: dict[int, MonitorCheckIn] = {checkin.id: checkin for checkin in checkins} + + failed_checkin = checkins_map.get(int(wrapper["failed_checkin_id"])) + previous_checkins = [checkins_map.get(int(id)) for id in wrapper["previous_checkin_ids"]] + + def has_all(checkins: list[MonitorCheckIn | None]) -> TypeGuard[list[MonitorCheckIn]]: + return None not in checkins + + # Unlikely, but if we can't find all the check-ins we can't produce an occurrence + if failed_checkin is None or not has_all(previous_checkins): + logger.error("missing_check_ins") + return + + received = datetime.fromtimestamp(wrapper["received_ts"], UTC) + + if use_decision and tick_decision and tick_decision.is_incident(): + # Update the failed check-in as unknown and drop the occurrence. + # + # Only consider synthetic check-ins (timeout and miss) since failed + # check-ins must have been correctly ingested and cannot have been + # produced during a system incident. + # + # XXX(epurkhiser): There is an edge case here where we'll want to + # determine if the check-in is within the system incident timeframe, + # since we dispatch occurrences for all check-ins that met a failure + # threshold. Imagine a monitor that checks-in once a day with a failure + # threshold of 5. If the last check-in happens to be a miss that is + # detected during a system-incident, then all 5 previous check-ins + # would also be marked as unknown, which is incorrect. 
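# Illustrative sketch (not from this patch): the TypeGuard narrowing used by the
# `has_all` helper above. Once the guard returns True, a type checker treats the
# list as containing no None entries, so no per-element Optional handling is needed.
from typing import TypeGuard

def all_present(items: list[int | None]) -> TypeGuard[list[int]]:
    return None not in items

values: list[int | None] = [1, 2, 3]
if all_present(values):
    total = sum(values)  # `values` is narrowed to list[int] inside this branch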
+ MonitorCheckIn.objects.filter( + id=failed_checkin.id, + status__in=CheckInStatus.SYNTHETIC_TERMINAL_VALUES, + ).update(status=CheckInStatus.UNKNOWN) + + # Do NOT send the occurrence + txn.set_tag("result", "dropped") + metrics.incr("monitors.incident_ocurrences.dropped_incident_occurrence") + return + + try: + send_incident_occurrence(failed_checkin, previous_checkins, incident, received) + txn.set_tag("result", "sent") + metrics.incr("monitors.incident_ocurrences.sent_incident_occurrence") + except Exception: + logger.exception("failed_send_incident_occurrence") + + +def process_incident_occurrence(message: Message[KafkaPayload | FilteredPayload]): + with sentry_sdk.start_transaction( + op="_process_incident_occurrence", + name="monitors.incident_occurrence_consumer", + ) as txn: + _process_incident_occurrence(message, txn) class MonitorIncidentOccurenceStrategyFactory(ProcessingStrategyFactory[KafkaPayload]): diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index 01495eeecfd228..976f3818ac6c06 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -1013,8 +1013,7 @@ def process_single(message: Message[KafkaPayload | FilteredPayload]): ts = message.value.timestamp partition = message.value.partition.index - if wrapper["message_type"] != "clock_pulse": - update_check_in_volume([ts]) + update_check_in_volume([ts]) try: try_monitor_clock_tick(ts, partition) diff --git a/src/sentry/monitors/endpoints/base_monitor_stats.py b/src/sentry/monitors/endpoints/base_monitor_stats.py index 3f49f3d94be338..664bbfcc8192f1 100644 --- a/src/sentry/monitors/endpoints/base_monitor_stats.py +++ b/src/sentry/monitors/endpoints/base_monitor_stats.py @@ -36,6 +36,7 @@ def get_monitor_stats(self, request: Request, project, monitor) -> Response: CheckInStatus.ERROR, CheckInStatus.MISSED, CheckInStatus.TIMEOUT, + CheckInStatus.UNKNOWN, ] check_ins = MonitorCheckIn.objects.filter( diff --git a/src/sentry/monitors/endpoints/organization_monitor_index_stats.py b/src/sentry/monitors/endpoints/organization_monitor_index_stats.py index 08961488278d85..ba08695ccca103 100644 --- a/src/sentry/monitors/endpoints/organization_monitor_index_stats.py +++ b/src/sentry/monitors/endpoints/organization_monitor_index_stats.py @@ -23,6 +23,7 @@ CheckInStatus.ERROR, CheckInStatus.MISSED, CheckInStatus.TIMEOUT, + CheckInStatus.UNKNOWN, ] diff --git a/src/sentry/monitors/logic/incident_occurrence.py b/src/sentry/monitors/logic/incident_occurrence.py index 07e8d077b1ed9d..cb57f4e7155f59 100644 --- a/src/sentry/monitors/logic/incident_occurrence.py +++ b/src/sentry/monitors/logic/incident_occurrence.py @@ -7,33 +7,135 @@ from datetime import datetime, timezone from typing import TYPE_CHECKING +from arroyo import Topic as ArroyoTopic +from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from django.utils.text import get_text_list from django.utils.translation import gettext_lazy as _ +from sentry_kafka_schemas.codecs import Codec +from sentry_kafka_schemas.schema_types.monitors_incident_occurrences_v1 import IncidentOccurrence +from sentry import options +from sentry.conf.types.kafka_definition import Topic, get_topic_codec from sentry.issues.grouptype import MonitorIncidentType +from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence +from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka +from sentry.issues.status_change_message 
import StatusChangeMessage +from sentry.models.group import GroupStatus from sentry.monitors.models import ( CheckInStatus, MonitorCheckIn, MonitorEnvironment, MonitorIncident, ) -from sentry.monitors.types import SimpleCheckIn +from sentry.utils.arroyo_producer import SingletonProducer +from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition if TYPE_CHECKING: from django.utils.functional import _StrPromise logger = logging.getLogger(__name__) +MONITORS_INCIDENT_OCCURRENCES: Codec[IncidentOccurrence] = get_topic_codec( + Topic.MONITORS_INCIDENT_OCCURRENCES +) + + +def _get_producer() -> KafkaProducer: + cluster_name = get_topic_definition(Topic.MONITORS_INCIDENT_OCCURRENCES)["cluster"] + producer_config = get_kafka_producer_cluster_options(cluster_name) + producer_config.pop("compression.type", None) + producer_config.pop("message.max.bytes", None) + return KafkaProducer(build_kafka_configuration(default_config=producer_config)) + + +_incident_occurrence_producer = SingletonProducer(_get_producer) + + +def dispatch_incident_occurrence( + failed_checkin: MonitorCheckIn, + previous_checkins: Sequence[MonitorCheckIn], + incident: MonitorIncident, + received: datetime, + clock_tick: datetime | None, +) -> None: + """ + Determine how to route an incident occurrence. -def create_incident_occurrence( - failed_checkins: Sequence[SimpleCheckIn], + - When failed check-in triggers mark_failed directly from the + monitor_consumer we will immediately dispatch the associated incident + occurrence. + + This is indicated by the lack of a `clock_tick`. + + - When a synthetic failed check-in (time-out or miss) triggers mark_failed + we will queue the incident occurrence to be processed later, allowing for + the occurrence to be delayed or dropped in the case of a systems incident. + + This is indicated by the presence of a `clock_tick`. + """ + # XXX(epurkhiser): Dispatching via the consumer is behind a flag while we + # verify things are working well. + consumer_dispatch_enabled = options.get("crons.dispatch_incident_occurrences_to_consumer") + + if clock_tick and consumer_dispatch_enabled: + queue_incident_occurrence(failed_checkin, previous_checkins, incident, received, clock_tick) + else: + send_incident_occurrence(failed_checkin, previous_checkins, incident, received) + + +def queue_incident_occurrence( failed_checkin: MonitorCheckIn, + previous_checkins: Sequence[MonitorCheckIn], incident: MonitorIncident, - received: datetime | None, + received: datetime, + clock_tick: datetime, ) -> None: - from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence - from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka + """ + Queue an issue occurrence for a monitor incident. + This incident occurrence will be processed by the + `incident_occurrence_consumer`. Queuing is used here to allow for issue + occurrences to be delayed in the scenario where Sentry may be experiencing + a systems incident (e.g. Relay cannot process check-ins). In these scenarios + the consumer will delay consumption until we can determine there is no + systems incident. Or, in the worst case, it will drop incident occurrences + since we can no longer reliably guarantee the occurrences are accurate.
+ """ + monitor_env = failed_checkin.monitor_environment + + if monitor_env is None: + return + + incident_occurrence: IncidentOccurrence = { + "incident_id": incident.id, + "failed_checkin_id": failed_checkin.id, + "previous_checkin_ids": [checkin.id for checkin in previous_checkins], + "received_ts": int(received.timestamp()), + "clock_tick_ts": int(clock_tick.timestamp()), + } + + # The incident occurrence is partitioned by monitor environment ID, just + # the same as the clock tasks. Ensures issue occurrences are sent in order. + payload = KafkaPayload( + str(monitor_env.id).encode(), + MONITORS_INCIDENT_OCCURRENCES.encode(incident_occurrence), + [], + ) + + topic = get_topic_definition(Topic.MONITORS_INCIDENT_OCCURRENCES)["real_topic_name"] + _incident_occurrence_producer.produce(ArroyoTopic(topic), payload) + + +def send_incident_occurrence( + failed_checkin: MonitorCheckIn, + previous_checkins: Sequence[MonitorCheckIn], + incident: MonitorIncident, + received: datetime, +) -> None: + """ + Construct and send an issue occurrence given an incident and the associated + failing check-ins which caused that incident. + """ monitor_env = failed_checkin.monitor_environment if monitor_env is None: @@ -59,7 +161,7 @@ def create_incident_occurrence( evidence_display=[ IssueEvidence( name="Failure reason", - value=str(get_failure_reason(failed_checkins)), + value=str(get_failure_reason(previous_checkins)), important=True, ), IssueEvidence( @@ -92,8 +194,10 @@ def create_incident_occurrence( "fingerprint": [incident.grouphash], "platform": "other", "project_id": monitor_env.monitor.project_id, - # We set this to the time that the checkin that triggered the occurrence was written to relay if available - "received": (received if received else current_timestamp).isoformat(), + # This is typically the time that the checkin that triggered the + # occurrence was written to relay, otherwise it is when we detected a + # miss or timeout. + "received": received.isoformat(), "sdk": None, "tags": { "monitor.id": str(monitor_env.monitor.guid), @@ -127,7 +231,7 @@ def create_incident_occurrence( } -def get_failure_reason(failed_checkins: Sequence[SimpleCheckIn]): +def get_failure_reason(failed_checkins: Sequence[MonitorCheckIn]): """ Builds a human readable string from a list of failed check-ins.
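# Illustrative sketch (not from this patch): why keying the payload by monitor
# environment id preserves per-environment ordering. Messages with the same key
# always map to the same partition; crc32 here is a simplified stand-in for
# Kafka's murmur2-based default partitioner.
import zlib

def pick_partition(key: bytes, num_partitions: int) -> int:
    return zlib.crc32(key) % num_partitions

# every occurrence for monitor environment 42 lands on one partition, so the
# incident occurrence consumer sees them in the order they were produced
assert pick_partition(b"42", 8) == pick_partition(b"42", 8)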
@@ -137,9 +241,9 @@ def get_failure_reason(failed_checkins: Sequence[SimpleCheckIn]): """ status_counts = Counter( - checkin["status"] + checkin.status for checkin in failed_checkins - if checkin["status"] in HUMAN_FAILURE_STATUS_MAP.keys() + if checkin.status in HUMAN_FAILURE_STATUS_MAP.keys() ) if sum(status_counts.values()) == 1: @@ -169,3 +273,16 @@ def get_monitor_environment_context(monitor_environment: MonitorEnvironment): "status": monitor_environment.get_status_display(), "type": monitor_environment.monitor.get_type_display(), } + + +def resolve_incident_group(incident: MonitorIncident, project_id: int): + status_change = StatusChangeMessage( + fingerprint=[incident.grouphash], + project_id=project_id, + new_status=GroupStatus.RESOLVED, + new_substatus=None, + ) + produce_occurrence_to_kafka( + payload_type=PayloadType.STATUS_CHANGE, + status_change=status_change, + ) diff --git a/src/sentry/monitors/logic/incidents.py b/src/sentry/monitors/logic/incidents.py index 10fb253543e294..2067286cf987ad 100644 --- a/src/sentry/monitors/logic/incidents.py +++ b/src/sentry/monitors/logic/incidents.py @@ -1,20 +1,52 @@ from __future__ import annotations import logging -from datetime import datetime -from typing import cast +from dataclasses import dataclass +from datetime import datetime, timedelta -from sentry.monitors.logic.incident_occurrence import create_incident_occurrence +from django.utils import timezone + +from sentry import analytics +from sentry.monitors.logic.incident_occurrence import ( + dispatch_incident_occurrence, + resolve_incident_group, +) from sentry.monitors.models import CheckInStatus, MonitorCheckIn, MonitorIncident, MonitorStatus -from sentry.monitors.types import SimpleCheckIn +from sentry.monitors.tasks.detect_broken_monitor_envs import NUM_DAYS_BROKEN_PERIOD logger = logging.getLogger(__name__) +@dataclass +class SimpleCheckIn: + """ + A stripped down check in object + """ + + id: int + date_added: datetime + status: int + + @classmethod + def from_checkin(cls, checkin: MonitorCheckIn) -> SimpleCheckIn: + return cls(checkin.id, checkin.date_added, checkin.status) + + def try_incident_threshold( failed_checkin: MonitorCheckIn, - received: datetime | None, + received: datetime, + clock_tick: datetime | None = None, ) -> bool: + """ + Determine if a monitor environment has reached its incident threshold + given the most recent failed check-in. When the threshold is reached a + MonitorIncident will be created and an incident occurrence will be + dispatched, which will later produce an issue occurrence. + + If an incident already exists additional occurrences will be dispatched. + + Returns True if we produce an incident occurrence.
+ """ from sentry.signals import monitor_environment_failed monitor_env = failed_checkin.monitor_environment @@ -29,30 +61,25 @@ def try_incident_threshold( # check to see if we need to update the status if monitor_env.status in [MonitorStatus.OK, MonitorStatus.ACTIVE]: if failure_issue_threshold == 1: - previous_checkins: list[SimpleCheckIn] = [ - { - "id": failed_checkin.id, - "date_added": failed_checkin.date_added, - "status": failed_checkin.status, - } - ] + previous_checkins: list[SimpleCheckIn] = [SimpleCheckIn.from_checkin(failed_checkin)] else: - previous_checkins = cast( - list[SimpleCheckIn], + previous_checkins = [ + SimpleCheckIn(**row) + for row in # Using .values for performance reasons MonitorCheckIn.objects.filter( monitor_environment=monitor_env, date_added__lte=failed_checkin.date_added ) .order_by("-date_added") - .values("id", "date_added", "status"), - ) + .values("id", "date_added", "status")[:failure_issue_threshold] + ] # reverse the list after slicing in order to start with oldest check-in - previous_checkins = list(reversed(previous_checkins[:failure_issue_threshold])) + previous_checkins = list(reversed(previous_checkins)) # If we have any successful check-ins within the threshold of # commits we have NOT reached an incident state - if any([checkin["status"] == CheckInStatus.OK for checkin in previous_checkins]): + if any([checkin.status == CheckInStatus.OK for checkin in previous_checkins]): return False # change monitor status + update fingerprint timestamp @@ -67,21 +94,15 @@ def try_incident_threshold( resolving_checkin=None, defaults={ "monitor": monitor_env.monitor, - "starting_checkin_id": starting_checkin["id"], - "starting_timestamp": starting_checkin["date_added"], + "starting_checkin_id": starting_checkin.id, + "starting_timestamp": starting_checkin.date_added, }, ) elif monitor_env.status == MonitorStatus.ERROR: - # if monitor environment has a failed status, use the failed - # check-in and send occurrence - previous_checkins = [ - { - "id": failed_checkin.id, - "date_added": failed_checkin.date_added, - "status": failed_checkin.status, - } - ] + # If the monitor was already in an incident there are no previous + # check-ins to pass along when creating the occurrence + previous_checkins = [SimpleCheckIn.from_checkin(failed_checkin)] # get the active incident from the monitor environment incident = monitor_env.active_incident @@ -93,15 +114,83 @@ def try_incident_threshold( # - We have an active incident and fingerprint # - The monitor and env are not muted if not monitor_env.monitor.is_muted and not monitor_env.is_muted and incident: - checkins = MonitorCheckIn.objects.filter(id__in=[c["id"] for c in previous_checkins]) + checkins = list(MonitorCheckIn.objects.filter(id__in=[c.id for c in previous_checkins])) for checkin in checkins: - create_incident_occurrence( - previous_checkins, - checkin, - incident, - received=received, - ) + dispatch_incident_occurrence(checkin, checkins, incident, received, clock_tick) monitor_environment_failed.send(monitor_environment=monitor_env, sender=type(monitor_env)) return True + + +def try_incident_resolution(ok_checkin: MonitorCheckIn) -> bool: + """ + Attempt to resolve any open incidents for a monitor given the most recent + successful check-in. + + Returns True if the incident was resolved.
+ """ + monitor_env = ok_checkin.monitor_environment + + if monitor_env is None: + return False + + if monitor_env.status == MonitorStatus.OK or ok_checkin.status != CheckInStatus.OK: + return False + + recovery_threshold = monitor_env.monitor.config.get("recovery_threshold", 1) + if not recovery_threshold: + recovery_threshold = 1 + + # Run incident logic if recovery threshold is set + if recovery_threshold > 1: + # Check if our incident is recovering + previous_checkins = ( + MonitorCheckIn.objects.filter(monitor_environment=monitor_env) + .values("id", "date_added", "status") + .order_by("-date_added")[:recovery_threshold] + ) + + # Incident recovers when we have successive threshold check-ins + incident_recovering = all( + previous_checkin["status"] == CheckInStatus.OK for previous_checkin in previous_checkins + ) + else: + # Mark any open incidents as recovering by default + incident_recovering = True + + if not incident_recovering: + return False + + incident = monitor_env.active_incident + if incident: + resolve_incident_group(incident, ok_checkin.monitor.project_id) + incident.update( + resolving_checkin=ok_checkin, + resolving_timestamp=ok_checkin.date_added, + ) + logger.info( + "monitors.logic.mark_ok.resolving_incident", + extra={ + "monitor_env_id": monitor_env.id, + "incident_id": incident.id, + "grouphash": incident.grouphash, + }, + ) + # if incident was longer than the broken env time, check if there was a + # broken detection that is also now resolved + if ( + incident.starting_timestamp is not None + and incident.starting_timestamp + <= timezone.now() - timedelta(days=NUM_DAYS_BROKEN_PERIOD) + ): + if incident.monitorenvbrokendetection_set.exists(): + analytics.record( + "cron_monitor_broken_status.recovery", + organization_id=monitor_env.monitor.organization_id, + project_id=monitor_env.monitor.project_id, + monitor_id=monitor_env.monitor.id, + monitor_env_id=monitor_env.id, + ) + + return True diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 27912aa15754e8..6877380d528aa0 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -15,6 +15,7 @@ def mark_failed( failed_checkin: MonitorCheckIn, failed_at: datetime, received: datetime | None = None, + clock_tick: datetime | None = None, ) -> bool: """ Given a failing check-in, mark the monitor environment as failed and trigger @@ -30,6 +31,10 @@ def mark_failed( if monitor_env is None: return False + # Use the failure time as received if there is no received time + if received is None: + received = failed_at + # Compute the next check-in time from our reference time next_checkin = monitor_env.monitor.get_next_expected_checkin(failed_at) next_checkin_latest = monitor_env.monitor.get_next_expected_checkin_latest(failed_at) @@ -72,4 +77,4 @@ def mark_failed( monitor_env.refresh_from_db() # Create incidents + issues - return try_incident_threshold(failed_checkin, received) + return try_incident_threshold(failed_checkin, received, clock_tick) diff --git a/src/sentry/monitors/logic/mark_ok.py b/src/sentry/monitors/logic/mark_ok.py index e49f37d51f2966..1d49eccc26fa4f 100644 --- a/src/sentry/monitors/logic/mark_ok.py +++ b/src/sentry/monitors/logic/mark_ok.py @@ -1,14 +1,11 @@ from __future__ import annotations import logging -from datetime import datetime, timedelta +from datetime import datetime from typing import NotRequired, TypedDict -from django.utils import timezone - -from sentry import analytics -from
sentry.monitors.models import CheckInStatus, MonitorCheckIn, MonitorEnvironment, MonitorStatus -from sentry.monitors.tasks.detect_broken_monitor_envs import NUM_DAYS_BROKEN_PERIOD +from sentry.monitors.logic.incidents import try_incident_resolution +from sentry.monitors.models import MonitorCheckIn, MonitorEnvironment, MonitorStatus logger = logging.getLogger(__name__) @@ -43,83 +40,11 @@ def mark_ok(checkin: MonitorCheckIn, succeeded_at: datetime) -> None: "next_checkin_latest": next_checkin_latest, } - if monitor_env.status != MonitorStatus.OK and checkin.status == CheckInStatus.OK: - recovery_threshold = monitor_env.monitor.config.get("recovery_threshold", 1) - if not recovery_threshold: - recovery_threshold = 1 - - # Run incident logic if recovery threshold is set - if recovery_threshold > 1: - # Check if our incident is recovering - previous_checkins = ( - MonitorCheckIn.objects.filter(monitor_environment=monitor_env) - .values("id", "date_added", "status") - .order_by("-date_added")[:recovery_threshold] - ) + incident_resolved = try_incident_resolution(checkin) - # Incident recovers when we have successive threshold check-ins - incident_recovering = all( - previous_checkin["status"] == CheckInStatus.OK - for previous_checkin in previous_checkins - ) - else: - # Mark any open incidents as recovering by default - incident_recovering = True - - # Resolve any open incidents - if incident_recovering: - params["status"] = MonitorStatus.OK - incident = monitor_env.active_incident - if incident: - resolve_incident_group(incident.grouphash, checkin.monitor.project_id) - incident.update( - resolving_checkin=checkin, - resolving_timestamp=checkin.date_added, - ) - logger.info( - "monitors.logic.mark_ok.resolving_incident", - extra={ - "monitor_env_id": monitor_env.id, - "incident_id": incident.id, - "grouphash": incident.grouphash, - }, - ) - # if incident was longer than the broken env time, check if there was a broken detection that is also now resolved - if ( - incident.starting_timestamp is not None - and incident.starting_timestamp - <= timezone.now() - timedelta(days=NUM_DAYS_BROKEN_PERIOD) - ): - if incident.monitorenvbrokendetection_set.exists(): - analytics.record( - "cron_monitor_broken_status.recovery", - organization_id=monitor_env.monitor.organization_id, - project_id=monitor_env.monitor.project_id, - monitor_id=monitor_env.monitor.id, - monitor_env_id=monitor_env.id, - ) + if incident_resolved: + params["status"] = MonitorStatus.OK MonitorEnvironment.objects.filter(id=monitor_env.id).exclude( last_checkin__gt=succeeded_at ).update(**params) - - -def resolve_incident_group( - fingerprint: str, - project_id: int, -): - from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka - from sentry.issues.status_change_message import StatusChangeMessage - from sentry.models.group import GroupStatus - - status_change = StatusChangeMessage( - fingerprint=[fingerprint], - project_id=project_id, - new_status=GroupStatus.RESOLVED, - new_substatus=None, - ) - - produce_occurrence_to_kafka( - payload_type=PayloadType.STATUS_CHANGE, - status_change=status_change, - ) diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py index 51eb187c4989e6..511b6019b5b3e8 100644 --- a/src/sentry/monitors/models.py +++ b/src/sentry/monitors/models.py @@ -563,6 +563,8 @@ class Meta: models.Index(fields=["monitor_environment", "status", "date_added"]), # used for timeout task models.Index(fields=["status", "timeout_at"]), + # used for dispatch_mark_unknown + 
models.Index(fields=["status", "date_added"]), # used for check-in list models.Index(fields=["trace_id"]), ] diff --git a/src/sentry/monitors/system_incidents.py b/src/sentry/monitors/system_incidents.py index 8d2b6d93fb248b..4b7531802b90e7 100644 --- a/src/sentry/monitors/system_incidents.py +++ b/src/sentry/monitors/system_incidents.py @@ -13,7 +13,7 @@ from collections import Counter from collections.abc import Generator, Sequence from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import UTC, datetime, timedelta from enum import StrEnum from itertools import batched, chain @@ -22,7 +22,7 @@ from sentry import options from sentry.utils import metrics, redis -logger = logging.getLogger("sentry") +logger = logging.getLogger(__name__) # This key is used to record historical date about the volume of check-ins. MONITOR_VOLUME_HISTORY = "sentry.monitors.volume_history:{ts}" @@ -30,9 +30,12 @@ # This key is used to record the metric volume metric for the tick. MONITOR_TICK_METRIC = "sentry.monitors.volume_metric:{ts}" -# This key is used to record the anomaly decision for a tick +# This key is used to record the anomaly decision for a tick. MONITOR_TICK_DECISION = "sentry.monitors.tick_decision:{ts}" +# Tracks the timestamp of the first clock tick of a system incident. +MONITR_LAST_SYSTEM_INCIDENT_TS = "sentry.monitors.last_system_incident_ts" + # When fetching historic volume data to make a decision whether we have lost # data this value will determine how many historic volume data-points we fetch # of the window of the MONITOR_VOLUME_RETENTION. It is important to consider @@ -78,6 +81,90 @@ def update_check_in_volume(ts_list: Sequence[datetime]): pipeline.execute() +def process_clock_tick_for_system_incidents(tick: datetime) -> DecisionResult: + """ + Encapsulates logic specific to determining if we are in a system incident + during each clock tick. 
+ """ + record_clock_tick_volume_metric(tick) + result = make_clock_tick_decision(tick) + + logger.info( + "process_clock_tick", + extra={"decision": result.decision, "transition": result.transition}, + ) + + # Record metrics for each tick decision + metrics.incr( + "monitors.tasks.clock_tick.tick_decision", + tags={"decision": result.decision}, + sample_rate=1.0, + ) + if result.transition: + metrics.incr( + "monitors.tasks.clock_tick.tick_transition", + tags={"transition": result.transition}, + sample_rate=1.0, + ) + + # When entering an incident record the starting timestamp of the incident + if result.transition == AnomalyTransition.INCIDENT_STARTED: + record_last_incident_ts(result.ts) + + # When exiting an incident prune check-in volume during that incident + if result.transition == AnomalyTransition.INCIDENT_RECOVERED: + if start := get_last_incident_ts(): + prune_incident_check_in_volume(start, result.ts) + else: + logger.error("recovered_without_start_ts") + + return result + + +def record_last_incident_ts(ts: datetime) -> None: + """ + Records the starting timestamp of the most recent system incident + """ + redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) + redis_client.set(MONITR_LAST_SYSTEM_INCIDENT_TS, int(ts.timestamp())) + + +def get_last_incident_ts() -> datetime | None: + """ + Retrieves the timestamp of the last system incident + """ + redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) + value = _int_or_none(redis_client.get(MONITR_LAST_SYSTEM_INCIDENT_TS)) + return datetime.fromtimestamp(value, UTC) if value else None + + +def prune_incident_check_in_volume(start: datetime, end: datetime) -> None: + """ + After recovering from a system incident the volume data must be discarded + to avoid skewing future computations. Note that the start time is inclusive + and the end time is exclusive. + """ + redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) + + # Length of the incident in minutes + length = int((end - start).total_seconds()) // 60 + + # XXX(epurkhiser): Because we make clock tick decisions at the timestamp of + # the clock ticking, we are storing the decision at the tick timestamp + # AFTER the tick timestamp where the volume and metric values are stored. + # + # Adjust for this by moving the start back a minute. + start = start - timedelta(minutes=1) + dates = (start + timedelta(minutes=offset) for offset in range(length)) + + # Batch deletes + for timestamp_batch in batched(dates, 30): + pipeline = redis_client.pipeline() + for ts in timestamp_batch: + pipeline.delete(MONITOR_VOLUME_HISTORY.format(ts=_make_reference_ts(ts))) + pipeline.execute() + + def record_clock_tick_volume_metric(tick: datetime) -> None: """ Look at the historic volume of check-ins for this tick over the last @@ -91,7 +178,7 @@ def record_clock_tick_volume_metric(tick: datetime) -> None: NOTE that this records a metric for the tick timestamp that we just ticked over. So when ticking at 12:01 the metric is recorded for 12:00.
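# Illustrative sketch (not from this patch): the window arithmetic used by
# `prune_incident_check_in_volume` above. The start is shifted back one minute
# because a tick's decision is stored one tick AFTER the minute whose volume
# and metric values it describes.
from datetime import datetime, timedelta

def incident_minutes(start: datetime, end: datetime) -> list[datetime]:
    length = int((end - start).total_seconds()) // 60  # incident length in minutes
    start = start - timedelta(minutes=1)               # adjust for the one-tick offset
    return [start + timedelta(minutes=offset) for offset in range(length)]

minutes = incident_minutes(datetime(2024, 12, 1, 12, 5), datetime(2024, 12, 1, 12, 8))
assert minutes[0] == datetime(2024, 12, 1, 12, 4)  # one minute before the incident start
assert len(minutes) == 3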
""" - if not options.get("crons.tick_volume_anomaly_detection"): + if not options.get("crons.system_incidents.collect_metrics"): return redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) @@ -120,10 +207,12 @@ def record_clock_tick_volume_metric(tick: datetime) -> None: # Can't make any decisions if we didn't have data for the past minute if past_minute_volume is None: + logger.info("past_minute_volume_missing", extra={"reference_datetime": tick}) return # We need AT LEAST two data points to calculate standard deviation if len(historic_volume) < 2: + logger.info("history_volume_low", extra={"reference_datetime": tick}) return # Record some statistics about the past_minute_volume volume in comparison @@ -155,7 +244,7 @@ def record_clock_tick_volume_metric(tick: datetime) -> None: metrics.gauge("monitors.task.volume_history.pct_deviation", pct_deviation, sample_rate=1.0) logger.info( - "monitors.system_incidents.volume_history", + "volume_history", extra={ "reference_datetime": str(tick), "evaluation_minute": past_ts.strftime("%H:%M"), @@ -223,6 +312,16 @@ class TickAnomalyDecision(StrEnum): either NORMAL or back into INCIDENT. """ + def is_pending(self) -> bool: + """ + Returns True when the decision is ABNORMAL or RECOVERING, indicating + that we are currently pending resolution of this decision. + """ + return self in [TickAnomalyDecision.ABNORMAL, TickAnomalyDecision.RECOVERING] + + def is_incident(self) -> bool: + return self == TickAnomalyDecision.INCIDENT + @classmethod def from_str(cls, st: str) -> TickAnomalyDecision: return cls[st.upper()] @@ -267,6 +366,16 @@ class AnomalyTransition(StrEnum): @dataclass class DecisionResult: + ts: datetime + """ + The associated timestamp of the decision. Typically this will be the clock + tick when the decision was made. However for a incident start and end + transitions this will be the back-dated timestamp of when the state began. 
+ + INCIDENT_STARTED -> Tick when the incident truly starts + INCIDENT_RECOVERED -> Tick when the incident truly recovered + """ + decision: TickAnomalyDecision """ The recorded decision made for the clock tick @@ -354,8 +463,8 @@ def make_clock_tick_decision(tick: datetime) -> DecisionResult: # Alias TickAnomalyDecision to improve code readability Decision = TickAnomalyDecision - if not options.get("crons.tick_volume_anomaly_detection"): - return DecisionResult(Decision.NORMAL) + if not options.get("crons.system_incidents.collect_metrics"): + return DecisionResult(tick, Decision.NORMAL) redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) @@ -395,6 +504,7 @@ def make_clock_tick_decision(tick: datetime) -> DecisionResult: def make_decision( decision: TickAnomalyDecision, transition: AnomalyTransition | None = None, + ts: datetime | None = None, ) -> DecisionResult: decision_key = MONITOR_TICK_DECISION.format(ts=_make_reference_ts(tick)) pipeline = redis_client.pipeline() @@ -402,7 +512,16 @@ def make_decision( pipeline.expire(decision_key, MONITOR_VOLUME_RETENTION) pipeline.execute() - return DecisionResult(decision, transition) + logger.info( + "clock_tick_decision", + extra={ + "reference_datetime": str(tick), + "decision": decision, + "transition": transition, + }, + ) + + return DecisionResult(ts or tick, decision, transition) def metrics_match(metric: Metric) -> Generator[bool]: return (d == metric for d in tick_metrics) @@ -434,8 +553,8 @@ def metrics_match(metric: Metric) -> Generator[bool]: # If the previous result was recovering, check if we have recovered and can # backfill these decisions as normal. if last_decision == Decision.RECOVERING and all(metrics_match(Metric.NORMAL)): - _backfill_decisions(past_ts, Decision.NORMAL, Decision.RECOVERING) - return make_decision(Decision.NORMAL, AnomalyTransition.INCIDENT_RECOVERED) + ts = _backfill_decisions(past_ts, Decision.NORMAL, Decision.RECOVERING) + return make_decision(Decision.NORMAL, AnomalyTransition.INCIDENT_RECOVERED, ts) # E: RECOVERING -> INCIDENT # @@ -451,8 +570,8 @@ def metrics_match(metric: Metric) -> Generator[bool]: # an incident, mark this tick as an incident and backfill all abnormal # decisions to an incident decision. if last_decision != Decision.INCIDENT and last_metric == Metric.INCIDENT: - _backfill_decisions(past_ts, Decision.INCIDENT, Decision.ABNORMAL) - return make_decision(Decision.INCIDENT, AnomalyTransition.INCIDENT_STARTED) + ts = _backfill_decisions(past_ts, Decision.INCIDENT, Decision.ABNORMAL) + return make_decision(Decision.INCIDENT, AnomalyTransition.INCIDENT_STARTED, ts) # NORMAL -> NORMAL # ABNORMAL -> ABNORMAL @@ -475,10 +594,16 @@ def get_clock_tick_decision(tick: datetime) -> TickAnomalyDecision | None: return None -def _backfill_keys(start: datetime, until_not: TickAnomalyDecision) -> Generator[str]: +@dataclass +class BackfillItem: + key: str + ts: datetime + + +def _make_backfill(start: datetime, until_not: TickAnomalyDecision) -> Generator[BackfillItem]: """ - Yields keys from the `start` tick until the value of the key is not a - `until_not` tick decision. + Yields keys and associated timestamps from the `start` tick until the value + of the key is not a `until_not` tick decision. 
""" redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) @@ -486,13 +611,15 @@ def _backfill_keys(start: datetime, until_not: TickAnomalyDecision) -> Generator pipeline = redis_client.pipeline() keys: list[str] = [] + timestamps: list[datetime] = [] for offset in chunked_offsets: ts = start - timedelta(minutes=offset) key = MONITOR_TICK_DECISION.format(ts=_make_reference_ts(ts)) pipeline.get(key) keys.append(key) + timestamps.append(ts) - for key, value in zip(keys, pipeline.execute()): + for key, ts, value in zip(keys, timestamps, pipeline.execute()): # Edge case, we found a hole gap in decisions if value is None: return @@ -502,29 +629,41 @@ def _backfill_keys(start: datetime, until_not: TickAnomalyDecision) -> Generator if prev_decision != until_not: return - yield key + yield BackfillItem(key, ts) # If we've iterated through the entire BACKFILL_CUTOFF we have a # "decision runaway" and should report this as an error - logger.error("sentry.system_incidents.decision_backfill_runaway") + logger.error("decision_backfill_runaway") def _backfill_decisions( start: datetime, decision: TickAnomalyDecision, until_not: TickAnomalyDecision, -) -> None: +) -> datetime | None: """ Update historic tick decisions from `start` to `decision` until we no longer see the `until_not` decision. + + If a backfill occurred, returns the timestamp just before """ redis_client = redis.redis_clusters.get(settings.SENTRY_MONITORS_REDIS_CLUSTER) pipeline = redis_client.pipeline() - for key in _backfill_keys(start, until_not): - pipeline.set(key, decision.value) + backfill_items = list(_make_backfill(start, until_not)) + + for item in backfill_items: + pipeline.set(item.key, decision.value) pipeline.execute() + # Return the timestamp just before we reached until_not. Note + # backfill_items is in reverse chronological order here. + if backfill_items: + return backfill_items[-1].ts + + # In the case that we didn't backfill anything return None + return None + def _make_reference_ts(ts: datetime): """ diff --git a/src/sentry/monitors/types.py b/src/sentry/monitors/types.py index afed758efa55f0..50f17140da8558 100644 --- a/src/sentry/monitors/types.py +++ b/src/sentry/monitors/types.py @@ -101,16 +101,6 @@ def from_dict(cls, data: CheckinItemData) -> CheckinItem: ) -class SimpleCheckIn(TypedDict): - """ - A stripped down check in object - """ - - id: int - date_added: datetime - status: int - - IntervalUnit = Literal["year", "month", "week", "day", "hour", "minute"] diff --git a/src/sentry/new_migrations/migrations.py b/src/sentry/new_migrations/migrations.py index ecb9968ebf677b..9dc3a5128e2807 100644 --- a/src/sentry/new_migrations/migrations.py +++ b/src/sentry/new_migrations/migrations.py @@ -1,4 +1,5 @@ -from django.db.migrations import Migration +from django.db.migrations import Migration, RunSQL +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException class CheckedMigration(Migration): @@ -16,7 +17,20 @@ class CheckedMigration(Migration): # the `owners-migrations` team. checked = True + # This determines whether we allow `RunSQL` to be used in migrations. We want to discourage this going forward, + # because it's hard for our framework to determine whether SQL is safe. It can also cause problems with setting + # lock/statement timeouts appropriately. 
+ allow_run_sql = False + def apply(self, project_state, schema_editor, collect_sql=False): if self.checked: schema_editor.safe = True + for op in self.operations: + if not self.allow_run_sql and type(op) is RunSQL: + raise UnsafeOperationException( + "Using RunSQL is unsafe because our migrations safety framework can't detect problems with the " + "migration. If you need to use RunSQL, set `allow_run_sql = True` and get approval from " + "`owners-migrations` to make sure that it's safe." + ) + return super().apply(project_state, schema_editor, collect_sql) diff --git a/src/sentry/new_migrations/monkey/__init__.py b/src/sentry/new_migrations/monkey/__init__.py index a6b52294b510c1..1f581c9e0a6323 100644 --- a/src/sentry/new_migrations/monkey/__init__.py +++ b/src/sentry/new_migrations/monkey/__init__.py @@ -1,5 +1,4 @@ from django import VERSION -from django.db import models from sentry.new_migrations.monkey.executor import SentryMigrationExecutor from sentry.new_migrations.monkey.fields import deconstruct @@ -19,6 +18,10 @@ is copied and modified from `Queryset.update()` to add `RETURNING ` to the update query. Verify that the `update` code hasn't significantly changed, and if it has update as needed. + - We monkeypatch `SentryProjectState` over `ProjectState` in a few places. Check where + Django is importing it and make sure that we're still patching correctly. + We also need to verify that the patched `SentryProjectState` isn't missing new + features added by Django. When you're happy that these changes are good to go, update `LAST_VERIFIED_DJANGO_VERSION` to the version of Django you're upgrading to. If the @@ -77,6 +80,8 @@ class Migration(CheckedMigration): def monkey_migrations(): + from django.db import models + # This import needs to be below the other imports for `executor` and `writer` so # that we can successfully monkeypatch them. 
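The import-ordering comment above matters because of how Python binds names at import time. The following toy snippet (standalone, not Sentry code) shows why a module that was imported before the patch keeps the unpatched reference if it used a `from x import f` style binding.

# Toy illustration of early binding: anything equivalent to `from x import f`
# executed before the patch keeps pointing at the original function.
import types

x = types.ModuleType("x")
x.f = lambda: "original"

f_alias = x.f            # simulates `from x import f` at import time
x.f = lambda: "patched"  # the monkeypatch happens afterwards

print(x.f())      # -> "patched": lookups that go through the module see the patch
print(f_alias())  # -> "original": the early-bound alias is unaffected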
from django.db.migrations import executor, migration, writer @@ -86,3 +91,11 @@ def monkey_migrations(): migration.Migration.initial = None writer.MIGRATION_TEMPLATE = SENTRY_MIGRATION_TEMPLATE models.Field.deconstruct = deconstruct # type: ignore[method-assign] + + from django.db.migrations import graph, state + + from sentry.new_migrations.monkey.state import SentryProjectState + + state.ProjectState = SentryProjectState # type: ignore[misc] + graph.ProjectState = SentryProjectState # type: ignore[attr-defined] + executor.ProjectState = SentryProjectState # type: ignore[attr-defined] diff --git a/src/sentry/new_migrations/monkey/fields.py b/src/sentry/new_migrations/monkey/fields.py index d45d426bbe6c0f..b11e363dfb68ca 100644 --- a/src/sentry/new_migrations/monkey/fields.py +++ b/src/sentry/new_migrations/monkey/fields.py @@ -1,4 +1,10 @@ -from django.db.models import Field +from django.db.migrations import RemoveField +from django.db.models import Field, ManyToManyField +from django.db.models.fields import NOT_PROVIDED +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException + +from sentry.db.postgres.schema import SafePostgresDatabaseSchemaEditor +from sentry.new_migrations.monkey.state import DeletionAction, SentryProjectState IGNORED_ATTRS = ["verbose_name", "help_text", "choices"] original_deconstruct = Field.deconstruct @@ -14,3 +20,66 @@ def deconstruct(self): for attr in IGNORED_ATTRS: kwargs.pop(attr, None) return name, path, args, kwargs + + +class SafeRemoveField(RemoveField): + def __init__(self, *args, deletion_action: DeletionAction, **kwargs): + super().__init__(*args, **kwargs) + self.deletion_action = deletion_action + + def state_forwards(self, app_label: str, state: SentryProjectState) -> None: # type: ignore[override] + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + field = state.apps.get_model(app_label, self.model_name_lower)._meta.get_field( + self.name_lower + ) + if getattr(field, "db_constraint", False): + raise UnsafeOperationException( + f"Foreign key db constraint must be removed before dropping {app_label}.{self.model_name_lower}.{self.name}. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + if ( + not isinstance(field, ManyToManyField) + and not field.null + and field.db_default is NOT_PROVIDED + ): + raise UnsafeOperationException( + f"Field {app_label}.{self.model_name_lower}.{self.name} must either be nullable or have a db_default before dropping. 
" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + + state.remove_field( + app_label, self.model_name_lower, self.name_lower, deletion_action=self.deletion_action + ) + + def database_forwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + + field = from_state.get_pending_deletion_field(app_label, self.model_name, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, field.model): + schema_editor.remove_field(field.model, field, is_safe=True) + + def database_backwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + field = to_state.get_pending_deletion_field(app_label, self.model_name, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, field.model): + schema_editor.add_field(field.model, field) + + def describe(self) -> str: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return f"Moved {self.model_name}.{self.name} field to pending deletion state" + else: + return super().describe() diff --git a/src/sentry/new_migrations/monkey/models.py b/src/sentry/new_migrations/monkey/models.py new file mode 100644 index 00000000000000..e744e11b356e70 --- /dev/null +++ b/src/sentry/new_migrations/monkey/models.py @@ -0,0 +1,58 @@ +from django.db.migrations import DeleteModel +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException + +from sentry.db.postgres.schema import SafePostgresDatabaseSchemaEditor +from sentry.new_migrations.monkey.state import DeletionAction, SentryProjectState + + +class SafeDeleteModel(DeleteModel): + def __init__(self, *args, deletion_action: DeletionAction, **kwargs): + super().__init__(*args, **kwargs) + self.deletion_action = deletion_action + + def state_forwards(self, app_label: str, state: SentryProjectState) -> None: # type: ignore[override] + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + model = state.apps.get_model(app_label, self.name) + fields_with_constraints = [ + f.name for f in model._meta.fields if getattr(f, "db_constraint", False) + ] + if fields_with_constraints: + raise UnsafeOperationException( + "Foreign key db constraints must be removed before dropping " + f"{app_label}.{self.name}. 
Fields with constraints: {fields_with_constraints}" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + state.remove_model(app_label, self.name_lower, deletion_action=self.deletion_action) + + def database_forwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + + model = from_state.get_pending_deletion_model(app_label, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, model): + schema_editor.delete_model(model, is_safe=True) + + def database_backwards( + self, + app_label: str, + schema_editor: SafePostgresDatabaseSchemaEditor, # type: ignore[override] + from_state: SentryProjectState, # type: ignore[override] + to_state: SentryProjectState, # type: ignore[override] + ) -> None: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return + model = to_state.get_pending_deletion_model(app_label, self.name) + if self.allow_migrate_model(schema_editor.connection.alias, model): + schema_editor.create_model(model) + + def describe(self) -> str: + if self.deletion_action == DeletionAction.MOVE_TO_PENDING: + return f"Moved model {self.name} to pending deletion state" + else: + return super().describe() diff --git a/src/sentry/new_migrations/monkey/state.py b/src/sentry/new_migrations/monkey/state.py new file mode 100644 index 00000000000000..aa78cf1b3569a1 --- /dev/null +++ b/src/sentry/new_migrations/monkey/state.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +from copy import deepcopy +from enum import Enum + +from django.db.migrations.state import ProjectState +from django.db.models import Field, Model +from django_zero_downtime_migrations.backends.postgres.schema import UnsafeOperationException + + +class DeletionAction(Enum): + MOVE_TO_PENDING = 0 + DELETE = 1 + + +class SentryProjectState(ProjectState): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.pending_deletion_models: dict[tuple[str, str], type[Model]] = {} + self.pending_deletion_fields: dict[tuple[str, str, str], type[Field]] = {} + + def get_pending_deletion_model(self, app_label: str, model_name: str) -> type[Model]: + model_key = (app_label.lower(), model_name.lower()) + if model_key not in self.pending_deletion_models: + raise UnsafeOperationException( + "Model must be in the pending deletion state before full deletion. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + return self.pending_deletion_models[model_key] + + def get_pending_deletion_field( + self, app_label: str, model_name: str, field_name: str + ) -> type[Field]: + field_key = (app_label.lower(), model_name.lower(), field_name.lower()) + if field_key not in self.pending_deletion_fields: + raise UnsafeOperationException( + "Field must be in the pending deletion state before full deletion. 
" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + return self.pending_deletion_fields[field_key] + + def remove_model( + self, app_label: str, model_name: str, deletion_action: DeletionAction | None = None + ) -> None: + model_key = (app_label.lower(), model_name.lower()) + if deletion_action == DeletionAction.DELETE: + if model_key not in self.pending_deletion_models: + raise UnsafeOperationException( + "Model must be in the pending deletion state before full deletion. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + del self.pending_deletion_models[model_key] + return + if deletion_action == DeletionAction.MOVE_TO_PENDING: + if model_key in self.pending_deletion_models: + raise UnsafeOperationException( + f"{app_label}.{model_name} is already pending deletion. Use DeletionAction.DELETE to delete" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-tables" + ) + self.pending_deletion_models[model_key] = self.apps.get_model(app_label, model_name) + super().remove_model(app_label, model_name) + + def remove_field( + self, + app_label: str, + model_name: str, + name: str, + deletion_action: DeletionAction | None = None, + ): + field_key = app_label.lower(), model_name.lower(), name.lower() + if deletion_action == DeletionAction.DELETE: + if field_key not in self.pending_deletion_fields: + raise UnsafeOperationException( + "Field must be in the pending deletion state before full deletion. " + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + del self.pending_deletion_fields[field_key] + return + + if deletion_action == DeletionAction.MOVE_TO_PENDING: + if field_key in self.pending_deletion_fields: + raise UnsafeOperationException( + f"{app_label}.{model_name}.{name} is already pending deletion. 
Use DeletionAction.DELETE to delete" + "More info: https://develop.sentry.dev/api-server/application-domains/database-migrations/#deleting-columns" + ) + self.pending_deletion_fields[field_key] = self.apps.get_model( + app_label, model_name + )._meta.get_field(name) + + super().remove_field(app_label, model_name, name) + + def clone(self) -> SentryProjectState: + new_state = super().clone() + new_state.pending_deletion_models = deepcopy(self.pending_deletion_models) # type: ignore[attr-defined] + new_state.pending_deletion_fields = deepcopy(self.pending_deletion_fields) # type: ignore[attr-defined] + return new_state # type: ignore[return-value] diff --git a/src/sentry/notifications/models/__init__.py b/src/sentry/notifications/models/__init__.py new file mode 100644 index 00000000000000..3b403cc7775ac4 --- /dev/null +++ b/src/sentry/notifications/models/__init__.py @@ -0,0 +1,25 @@ +from sentry.notifications.models.notificationaction import ( + ActionRegistration, + ActionService, + ActionTarget, + ActionTrigger, + NotificationAction, + NotificationActionProject, +) +from sentry.notifications.models.notificationmessage import NotificationMessage +from sentry.notifications.models.notificationsettingbase import NotificationSettingBase +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider + +__all__ = ( + "NotificationActionProject", + "NotificationAction", + "ActionService", + "ActionTrigger", + "ActionTarget", + "ActionRegistration", + "NotificationSettingBase", + "NotificationMessage", + "NotificationSettingOption", + "NotificationSettingProvider", +) diff --git a/src/sentry/notifications/models/notificationaction.py b/src/sentry/notifications/models/notificationaction.py new file mode 100644 index 00000000000000..c73d7560da6d85 --- /dev/null +++ b/src/sentry/notifications/models/notificationaction.py @@ -0,0 +1,322 @@ +from __future__ import annotations + +import logging +from abc import ABCMeta, abstractmethod +from collections.abc import Mapping, MutableMapping +from enum import IntEnum +from typing import TYPE_CHECKING, Any, TypeVar + +from django.db import models + +from sentry.backup.scopes import RelocationScope +from sentry.db.models import FlexibleForeignKey, Model, sane_repr +from sentry.db.models.base import region_silo_model +from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey +from sentry.integrations.types import ExternalProviders +from sentry.models.organization import Organization + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from sentry.api.serializers.rest_framework.notification_action import ( + NotificationActionInputData, + ) + from sentry.integrations.services.integration import RpcIntegration + + +class FlexibleIntEnum(IntEnum): + @classmethod + def as_choices(cls) -> tuple[tuple[int, str], ...]: + raise NotImplementedError + + @classmethod + def get_name(cls, value: int) -> str | None: + return dict(cls.as_choices()).get(value) + + @classmethod + def get_value(cls, name: str) -> int | None: + invert_choices = {v: k for k, v in cls.as_choices()} + return invert_choices.get(name) + + +class ActionService(FlexibleIntEnum): + """ + The available services to fire action notifications + """ + + EMAIL = 0 + PAGERDUTY = 1 + SLACK = 2 + MSTEAMS = 3 + SENTRY_APP = 4 + SENTRY_NOTIFICATION = 5 # Use personal notification platform (src/sentry/notifications) + OPSGENIE = 6 + DISCORD = 7 + + 
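As a quick illustration of the `FlexibleIntEnum` helpers these enums rely on, the standalone sketch below shows how `as_choices()` drives both `get_name()` and `get_value()`. The `Fruit` enum is made up for the example; the base class mirrors the one introduced in this patch.

from __future__ import annotations

from enum import IntEnum


class FlexibleIntEnum(IntEnum):
    # Same helpers as in notificationaction.py: subclasses define as_choices(),
    # and the name/value lookups are derived from it.
    @classmethod
    def as_choices(cls) -> tuple[tuple[int, str], ...]:
        raise NotImplementedError

    @classmethod
    def get_name(cls, value: int) -> str | None:
        return dict(cls.as_choices()).get(value)

    @classmethod
    def get_value(cls, name: str) -> int | None:
        return {v: k for k, v in cls.as_choices()}.get(name)


class Fruit(FlexibleIntEnum):  # hypothetical enum, for illustration only
    APPLE = 0
    PEAR = 1

    @classmethod
    def as_choices(cls) -> tuple[tuple[int, str], ...]:
        return ((cls.APPLE.value, "apple"), (cls.PEAR.value, "pear"))


assert Fruit.get_name(0) == "apple"
assert Fruit.get_value("pear") == 1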
@classmethod + def as_choices(cls) -> tuple[tuple[int, str], ...]: + assert ExternalProviders.EMAIL.name is not None + assert ExternalProviders.PAGERDUTY.name is not None + assert ExternalProviders.SLACK.name is not None + assert ExternalProviders.MSTEAMS.name is not None + assert ExternalProviders.OPSGENIE.name is not None + assert ExternalProviders.DISCORD.name is not None + return ( + (cls.EMAIL.value, ExternalProviders.EMAIL.name), + (cls.PAGERDUTY.value, ExternalProviders.PAGERDUTY.name), + (cls.SLACK.value, ExternalProviders.SLACK.name), + (cls.MSTEAMS.value, ExternalProviders.MSTEAMS.name), + (cls.SENTRY_APP.value, "sentry_app"), + (cls.SENTRY_NOTIFICATION.value, "sentry_notification"), + (cls.OPSGENIE.value, ExternalProviders.OPSGENIE.name), + (cls.DISCORD.value, ExternalProviders.DISCORD.name), + ) + + +class ActionTarget(FlexibleIntEnum): + """ + Explains the contents of target_identifier + """ + + # The target_identifier is a direct reference used by the service (e.g. email address, slack channel id) + SPECIFIC = 0 + # The target_identifier is an id from the User model in Sentry + USER = 1 + # The target_identifier is an id from the Team model in Sentry + TEAM = 2 + # The target_identifier is an id from the SentryApp model in Sentry + SENTRY_APP = 3 + + @classmethod + def as_choices(cls) -> tuple[tuple[int, str], ...]: + return ( + (cls.SPECIFIC.value, "specific"), + (cls.USER.value, "user"), + (cls.TEAM.value, "team"), + (cls.SENTRY_APP.value, "sentry_app"), + ) + + +class ActionTrigger(FlexibleIntEnum): + """ + The possible sources of action notifications. + Items prefixed with 'GS_' have registrations in getsentry. + """ + + AUDIT_LOG = 0 + GS_SPIKE_PROTECTION = 100 + + @classmethod + def as_choices(cls) -> tuple[tuple[int, str], ...]: + return ( + (cls.AUDIT_LOG.value, "audit-log"), + (cls.GS_SPIKE_PROTECTION.value, "spike-protection"), + ) + + +class ActionRegistration(metaclass=ABCMeta): + def __init__(self, action: NotificationAction): + self.action = action + + @abstractmethod + def fire(self, data: Any) -> None: + """ + Handles delivering the message via the service from the action and specified data. + """ + + @classmethod + def validate_action(cls, data: NotificationActionInputData) -> None: + """ + Optional function to provide increased validation when saving incoming NotificationActions. See NotificationActionSerializer. + + :param data: The input data sent to the API before updating/creating NotificationActions + :raises serializers.ValidationError: Indicates that the incoming action would apply to this registration but is not valid. + """ + + @classmethod + def serialize_available( + cls, organization: Organization, integrations: list[RpcIntegration] | None = None + ) -> list[Any]: + """ + Optional class method to serialize this registration's available actions to an organization. See NotificationActionsAvailableEndpoint. + + :param organization: The relevant organization which will receive the serialized available action in their response. + :param integrations: A list of integrations which are set up for the organization. + """ + return [] + + +ActionRegistrationT = TypeVar("ActionRegistrationT", bound=ActionRegistration) + + +class AbstractNotificationAction(Model): + """ + Abstract model meant to retroactively create a contract for notification actions + (e.g. metric alerts, spike protection, etc.) 
+ """ + + integration_id = HybridCloudForeignKey( + "sentry.Integration", blank=True, null=True, on_delete="CASCADE" + ) + sentry_app_id = HybridCloudForeignKey( + "sentry.SentryApp", blank=True, null=True, on_delete="CASCADE" + ) + + # The type of service which will receive the action notification (e.g. slack, pagerduty, etc.) + type = models.SmallIntegerField(choices=ActionService.as_choices()) + # The type of target which the service uses for routing (e.g. user, team) + target_type = models.SmallIntegerField(choices=ActionTarget.as_choices()) + # Identifier of the target for the given service (e.g. slack channel id, pagerdutyservice id) + target_identifier = models.TextField(null=True) + # User-friendly name of the target (e.g. #slack-channel, pagerduty-service-name) + target_display = models.TextField(null=True) + + @property + def service_type(self) -> int: + """ + Used for disambiguity of self.type + """ + return self.type + + class Meta: + abstract = True + + +@region_silo_model +class NotificationActionProject(Model): + __relocation_scope__ = {RelocationScope.Global, RelocationScope.Organization} + + project = FlexibleForeignKey("sentry.Project") + action = FlexibleForeignKey("sentry.NotificationAction") + + class Meta: + app_label = "sentry" + db_table = "sentry_notificationactionproject" + + def get_relocation_scope(self) -> RelocationScope: + action = NotificationAction.objects.get(id=self.action_id) + return action.get_relocation_scope() + + +@region_silo_model +class NotificationAction(AbstractNotificationAction): + """ + Generic notification action model to programmatically route depending on the trigger (or source) for the notification + """ + + __relocation_scope__ = {RelocationScope.Global, RelocationScope.Organization} + __repr__ = sane_repr("id", "trigger_type", "service_type", "target_display") + + _trigger_types: tuple[tuple[int, str], ...] = ActionTrigger.as_choices() + _registry: MutableMapping[str, type[ActionRegistration]] = {} + + organization = FlexibleForeignKey("sentry.Organization") + projects = models.ManyToManyField("sentry.Project", through=NotificationActionProject) + + # The type of trigger which controls when the actions will go off (e.g. 'spike-protection') + trigger_type = models.SmallIntegerField(choices=_trigger_types) + + class Meta: + app_label = "sentry" + db_table = "sentry_notificationaction" + + @classmethod + def register_action(cls, trigger_type: int, service_type: int, target_type: int): + """ + Register a new trigger/service/target combination for NotificationActions. + For example, allowing audit-logs (trigger) to fire actions to slack (service) channels (target) + + :param trigger_type: The registered trigger_type integer value saved to the database + :param service_type: The service_type integer value which must exist on ActionService + :param target_type: The target_type integer value which must exist on ActionTarget + :param registration: A subclass of `ActionRegistration`. + """ + + def inner(registration: type[ActionRegistrationT]) -> type[ActionRegistrationT]: + if trigger_type not in dict(ActionTrigger.as_choices()): + raise AttributeError( + f"Trigger type of {trigger_type} is not registered. Modify ActionTrigger." + ) + + if service_type not in dict(ActionService.as_choices()): + raise AttributeError( + f"Service type of {service_type} is not registered. Modify ActionService." + ) + + if target_type not in dict(ActionTarget.as_choices()): + raise AttributeError( + f"Target type of {target_type} is not registered. 
Modify ActionTarget." + ) + key = cls.get_registry_key(trigger_type, service_type, target_type) + if cls._registry.get(key) is not None: + raise AttributeError( + f"Existing registration found for trigger:{trigger_type}, service:{service_type}, target:{target_type}." + ) + + cls._registry[key] = registration + return registration + + return inner + + @classmethod + def get_trigger_types(cls): + return cls._trigger_types + + @classmethod + def get_trigger_text(self, trigger_type: int) -> str: + return dict(NotificationAction.get_trigger_types())[trigger_type] + + @classmethod + def get_registry_key(self, trigger_type: int, service_type: int, target_type: int) -> str: + return f"{trigger_type}:{service_type}:{target_type}" + + @classmethod + def get_registry(cls) -> Mapping[str, type[ActionRegistration]]: + return cls._registry + + @classmethod + def get_registration( + cls, trigger_type: int, service_type: int, target_type: int + ) -> type[ActionRegistration] | None: + key = cls.get_registry_key(trigger_type, service_type, target_type) + return cls._registry.get(key) + + def get_audit_log_data(self) -> dict[str, str]: + """ + Returns audit log data for NOTIFICATION_ACTION_ADD, NOTIFICATION_ACTION_EDIT + and NOTIFICATION_ACTION_REMOVE events + """ + return {"trigger": NotificationAction.get_trigger_text(self.trigger_type)} + + def fire(self, *args, **kwargs): + registration = NotificationAction.get_registration( + self.trigger_type, self.service_type, self.target_type + ) + if registration: + logger.info( + "fire_action", + extra={ + "action_id": self.id, + "trigger": NotificationAction.get_trigger_text(self.trigger_type), + "service": ActionService.get_name(self.service_type), + "target": ActionTarget.get_name(self.target_type), + }, + ) + return registration(action=self).fire(*args, **kwargs) + else: + logger.error( + "missing_registration", + extra={ + "id": self.id, + "service_type": self.service_type, + "trigger_type": self.trigger_type, + "target_type": self.target_type, + }, + ) + + def get_relocation_scope(self) -> RelocationScope: + if self.integration_id is not None or self.sentry_app_id is not None: + # TODO(getsentry/team-ospo#188): this should be extension scope once that gets added. 
+ return RelocationScope.Global + + return RelocationScope.Organization diff --git a/src/sentry/models/notificationmessage.py b/src/sentry/notifications/models/notificationmessage.py similarity index 100% rename from src/sentry/models/notificationmessage.py rename to src/sentry/notifications/models/notificationmessage.py diff --git a/src/sentry/models/notificationsettingbase.py b/src/sentry/notifications/models/notificationsettingbase.py similarity index 100% rename from src/sentry/models/notificationsettingbase.py rename to src/sentry/notifications/models/notificationsettingbase.py diff --git a/src/sentry/notifications/models/notificationsettingoption.py b/src/sentry/notifications/models/notificationsettingoption.py new file mode 100644 index 00000000000000..0f60f02b1081e9 --- /dev/null +++ b/src/sentry/notifications/models/notificationsettingoption.py @@ -0,0 +1,39 @@ +from django.db import models + +from sentry.backup.scopes import RelocationScope +from sentry.db.models import control_silo_model, sane_repr +from sentry.notifications.models.notificationsettingbase import NotificationSettingBase + + +@control_silo_model +class NotificationSettingOption(NotificationSettingBase): + __relocation_scope__ = RelocationScope.Excluded + + class Meta: + app_label = "sentry" + db_table = "sentry_notificationsettingoption" + unique_together = ( + ( + "scope_type", + "scope_identifier", + "user_id", + "team_id", + "type", + ), + ) + constraints = [ + models.CheckConstraint( + condition=models.Q(team_id__isnull=False, user_id__isnull=True) + | models.Q(team_id__isnull=True, user_id__isnull=False), + name="notification_setting_option_team_or_user_check", + ) + ] + + __repr__ = sane_repr( + "scope_type", + "scope_identifier", + "type", + "user_id", + "team_id", + "value", + ) diff --git a/src/sentry/notifications/models/notificationsettingprovider.py b/src/sentry/notifications/models/notificationsettingprovider.py new file mode 100644 index 00000000000000..57e947b4878c67 --- /dev/null +++ b/src/sentry/notifications/models/notificationsettingprovider.py @@ -0,0 +1,43 @@ +from django.db import models + +from sentry.backup.scopes import RelocationScope +from sentry.db.models import control_silo_model, sane_repr +from sentry.notifications.models.notificationsettingbase import NotificationSettingBase + + +@control_silo_model +class NotificationSettingProvider(NotificationSettingBase): + __relocation_scope__ = RelocationScope.Excluded + + provider = models.CharField(max_length=32, null=False) + + class Meta: + app_label = "sentry" + db_table = "sentry_notificationsettingprovider" + unique_together = ( + ( + "scope_type", + "scope_identifier", + "user_id", + "team_id", + "provider", + "type", + ), + ) + constraints = [ + models.CheckConstraint( + condition=models.Q(team_id__isnull=False, user_id__isnull=True) + | models.Q(team_id__isnull=True, user_id__isnull=False), + name="notification_setting_provider_team_or_user_check", + ) + ] + + __repr__ = sane_repr( + "scope_type", + "scope_identifier", + "user_id", + "team_id", + "provider", + "type", + "value", + ) diff --git a/src/sentry/notifications/notificationcontroller.py b/src/sentry/notifications/notificationcontroller.py index 9909452ea613ff..6c21d8dbd7cf77 100644 --- a/src/sentry/notifications/notificationcontroller.py +++ b/src/sentry/notifications/notificationcontroller.py @@ -14,8 +14,6 @@ ExternalProviderEnum, ExternalProviders, ) -from sentry.models.notificationsettingoption import NotificationSettingOption -from 
sentry.models.notificationsettingprovider import NotificationSettingProvider from sentry.models.organizationmapping import OrganizationMapping from sentry.models.team import Team from sentry.notifications.helpers import ( @@ -26,6 +24,8 @@ recipient_is_user, team_is_valid_recipient, ) +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider from sentry.notifications.types import ( GroupSubscriptionStatus, NotificationScopeEnum, diff --git a/src/sentry/notifications/notifications/activity/release.py b/src/sentry/notifications/notifications/activity/release.py index 8b767f85f2a4ea..ac0b73e2e6d769 100644 --- a/src/sentry/notifications/notifications/activity/release.py +++ b/src/sentry/notifications/notifications/activity/release.py @@ -106,10 +106,10 @@ def get_projects(self, recipient: Actor) -> set[Project]: else: team_ids = [recipient.id] - projects: set[Project] = Project.objects.get_for_team_ids(team_ids).filter( + projects = Project.objects.get_for_team_ids(team_ids).filter( id__in={p.id for p in self.projects} ) - return projects + return set(projects) def get_recipient_context( self, recipient: Actor, extra_context: Mapping[str, Any] diff --git a/src/sentry/notifications/serializers.py b/src/sentry/notifications/serializers.py index f4ef2f4144698a..f7ef5c1fc14771 100644 --- a/src/sentry/notifications/serializers.py +++ b/src/sentry/notifications/serializers.py @@ -4,8 +4,8 @@ from typing import Any from sentry.api.serializers import Serializer -from sentry.models.notificationsettingoption import NotificationSettingOption -from sentry.models.notificationsettingprovider import NotificationSettingProvider +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider class NotificationSettingsBaseSerializer(Serializer): diff --git a/src/sentry/notifications/services/impl.py b/src/sentry/notifications/services/impl.py index 9740cc34aa589a..0fde09dd733761 100644 --- a/src/sentry/notifications/services/impl.py +++ b/src/sentry/notifications/services/impl.py @@ -5,8 +5,8 @@ from django.db import router, transaction from sentry.integrations.types import EXTERNAL_PROVIDERS, ExternalProviderEnum, ExternalProviders -from sentry.models.notificationsettingoption import NotificationSettingOption -from sentry.models.notificationsettingprovider import NotificationSettingProvider +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider from sentry.notifications.notificationcontroller import NotificationController from sentry.notifications.services import NotificationsService from sentry.notifications.services.model import RpcSubscriptionStatus diff --git a/src/sentry/notifications/utils/__init__.py b/src/sentry/notifications/utils/__init__.py index f9aa249fcfbb72..370a6d5259305c 100644 --- a/src/sentry/notifications/utils/__init__.py +++ b/src/sentry/notifications/utils/__init__.py @@ -68,9 +68,7 @@ def get_release(activity: Activity, organization: Organization) -> Release | Non return None -def get_group_counts_by_project( - release: Release, projects: Iterable[Project] -) -> Mapping[Project, int]: +def get_group_counts_by_project(release: Release, projects: Iterable[Project]) -> dict[int, int]: return dict( 
Group.objects.filter( project__in=projects, diff --git a/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py b/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py index 2aed874fa9d4b7..c7510abb4c66c1 100644 --- a/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py +++ b/src/sentry/onboarding_tasks/backends/organization_onboarding_task.py @@ -1,3 +1,4 @@ +from django.contrib.auth.models import AnonymousUser from django.db import IntegrityError, router, transaction from django.db.models import Q from django.utils import timezone @@ -10,6 +11,8 @@ OrganizationOnboardingTask, ) from sentry.onboarding_tasks.base import OnboardingTaskBackend +from sentry.users.models.user import User +from sentry.users.services.user.model import RpcUser from sentry.utils import json @@ -27,7 +30,9 @@ def create_or_update_onboarding_task(self, organization, user, task, values): defaults={"user_id": user.id}, ) - def try_mark_onboarding_complete(self, organization_id): + def try_mark_onboarding_complete( + self, organization_id: int, user: User | RpcUser | AnonymousUser + ): if OrganizationOption.objects.filter( organization_id=organization_id, key="onboarding:complete" ).exists(): @@ -40,8 +45,8 @@ def try_mark_onboarding_complete(self, organization_id): ).values_list("task", flat=True) ) - organization = Organization.objects.get(id=organization_id) - if features.has("organizations:quick-start-updates", organization): + organization = Organization.objects.get_from_cache(id=organization_id) + if features.has("organizations:quick-start-updates", organization, actor=user): required_tasks = OrganizationOnboardingTask.NEW_REQUIRED_ONBOARDING_TASKS else: required_tasks = OrganizationOnboardingTask.REQUIRED_ONBOARDING_TASKS @@ -54,8 +59,6 @@ def try_mark_onboarding_complete(self, organization_id): key="onboarding:complete", value={"updated": json.datetime_to_str(timezone.now())}, ) - - organization = Organization.objects.get(id=organization_id) analytics.record( "onboarding.complete", user_id=organization.default_owner_id, diff --git a/src/sentry/onboarding_tasks/base.py b/src/sentry/onboarding_tasks/base.py index f17a743d3222c9..146a87f9b68adf 100644 --- a/src/sentry/onboarding_tasks/base.py +++ b/src/sentry/onboarding_tasks/base.py @@ -2,8 +2,13 @@ from typing import Generic, TypeVar +from django.contrib.auth.models import AnonymousUser + from sentry import features +from sentry.models.organization import Organization from sentry.models.organizationonboardingtask import AbstractOnboardingTask +from sentry.users.models.user import User +from sentry.users.services.user.model import RpcUser from sentry.utils.services import Service T = TypeVar("T", bound=AbstractOnboardingTask) @@ -26,8 +31,8 @@ def get_task_lookup_by_key(self, key): def get_status_lookup_by_key(self, key): return self.Model.STATUS_LOOKUP_BY_KEY.get(key) - def get_skippable_tasks(self, organization): - if features.has("organizations:quick-start-updates", organization): + def get_skippable_tasks(self, organization: Organization, user: User | RpcUser | AnonymousUser): + if features.has("organizations:quick-start-updates", organization, actor=user): return self.Model.NEW_SKIPPABLE_TASKS return self.Model.SKIPPABLE_TASKS @@ -37,5 +42,7 @@ def fetch_onboarding_tasks(self, organization, user): def create_or_update_onboarding_task(self, organization, user, task, values): raise NotImplementedError - def try_mark_onboarding_complete(self, organization_id): + def try_mark_onboarding_complete( + self, 
organization_id: int, user: User | RpcUser | AnonymousUser + ): raise NotImplementedError diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 49c13bc991020d..56868202949446 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2010,10 +2010,28 @@ # Killswitch for monitor check-ins register("crons.organization.disable-check-in", type=Sequence, default=[]) -# Enables system incident anomaly detection based on the volume of check-ins -# being processed + +# Temporary killswitch to enable dispatching incident occurrences into the +# incident_occurrence_consumer register( - "crons.tick_volume_anomaly_detection", + "crons.dispatch_incident_occurrences_to_consumer", + default=False, + flags=FLAG_BOOL | FLAG_AUTOMATOR_MODIFIABLE, +) + +# Enables recording tick volume metrics and tick decisions based on those +# metrics. Decisions are used to delay notifications in a system incident. +register( + "crons.system_incidents.collect_metrics", + default=False, + flags=FLAG_BOOL | FLAG_AUTOMATOR_MODIFIABLE, +) + +# Enables the crons incident occurrence consumer to consider the clock-tick +# decision made based on volume metrics to determine if an incident occurrence +# should be processed, delayed, or dropped entirely. +register( + "crons.system_incidents.use_decisions", default=False, flags=FLAG_BOOL | FLAG_AUTOMATOR_MODIFIABLE, ) @@ -2348,12 +2366,29 @@ # Relocation: the step at which new relocations should be autopaused, requiring admin approval # before continuing. +# DEPRECATED: will be removed after the new `relocation.autopause.*` options are fully rolled out. register( "relocation.autopause", default="", flags=FLAG_AUTOMATOR_MODIFIABLE, ) +# Relocation: the step at which new `SELF_HOSTED` relocations should be autopaused, requiring an +# admin to unpause before continuing. +register( + "relocation.autopause.self-hosted", + default="", + flags=FLAG_AUTOMATOR_MODIFIABLE, +) + +# Relocation: the step at which new `SAAS_TO_SAAS` relocations should be autopaused, requiring an +# admin to unpause before continuing. +register( + "relocation.autopause.saas-to-saas", + default="", + flags=FLAG_AUTOMATOR_MODIFIABLE, +) + # Relocation: globally limits the number of small (<=10MB) relocations allowed per silo per day. register( "relocation.daily-limit.small", @@ -2858,24 +2893,17 @@ flags=FLAG_AUTOMATOR_MODIFIABLE, ) -# killswitch for profile consumers outcome emission. -# If false, processed outcomes for profiles will keep -# being emitted in the billing metrics consumer.
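To clarify how the two `crons.system_incidents.*` options registered above are intended to interact, here is a small sketch: `collect_metrics` controls whether tick decisions exist at all, while `use_decisions` controls whether the incident occurrence consumer acts on them. The helper name and its call site are hypothetical; only the option names and their descriptions come from this patch.

# Hypothetical helper, not part of the patch.
from sentry import options


def should_act_on_tick_decision(decision_is_incident: bool) -> bool:
    # No metrics collected means no decisions were recorded to consult.
    if not options.get("crons.system_incidents.collect_metrics"):
        return False
    # Decisions are recorded but stay advisory until use_decisions is enabled.
    if not options.get("crons.system_incidents.use_decisions"):
        return False
    return decision_is_incident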
-# -# If true, we'll stop emitting processed outcomes for -# profiles in the billing metrics consumer and we'll -# start emitting them in the profiling consumers +# option for sample size when fetching project tag keys register( - "profiling.emit_outcomes_in_profiling_consumer.enabled", - default=False, - type=Bool, + "visibility.tag-key-sample-size", + default=1_000_000, flags=FLAG_AUTOMATOR_MODIFIABLE, ) -# option for sample size when fetching project tag keys +# option for clamping project tag key date range register( - "visibility.tag-key-sample-size", - default=1_000_000, + "visibility.tag-key-max-date-range.days", + default=14, flags=FLAG_AUTOMATOR_MODIFIABLE, ) @@ -2897,3 +2925,29 @@ default=[], flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE, ) +register( + "performance.event-tracker.sample-rate.transactions", + default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) + +# migrating send_alert_event task to not pass Event +register( + "sentryapps.send_alert_event.use-eventid", + type=Float, + default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) +register( + "transactions.do_post_process_in_save", + default=0.0, + flags=FLAG_AUTOMATOR_MODIFIABLE | FLAG_RATE, +) + +# allows us to disable indexing during maintenance events +register( + "sentry.similarity.indexing.enabled", + default=True, + type=Bool, + flags=FLAG_AUTOMATOR_MODIFIABLE, +) diff --git a/src/sentry/organizations/services/organization/impl.py b/src/sentry/organizations/services/organization/impl.py index 136ec177bad66a..033aecddd3bd28 100644 --- a/src/sentry/organizations/services/organization/impl.py +++ b/src/sentry/organizations/services/organization/impl.py @@ -17,9 +17,9 @@ from sentry.hybridcloud.outbox.category import OutboxCategory, OutboxScope from sentry.hybridcloud.rpc import OptionValue, logger from sentry.incidents.models.alert_rule import AlertRule, AlertRuleActivity -from sentry.incidents.models.incident import IncidentActivity, IncidentSubscription +from sentry.incidents.models.incident import IncidentActivity from sentry.models.activity import Activity -from sentry.models.dashboard import Dashboard +from sentry.models.dashboard import Dashboard, DashboardFavoriteUser from sentry.models.dynamicsampling import CustomDynamicSamplingRule from sentry.models.groupassignee import GroupAssignee from sentry.models.groupbookmark import GroupBookmark @@ -590,6 +590,7 @@ def merge_users(self, *, organization_id: int, from_user_id: int, to_user_id: in AlertRuleActivity, CustomDynamicSamplingRule, Dashboard, + DashboardFavoriteUser, GroupAssignee, GroupBookmark, GroupSeen, @@ -597,7 +598,6 @@ def merge_users(self, *, organization_id: int, from_user_id: int, to_user_id: in GroupSearchView, GroupSubscription, IncidentActivity, - IncidentSubscription, OrganizationAccessRequest, ProjectBookmark, RecentSearch, diff --git a/src/sentry/plugins/config.py b/src/sentry/plugins/config.py index 0aaf870be57eb4..2b53fd594815e2 100644 --- a/src/sentry/plugins/config.py +++ b/src/sentry/plugins/config.py @@ -83,12 +83,6 @@ def validate_field(self, name, value): class PluginConfigMixin(ProviderMixin): - asset_key = None - assets = [] - - def get_assets(self): - return self.assets - def get_metadata(self): """ Return extra metadata which is used to represent this plugin. 
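Going back to the `visibility.tag-key-max-date-range.days` option registered in defaults.py above, a small usage sketch follows. The helper below is hypothetical; only the option name and its default of 14 days come from this patch.

# Hypothetical helper, not part of the patch: clamp the start of a tag-key
# query window to the configured maximum number of days.
from datetime import datetime, timedelta, timezone

from sentry import options


def clamp_tag_key_start(start: datetime, end: datetime) -> datetime:
    max_days = options.get("visibility.tag-key-max-date-range.days")
    earliest_allowed = end - timedelta(days=max_days)
    return max(start, earliest_allowed)


end = datetime.now(timezone.utc)
start = clamp_tag_key_start(end - timedelta(days=90), end)  # clamped to 14 days back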
diff --git a/src/sentry/plugins/providers/base.py b/src/sentry/plugins/providers/base.py index 88acf22ae7ec89..a7befe1f862187 100644 --- a/src/sentry/plugins/providers/base.py +++ b/src/sentry/plugins/providers/base.py @@ -8,6 +8,7 @@ from sentry.exceptions import InvalidIdentity, PluginError from sentry.integrations.services.integration import integration_service from sentry.organizations.services.organization.serial import serialize_rpc_organization +from sentry.users.models.user import User from sentry.users.services.user import RpcUser from sentry.users.services.usersocialauth.model import RpcUserSocialAuth from sentry.users.services.usersocialauth.service import usersocialauth_service @@ -98,7 +99,7 @@ def needs_auth(self, user, **kwargs): ) return len(auths) == 0 - def get_auth(self, user: RpcUser, **kwargs) -> RpcUserSocialAuth | None: + def get_auth(self, user: RpcUser | User, **kwargs) -> RpcUserSocialAuth | None: if self.auth_provider is None: return None diff --git a/src/sentry/profiles/flamegraph.py b/src/sentry/profiles/flamegraph.py index d732f9f6a01537..dd105dab14b504 100644 --- a/src/sentry/profiles/flamegraph.py +++ b/src/sentry/profiles/flamegraph.py @@ -6,11 +6,9 @@ from snuba_sdk import ( And, - BooleanCondition, Column, Condition, Direction, - Entity, Function, Limit, Op, @@ -26,245 +24,10 @@ from sentry.search.events.builder.profile_functions import ProfileFunctionsQueryBuilder from sentry.search.events.fields import resolve_datetime64 from sentry.search.events.types import QueryBuilderConfig, SnubaParams -from sentry.snuba import functions -from sentry.snuba.dataset import Dataset, EntityKey, StorageKey +from sentry.snuba.dataset import Dataset, StorageKey from sentry.snuba.referrer import Referrer from sentry.utils.iterators import chunked -from sentry.utils.snuba import bulk_snuba_queries, raw_snql_query - - -class StartEnd(TypedDict): - start: str - end: str - - -class ProfileIds(TypedDict): - profile_ids: list[str] - - -def get_profile_ids( - snuba_params: SnubaParams, - query: str | None = None, -) -> ProfileIds: - builder = DiscoverQueryBuilder( - dataset=Dataset.Discover, - params={}, - snuba_params=snuba_params, - query=query, - selected_columns=["profile.id"], - limit=options.get("profiling.flamegraph.profile-set.size"), - ) - - builder.add_conditions( - [ - Condition(Column("type"), Op.EQ, "transaction"), - Condition(Column("profile_id"), Op.IS_NOT_NULL), - ] - ) - - result = builder.run_query(Referrer.API_PROFILING_PROFILE_FLAMEGRAPH.value) - - return {"profile_ids": [row["profile.id"] for row in result["data"]]} - - -def get_profiles_with_function( - organization_id: int, - project_id: int, - function_fingerprint: int, - snuba_params: SnubaParams, - query: str, -) -> ProfileIds: - conditions = [query, f"fingerprint:{function_fingerprint}"] - - result = functions.query( - selected_columns=["timestamp", "unique_examples()"], - query=" ".join(cond for cond in conditions if cond), - snuba_params=snuba_params, - limit=100, - orderby=["-timestamp"], - referrer=Referrer.API_PROFILING_FUNCTION_SCOPED_FLAMEGRAPH.value, - auto_aggregations=True, - use_aggregate_conditions=True, - transform_alias_to_input_format=True, - ) - - def extract_profile_ids() -> list[str]: - max_profiles = options.get("profiling.flamegraph.profile-set.size") - profile_ids = [] - - for i in range(5): - for row in result["data"]: - examples = row["unique_examples()"] - if i < len(examples): - profile_ids.append(examples[i]) - - if len(profile_ids) >= max_profiles: - return profile_ids - - 
return profile_ids - - return {"profile_ids": extract_profile_ids()} - - -class IntervalMetadata(TypedDict): - start: str - end: str - active_thread_id: str - - -def get_spans_from_group( - organization_id: int, - project_id: int, - snuba_params: SnubaParams, - span_group: str, -) -> dict[str, list[IntervalMetadata]]: - query = Query( - match=Entity(EntityKey.Spans.value), - select=[ - Column("start_timestamp_precise"), - Column("end_timestamp_precise"), - Function( - "arrayElement", - parameters=[ - Column("sentry_tags.value"), - Function( - "indexOf", - parameters=[ - Column("sentry_tags.key"), - "profiler_id", - ], - ), - ], - alias="profiler_id", - ), - Function( - "arrayElement", - parameters=[ - Column("sentry_tags.value"), - Function( - "indexOf", - parameters=[ - Column("sentry_tags.key"), - "thread.id", - ], - ), - ], - alias="active_thread_id", - ), - ], - where=[ - Condition(Column("project_id"), Op.EQ, project_id), - Condition(Column("timestamp"), Op.GTE, snuba_params.start), - Condition(Column("timestamp"), Op.LT, snuba_params.end), - Condition(Column("group"), Op.EQ, span_group), - Condition(Column("profiler_id"), Op.NEQ, ""), - ], - limit=Limit(100), - ) - request = Request( - dataset=Dataset.SpansIndexed.value, - app_id="default", - query=query, - tenant_ids={ - "referrer": Referrer.API_PROFILING_FLAMEGRAPH_SPANS_WITH_GROUP.value, - "organization_id": organization_id, - }, - ) - data = raw_snql_query( - request, - referrer=Referrer.API_PROFILING_FLAMEGRAPH_SPANS_WITH_GROUP.value, - )["data"] - spans: dict[str, list[IntervalMetadata]] = defaultdict(list) - for row in data: - spans[row["profiler_id"]].append( - { - "active_thread_id": row["active_thread_id"], - "start": row["start_timestamp_precise"], - "end": row["end_timestamp_precise"], - } - ) - - return spans - - -class SpanMetadata(TypedDict): - profiler_id: list[IntervalMetadata] - - -def get_chunk_snuba_conditions_from_spans_metadata( - spans: dict[str, list[IntervalMetadata]], -) -> list[BooleanCondition | Condition]: - cond = [] - for profiler_id, intervals in spans.items(): - chunk_range_cond = [] - for interval in intervals: - start = interval.get("start") - end = interval.get("end") - chunk_range_cond.append( - And( - [ - Condition(Column("end_timestamp"), Op.GTE, start), - Condition(Column("start_timestamp"), Op.LT, end), - ], - ) - ) - cond.append( - And( - [ - Condition(Column("profiler_id"), Op.EQ, profiler_id), - Or(chunk_range_cond) if len(chunk_range_cond) >= 2 else chunk_range_cond[0], - ] - ) - ) - return [Or(cond)] if len(cond) >= 2 else cond - - -def get_chunks_from_spans_metadata( - organization_id: int, - project_id: int, - spans: dict[str, list[IntervalMetadata]], -) -> list[dict[str, Any]]: - query = Query( - match=Storage(StorageKey.ProfileChunks.value), - select=[ - Column("profiler_id"), - Column("chunk_id"), - ], - where=[Condition(Column("project_id"), Op.EQ, project_id)] - + get_chunk_snuba_conditions_from_spans_metadata(spans), - limit=Limit(100), - ) - request = Request( - dataset=Dataset.Profiles.value, - app_id="default", - query=query, - tenant_ids={ - "referrer": Referrer.API_PROFILING_FLAMEGRAPH_CHUNKS_FROM_SPANS.value, - "organization_id": organization_id, - }, - ) - data = raw_snql_query( - request, - referrer=Referrer.API_PROFILING_FLAMEGRAPH_CHUNKS_FROM_SPANS.value, - )["data"] - chunks = [] - for row in data: - intervals = [ - { - "start": str(int(datetime.fromisoformat(el["start"]).timestamp() * 1e9)), - "end": str(int(datetime.fromisoformat(el["end"]).timestamp() * 1e9)), - 
"active_thread_id": el["active_thread_id"], - } - for el in spans[row["profiler_id"]] - ] - chunks.append( - { - "profiler_id": row["profiler_id"], - "chunk_id": row["chunk_id"], - "span_intervals": intervals, - } - ) - return chunks +from sentry.utils.snuba import bulk_snuba_queries class TransactionProfileCandidate(TypedDict): @@ -295,7 +58,7 @@ class ProfilerMeta: thread_id: str start: float end: float - transaction_id: str + transaction_id: str | None = None def as_condition(self) -> Condition: return And( @@ -341,14 +104,13 @@ def get_profile_candidates(self) -> ProfileCandidates: raise NotImplementedError def get_profile_candidates_from_functions(self) -> ProfileCandidates: - # TODO: continuous profiles support max_profiles = options.get("profiling.flamegraph.profile-set.size") builder = ProfileFunctionsQueryBuilder( dataset=Dataset.Functions, params={}, snuba_params=self.snuba_params, - selected_columns=["project.id", "timestamp", "unique_examples()"], + selected_columns=["project.id", "timestamp", "all_examples()"], query=self.query, limit=max_profiles, config=QueryBuilderConfig( @@ -365,49 +127,58 @@ def get_profile_candidates_from_functions(self) -> ProfileCandidates: results = builder.process_results(results) transaction_profile_candidates: list[TransactionProfileCandidate] = [] + profiler_metas: list[ProfilerMeta] = [] for row in results["data"]: project = row["project.id"] - for example in row["unique_examples()"]: + for example in row["all_examples()"]: if len(transaction_profile_candidates) > max_profiles: break - transaction_profile_candidates.append( - { - "project_id": project, - "profile_id": example, - } - ) + + if "profile_id" in example: + transaction_profile_candidates.append( + { + "project_id": project, + "profile_id": example["profile_id"], + } + ) + elif "profiler_id" in example: + profiler_metas.append( + ProfilerMeta( + project_id=project, + profiler_id=example["profiler_id"], + thread_id=example["thread_id"], + start=example["start"], + end=example["end"], + ) + ) + else: + # Will go to break if the inner loop breaks + continue + break + + max_continuous_profile_candidates = max( + max_profiles - len(transaction_profile_candidates), 0 + ) return { "transaction": transaction_profile_candidates, - "continuous": [], + "continuous": self.get_chunks_for_profilers( + profiler_metas, + max_continuous_profile_candidates, + ), } def get_profile_candidates_from_transactions(self) -> ProfileCandidates: - builder = self.get_transactions_based_candidate_query(query=self.query) + max_profiles = options.get("profiling.flamegraph.profile-set.size") + + builder = self.get_transactions_based_candidate_query(query=self.query, limit=max_profiles) results = builder.run_query( Referrer.API_PROFILING_PROFILE_FLAMEGRAPH_TRANSACTION_CANDIDATES.value, ) results = builder.process_results(results) - continuous_profile_candidates: list[ContinuousProfileCandidate] = ( - self.get_chunks_for_profilers( - [ - ProfilerMeta( - project_id=row["project.id"], - profiler_id=row["profiler.id"], - thread_id=row["thread.id"], - start=row["precise.start_ts"], - end=row["precise.finish_ts"], - transaction_id=row["id"], - ) - for row in results["data"] - if row["profiler.id"] is not None and row["thread.id"] - ] - ) - ) - transaction_profile_candidates: list[TransactionProfileCandidate] = [ { "project_id": row["project.id"], @@ -417,14 +188,32 @@ def get_profile_candidates_from_transactions(self) -> ProfileCandidates: if row["profile.id"] is not None ] + max_continuous_profile_candidates = max( + 
max_profiles - len(transaction_profile_candidates), 0 + ) + return { "transaction": transaction_profile_candidates, - "continuous": continuous_profile_candidates, + "continuous": self.get_chunks_for_profilers( + [ + ProfilerMeta( + project_id=row["project.id"], + profiler_id=row["profiler.id"], + thread_id=row["thread.id"], + start=row["precise.start_ts"], + end=row["precise.finish_ts"], + transaction_id=row["id"], + ) + for row in results["data"] + if row["profiler.id"] is not None and row["thread.id"] + ], + max_continuous_profile_candidates, + ), } - def get_transactions_based_candidate_query(self, query: str | None) -> DiscoverQueryBuilder: - max_profiles = options.get("profiling.flamegraph.profile-set.size") - + def get_transactions_based_candidate_query( + self, query: str | None, limit: int + ) -> DiscoverQueryBuilder: builder = DiscoverQueryBuilder( dataset=Dataset.Discover, params={}, @@ -441,7 +230,7 @@ def get_transactions_based_candidate_query(self, query: str | None) -> DiscoverQ ], query=query, orderby=["-timestamp"], - limit=max_profiles, + limit=limit, config=QueryBuilderConfig( transform_alias_to_input_format=True, ), @@ -470,7 +259,7 @@ def get_transactions_based_candidate_query(self, query: str | None) -> DiscoverQ return builder def get_chunks_for_profilers( - self, profiler_metas: list[ProfilerMeta] + self, profiler_metas: list[ProfilerMeta], limit: int ) -> list[ContinuousProfileCandidate]: if len(profiler_metas) == 0: return [] @@ -499,17 +288,30 @@ def get_chunks_for_profilers( if start > profiler_meta.end or end < profiler_meta.start: continue - continuous_profile_candidates.append( - { - "project_id": profiler_meta.project_id, - "profiler_id": profiler_meta.profiler_id, - "chunk_id": row["chunk_id"], - "thread_id": profiler_meta.thread_id, - "start": str(int(profiler_meta.start * 1.0e9)), - "end": str(int(profiler_meta.end * 1.0e9)), - "transaction_id": profiler_meta.transaction_id, - } - ) + if len(continuous_profile_candidates) > limit: + break + + candidate: ContinuousProfileCandidate = { + "project_id": profiler_meta.project_id, + "profiler_id": profiler_meta.profiler_id, + "chunk_id": row["chunk_id"], + "thread_id": profiler_meta.thread_id, + "start": str(int(profiler_meta.start * 1e9)), + "end": str(int(profiler_meta.end * 1e9)), + } + + if profiler_meta.transaction_id is not None: + candidate["transaction_id"] = profiler_meta.transaction_id + + continuous_profile_candidates.append(candidate) + else: + # Will go to break if the inner loop breaks + continue + break + else: + # Will go to break if the inner loop breaks + continue + break return continuous_profile_candidates @@ -594,7 +396,9 @@ def get_profile_candidates_from_profiles(self) -> ProfileCandidates: referrer = Referrer.API_PROFILING_PROFILE_FLAMEGRAPH_PROFILE_CANDIDATES.value - transaction_profiles_builder = self.get_transactions_based_candidate_query(query=None) + transaction_profiles_builder = self.get_transactions_based_candidate_query( + query=None, limit=max_profiles + ) project_condition = Condition( Column("project_id"), @@ -656,6 +460,10 @@ def get_profile_candidates_from_profiles(self) -> ProfileCandidates: if row["profile.id"] is not None ] + max_continuous_profile_candidates = max( + max_profiles - len(transaction_profile_candidates), 0 + ) + profiler_metas = [ ProfilerMeta( project_id=row["project.id"], @@ -674,7 +482,9 @@ def get_profile_candidates_from_profiles(self) -> ProfileCandidates: # If there are continuous profiles attached to transactions, we prefer those as # the active 
thread id gives us more user friendly flamegraphs than without. if profiler_metas: - continuous_profile_candidates = self.get_chunks_for_profilers(profiler_metas) + continuous_profile_candidates = self.get_chunks_for_profilers( + profiler_metas, max_continuous_profile_candidates + ) # If we still don't have any continuous profile candidates, we'll fall back to # directly using the continuous profiling data @@ -689,7 +499,7 @@ def get_profile_candidates_from_profiles(self) -> ProfileCandidates: ), "end": str(int(datetime.fromisoformat(row["end_timestamp"]).timestamp() * 1e9)), } - for row in continuous_profile_results["data"] + for row in continuous_profile_results["data"][:max_continuous_profile_candidates] ] return { diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py index 254d5f5555ec36..71cc3c98db3bf2 100644 --- a/src/sentry/profiles/task.py +++ b/src/sentry/profiles/task.py @@ -179,9 +179,7 @@ def process_profile_task( except Exception as e: sentry_sdk.capture_exception(e) - if options.get("profiling.stack_trace_rules.enabled") and project.id in options.get( - "profiling.stack_trace_rules.allowed_project_ids" - ): + if options.get("profiling.stack_trace_rules.enabled"): try: with metrics.timer("process_profile.apply_stack_trace_rules"): rules_config = project.get_option("sentry:grouping_enhancements") @@ -205,22 +203,15 @@ def process_profile_task( except Exception as e: sentry_sdk.capture_exception(e) if "profiler_id" not in profile: - if options.get("profiling.emit_outcomes_in_profiling_consumer.enabled"): - _track_outcome( - profile=profile, - project=project, - outcome=Outcome.ACCEPTED, - categories=[DataCategory.PROFILE, DataCategory.PROFILE_INDEXED], - ) - else: - _track_outcome_legacy( - profile=profile, project=project, outcome=Outcome.ACCEPTED - ) + _track_outcome( + profile=profile, + project=project, + outcome=Outcome.ACCEPTED, + categories=[DataCategory.PROFILE, DataCategory.PROFILE_INDEXED], + ) + else: - if ( - options.get("profiling.emit_outcomes_in_profiling_consumer.enabled") - and "profiler_id" not in profile - ): + if "profiler_id" not in profile: _track_outcome( profile=profile, project=project, @@ -506,6 +497,7 @@ def symbolicate( ) -> Any: if platform in SHOULD_SYMBOLICATE_JS: return symbolicator.process_js( + platform=platform, stacktraces=stacktraces, modules=modules, release=profile.get("release"), @@ -514,6 +506,7 @@ def symbolicate( ) elif platform == "android": return symbolicator.process_jvm( + platform=platform, exceptions=[], stacktraces=stacktraces, modules=modules, @@ -522,7 +515,7 @@ def symbolicate( classes=[], ) return symbolicator.process_payload( - stacktraces=stacktraces, modules=modules, apply_source_context=False + platform=platform, stacktraces=stacktraces, modules=modules, apply_source_context=False ) @@ -916,26 +909,6 @@ def get_data_category(profile: Profile) -> DataCategory: return DataCategory.PROFILE_INDEXED -@metrics.wraps("process_profile.track_outcome") -def _track_outcome_legacy( - profile: Profile, - project: Project, - outcome: Outcome, - reason: str | None = None, -) -> None: - track_outcome( - org_id=project.organization_id, - project_id=project.id, - key_id=None, - outcome=outcome, - reason=reason, - timestamp=datetime.now(timezone.utc), - event_id=get_event_id(profile), - category=get_data_category(profile), - quantity=1, - ) - - @metrics.wraps("process_profile.track_outcome") def _track_outcome( profile: Profile, @@ -960,28 +933,20 @@ def _track_outcome( def _track_failed_outcome(profile: Profile, project: 
Project, reason: str) -> None: - if options.get("profiling.emit_outcomes_in_profiling_consumer.enabled"): - categories = [] - if "profiler_id" not in profile: - categories.append(DataCategory.PROFILE) - if profile.get("sampled"): - categories.append(DataCategory.PROFILE_INDEXED) - else: - categories.append(DataCategory.PROFILE_CHUNK) - _track_outcome( - profile=profile, - project=project, - outcome=Outcome.INVALID, - categories=categories, - reason=reason, - ) + categories = [] + if "profiler_id" not in profile: + categories.append(DataCategory.PROFILE) + if profile.get("sampled"): + categories.append(DataCategory.PROFILE_INDEXED) else: - _track_outcome_legacy( - profile=profile, - project=project, - outcome=Outcome.INVALID, - reason=reason, - ) + categories.append(DataCategory.PROFILE_CHUNK) + _track_outcome( + profile=profile, + project=project, + outcome=Outcome.INVALID, + categories=categories, + reason=reason, + ) @metrics.wraps("process_profile.insert_vroom_profile") diff --git a/src/sentry/projects/services/project/impl.py b/src/sentry/projects/services/project/impl.py index b5d73ae6e28153..e935baa96ed331 100644 --- a/src/sentry/projects/services/project/impl.py +++ b/src/sentry/projects/services/project/impl.py @@ -2,6 +2,7 @@ from django.db import router, transaction +from sentry.api.helpers.default_symbol_sources import set_default_symbol_sources from sentry.api.serializers import ProjectSerializer from sentry.auth.services.auth import AuthenticationContext from sentry.constants import ObjectStatus @@ -126,6 +127,8 @@ def create_project_for_organization( if team: project.add_team(team) + set_default_symbol_sources(project) + project_created.send( project=project, default_rules=True, diff --git a/src/sentry/receivers/onboarding.py b/src/sentry/receivers/onboarding.py index 9caa9c85bc52f6..97d8976ebcba98 100644 --- a/src/sentry/receivers/onboarding.py +++ b/src/sentry/receivers/onboarding.py @@ -104,7 +104,13 @@ def record_new_project(project, user=None, user_id=None, **kwargs): organization_id=project.organization_id, task=OnboardingTask.SECOND_PLATFORM, user_id=user_id, - status=OnboardingTaskStatus.PENDING, + status=( + OnboardingTaskStatus.COMPLETE + if features.has( + "organizations:quick-start-updates", project.organization, actor=user + ) + else OnboardingTaskStatus.PENDING + ), project_id=project.id, ) @@ -131,7 +137,9 @@ def record_first_event(project, event, **kwargs): ) try: - user: RpcUser = Organization.objects.get(id=project.organization_id).get_default_owner() + user: RpcUser = Organization.objects.get_from_cache( + id=project.organization_id + ).get_default_owner() except IndexError: logger.warning( "Cannot record first event for organization (%s) due to missing owners", @@ -255,7 +263,16 @@ def record_first_replay(project, **kwargs): platform=project.platform, ) logger.info("record_first_replay_analytics_end") - try_mark_onboarding_complete(project.organization_id) + # TODO(Telemetry): Remove this once we remove the feature flag 'quick-start-updates' + try: + user: RpcUser = project.organization.get_default_owner() + except IndexError: + logger.warning( + "Cannot record first replay for organization (%s) due to missing owners", + project.organization_id, + ) + return + try_mark_onboarding_complete(project.organization_id, user) @first_feedback_received.connect(weak=False) @@ -402,11 +419,22 @@ def record_member_joined(organization_id: int, organization_member_id: int, **kw }, ) if created or rows_affected: - try_mark_onboarding_complete(organization_id) + # 
TODO(Telemetry): Remove this once we remove the feature flag 'quick-start-updates' + try: + user: RpcUser = Organization.objects.get_from_cache( + id=organization_id + ).get_default_owner() + except IndexError: + logger.warning( + "Cannot record member joined an organization (%s) due to missing owners", + organization_id, + ) + return + try_mark_onboarding_complete(organization_id, user) def record_release_received(project, event, **kwargs): - if not event.get_tag("sentry:release"): + if not event.data.get("release"): return success = OrganizationOnboardingTask.objects.record( @@ -417,8 +445,9 @@ def record_release_received(project, event, **kwargs): ) if success: organization = Organization.objects.get_from_cache(id=project.organization_id) - owner_id = organization.default_owner_id - if not owner_id: + try: + owner: RpcUser = organization.get_default_owner() + except IndexError: logger.warning( "Cannot record release received for organization (%s) due to missing owners", project.organization_id, @@ -427,11 +456,11 @@ def record_release_received(project, event, **kwargs): analytics.record( "first_release_tag.sent", - user_id=owner_id, + user_id=owner.id, project_id=project.id, organization_id=project.organization_id, ) - try_mark_onboarding_complete(project.organization_id) + try_mark_onboarding_complete(project.organization_id, owner) event_processed.connect(record_release_received, weak=False) @@ -454,8 +483,9 @@ def record_user_context_received(project, event, **kwargs): ) if success: organization = Organization.objects.get_from_cache(id=project.organization_id) - owner_id = organization.default_owner_id - if not owner_id: + try: + owner: RpcUser = organization.get_default_owner() + except IndexError: logger.warning( "Cannot record user context received for organization (%s) due to missing owners", project.organization_id, @@ -464,11 +494,12 @@ def record_user_context_received(project, event, **kwargs): analytics.record( "first_user_context.sent", - user_id=owner_id, + user_id=owner.id, organization_id=project.organization_id, project_id=project.id, ) - try_mark_onboarding_complete(project.organization_id) + + try_mark_onboarding_complete(project.organization_id, owner) event_processed.connect(record_user_context_received, weak=False) @@ -525,8 +556,9 @@ def record_sourcemaps_received(project, event, **kwargs): ) if success: organization = Organization.objects.get_from_cache(id=project.organization_id) - owner_id = organization.default_owner_id - if not owner_id: + try: + owner: RpcUser = organization.get_default_owner() + except IndexError: logger.warning( "Cannot record sourcemaps received for organization (%s) due to missing owners", project.organization_id, @@ -534,14 +566,14 @@ def record_sourcemaps_received(project, event, **kwargs): return analytics.record( "first_sourcemaps.sent", - user_id=owner_id, + user_id=owner.id, organization_id=project.organization_id, project_id=project.id, platform=event.platform, project_platform=project.platform, url=dict(event.tags).get("url", None), ) - try_mark_onboarding_complete(project.organization_id) + try_mark_onboarding_complete(project.organization_id, owner) @event_processed.connect(weak=False) @@ -596,7 +628,7 @@ def record_plugin_enabled(plugin, project, user, **kwargs): data={"plugin": plugin.slug}, ) if success: - try_mark_onboarding_complete(project.organization_id) + try_mark_onboarding_complete(project.organization_id, user) analytics.record( "plugin.enabled", @@ -624,7 +656,7 @@ def record_alert_rule_created(user, project: 
Project, rule_type: str, **kwargs): ) if rows_affected or created: - try_mark_onboarding_complete(project.organization_id) + try_mark_onboarding_complete(project.organization_id, user) @issue_tracker_used.connect(weak=False) @@ -643,7 +675,7 @@ def record_issue_tracker_used(plugin, project, user, **kwargs): ) if rows_affected or created: - try_mark_onboarding_complete(project.organization_id) + try_mark_onboarding_complete(project.organization_id, user) if user and user.is_authenticated: user_id = default_user_id = user.id @@ -678,9 +710,17 @@ def record_integration_added( if integration is None: return - organization = Organization.objects.get(id=organization_id) + organization = Organization.objects.get_from_cache(id=organization_id) + try: + user: RpcUser = organization.get_default_owner() + except IndexError: + logger.warning( + "Cannot record first integration for organization (%s) due to missing owners", + organization_id, + ) + return - if features.has("organizations:quick-start-updates", organization): + if features.has("organizations:quick-start-updates", organization, actor=user): integration_types = get_integration_types(integration.provider) task_mapping = { diff --git a/src/sentry/receivers/sentry_apps.py b/src/sentry/receivers/sentry_apps.py index 5a46a0878eeea3..2db122bc92991d 100644 --- a/src/sentry/receivers/sentry_apps.py +++ b/src/sentry/receivers/sentry_apps.py @@ -152,7 +152,7 @@ def send_workflow_webhooks( def installations_to_notify( organization: Organization, resource_type: str ) -> list[RpcSentryAppInstallation]: - installations = app_service.get_installed_for_organization(organization_id=organization.id) + installations = app_service.installations_for_organization(organization_id=organization.id) # All issue webhooks are under one subscription, so if an intallation is subscribed to any issue # events it should get notified for all the issue events # TODO: Refactor sentry_app model so it doesn't store event, instead it stores subscription diff --git a/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py b/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py index 650fd61c8b7e04..32b828597f44fa 100644 --- a/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py +++ b/src/sentry/remote_subscriptions/migrations/0003_drop_remote_subscription.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("remote_subscriptions", "0002_remove_separate_remote_subscription"), ("uptime", "0003_drop_remote_subscription"), diff --git a/src/sentry/replays/lib/query.py b/src/sentry/replays/lib/query.py index 57b041c4689845..ae57c15a3f795c 100644 --- a/src/sentry/replays/lib/query.py +++ b/src/sentry/replays/lib/query.py @@ -1,503 +1,10 @@ """Dynamic query parsing library.""" import uuid -from typing import Any -from rest_framework.exceptions import ParseError -from snuba_sdk import Column, Condition, Function, Identifier, Lambda, Op +from snuba_sdk import Column, Condition, Function, Identifier, Lambda from snuba_sdk.conditions import And, Or from snuba_sdk.expressions import Expression -from snuba_sdk.orderby import Direction, OrderBy - -from sentry.api.event_search import ParenExpression, SearchFilter -from sentry.replays.lib.selector.parse import QueryType, parse_selector -from sentry.replays.lib.selector.query import union_find - -OPERATOR_MAP = { - "=": Op.EQ, - "!=": Op.NEQ, - ">": Op.GT, - ">=": Op.GTE, - "<": Op.LT, - "<=": Op.LTE, - "IN": 
Op.IN, - "NOT IN": Op.NOT_IN, -} - - -class Field: - def __init__( - self, - name: str | None = None, - field_alias: str | None = None, - query_alias: str | None = None, - is_filterable: bool = True, - is_sortable: bool = True, - is_uuid: bool = False, - operators: list | None = None, - validators: list | None = None, - ) -> None: - self.attribute_name = None - self.field_alias = field_alias or name - self.query_alias = query_alias or name - self.is_filterable = is_filterable - self.is_sortable = is_sortable - self.is_uuid = is_uuid - self.operators = operators or self._operators - self.validators = validators or [] - - def deserialize_operator(self, operator: str) -> tuple[Any, list[str]]: - op = OPERATOR_MAP.get(operator) - if op is None: - return None, ["Operator not found."] - elif op not in self.operators: - return None, ["Operator not permitted."] - else: - return op, [] - - def deserialize_values(self, values: list[str]) -> tuple[list[Any], list[str]]: - parsed_values = [] - for value in values: - parsed_value, errors = self.deserialize_value(value) - if errors: - return None, errors - - parsed_values.append(parsed_value) - - return parsed_values, [] - - def deserialize_value(self, value: list[str] | str) -> tuple[Any, list[str]]: - if isinstance(value, list): - return self.deserialize_values(value) - - try: - if self._python_type: - typed_value = self._python_type(value) - else: - typed_value = value - except ValueError: - return None, ["Invalid value specified."] - - for validator in self.validators: - error = validator(typed_value) - if error: - return None, [error] - - return typed_value, [] - - def as_condition( - self, - field_alias: str, - operator: Op, - value: list[str] | str, - is_wildcard: bool = False, - ) -> Condition: - return Condition(Column(self.query_alias or self.attribute_name), operator, value) - - -class UUIDField(Field): - """ - as our minimum supported clickhouse version is 20.3, we don't have - isUUIDOrZero function, so we must validate the uuid before supplying to clickhouse. 
- """ - - _operators = [Op.EQ, Op.NEQ, Op.IN, Op.NOT_IN] - _python_type = str - - def as_condition( - self, field_alias: str, operator: Op, value: list[str] | str, is_wildcard: bool - ) -> Condition: - if isinstance(value, list): - uuids = _transform_uuids(value) - if uuids is None: - return Condition(Function("identity", parameters=[1]), Op.EQ, 2) - value = uuids - else: - uuids = _transform_uuids([value]) - if uuids is None: - return Condition(Function("identity", parameters=[1]), Op.EQ, 2) - - value = uuids[0] - - return super().as_condition(field_alias, operator, value) - - -class IPAddress(Field): - _operators = [Op.EQ, Op.NEQ, Op.IN, Op.NOT_IN] - _python_type = str - - def as_condition( - self, - field_alias: str, - operator: Op, - value: list[str] | str, - is_wildcard: bool = False, - ) -> Condition: - if isinstance(value, list): - value = [Function("toIPv4", parameters=[v]) for v in value] - else: - value = Function("toIPv4", parameters=[value]) - - return Condition(Column(self.query_alias or self.attribute_name), operator, value) - - -class String(Field): - _operators = [Op.EQ, Op.NEQ, Op.IN, Op.NOT_IN] - _python_type = str - - def as_condition( - self, field_alias: str, operator: Op, value: list[str] | str, is_wildcard: bool - ) -> Condition: - if is_wildcard: - return Condition( - _wildcard_search_function(value, Column(self.query_alias or self.attribute_name)), - operator, - 1, - ) - - return super().as_condition(field_alias, operator, value, is_wildcard) - - -class Selector(Field): - _operators = [Op.EQ, Op.NEQ] - _python_type = str - - def as_condition( - self, field_alias: str, operator: Op, value: list[str] | str, is_wildcard: bool - ) -> Condition: - # This list of queries implies an `OR` operation between each item in the set. To `AND` - # selector queries apply them separately. - queries: list[QueryType] = parse_selector(value) - - # A valid selector will always return at least one query condition. If this did not occur - # then the selector was not well-formed. We return an empty resultset. - if len(queries) == 0: - return Condition(Function("identity", parameters=[1]), Op.EQ, 2) - - # Conditions are pre-made and intended for application in the HAVING clause. 
- conditions: list[Condition] = [] - - for query in queries: - columns, values = [], [] - - if query.alt: - columns.append(Column("click_alt")) - values.append(query.alt) - if query.aria_label: - columns.append(Column("click_aria_label")) - values.append(query.aria_label) - if query.classes: - columns.append(Column("click_classes")) - values.append(query.classes) - if query.id: - columns.append(Column("click_id")) - values.append(query.id) - if query.role: - columns.append(Column("click_role")) - values.append(query.role) - if query.tag: - columns.append(Column("click_tag")) - values.append(query.tag) - if query.testid: - columns.append(Column("click_testid")) - values.append(query.testid) - if query.title: - columns.append(Column("click_title")) - values.append(query.title) - if query.component_name: - columns.append(Column("click_component_name")) - values.append(query.component_name) - - if columns and values: - conditions.append(Condition(union_find(columns, values), operator, 1)) - - if len(conditions) == 1: - return conditions[0] - else: - return Or(conditions) - - -class Number(Field): - _operators = [Op.EQ, Op.NEQ, Op.GT, Op.GTE, Op.LT, Op.LTE, Op.IN, Op.NOT_IN] - _python_type = int - - -class ListField(Field): - _operators = [Op.EQ, Op.NEQ, Op.IN, Op.NOT_IN] - _python_type = None - - def as_condition( - self, _: str, operator: Op, value: list[str] | str, is_wildcard: bool = False - ) -> Condition: - if operator in [Op.EQ, Op.NEQ]: - if is_wildcard: - # wildcard search isn't supported with the IN operator - return self._wildcard_condition(operator, value) - - return self._has_condition(operator, value) - else: - return self._has_any_condition(operator, value) - - def _wildcard_condition(self, operator: Op, value: str): - return Condition( - Function( - "arrayExists", - parameters=[ - Lambda( - ["list_element"], - _wildcard_search_function(value, Identifier("list_element")), - ), - Column(self.query_alias or self.attribute_name), - ], - ), - Op.EQ, - 1 if operator == Op.EQ else 0, - ) - - def _has_condition( - self, - operator: Op, - value: list[str] | str, - ) -> Condition: - if isinstance(value, list): - return self._has_any_condition(Op.IN if operator == Op.EQ else Op.NOT_IN, value) - - if self.is_uuid: - # Client side UUID validation. If this fails we use a condition which is always - # false. E.g. 1 == 2. We don't use toUUIDOrZero because our Clickhouse version does - # not support it. - uuids = _transform_uuids([value]) - if uuids is None: - return Condition(Function("identity", parameters=[1]), Op.EQ, 2) - - v = Function("toUUID", parameters=uuids) - else: - v = value - - return Condition( - Function("has", parameters=[Column(self.query_alias or self.attribute_name), v]), - Op.EQ, - 1 if operator == Op.EQ else 0, - ) - - def _has_any_condition( - self, - operator: Op, - values: list[str] | str, - ) -> Condition: - if not isinstance(values, list): - return self._has_condition(Op.EQ if operator == Op.IN else Op.NEQ, values) - - if self.is_uuid: - # Client side UUID validation. If this fails we use a condition which is always - # false. E.g. 1 == 2. We don't use toUUIDOrZero because our Clickhouse version does - # not support it. 
- uuids = _transform_uuids(values) - if uuids is None: - return Condition(Function("identity", parameters=[1]), Op.EQ, 2) - - vs = [Function("toUUID", parameters=[uid]) for uid in uuids] - else: - vs = values - - return Condition( - Function("hasAny", parameters=[Column(self.query_alias or self.attribute_name), vs]), - Op.EQ, - 1 if operator == Op.IN else 0, - ) - - -class Tag(Field): - _operators = [Op.EQ, Op.NEQ, Op.IN, Op.NOT_IN] - _negation_map = [False, True, False, True] - _python_type = str - - def __init__(self, tag_key_alias="tk", tag_value_alias="tv", **kwargs): - self.tag_key_alias = tag_key_alias - self.tag_value_alias = tag_value_alias - kwargs.pop("operators", None) - return super().__init__(**kwargs) - - def deserialize_operator(self, operator: str) -> tuple[Op, list[str]]: - op = OPERATOR_MAP.get(operator) - if op is None: - return None, ["Operator not found."] - elif op not in self._operators: - return None, ["Operator not permitted."] - else: - return op, [] - - def as_condition( - self, - field_alias: str, - operator: Op, - value: list[str] | str, - is_wildcard: bool = False, - ) -> Condition: - - if is_wildcard: - return self._filter_tag_by_wildcard_search(field_alias, value, operator) - - return self._filter_tag_by_value(field_alias, value, operator) - - def _filter_tag_by_wildcard_search(self, field_alias: str, value: str, operator: Op): - return Condition( - Function( - "arrayExists", - parameters=[ - Lambda( - ["tag_value"], _wildcard_search_function(value, Identifier("tag_value")) - ), - all_values_for_tag_key( - field_alias, Column(self.tag_key_alias), Column(self.tag_value_alias) - ), - ], - ), - operator, - 1, - ) - - def _filter_tag_by_value(self, key: str, values: list[str] | str, operator: Op) -> Condition: - """Helper function that allows filtering a tag by multiple values.""" - expected = 0 if operator not in (Op.EQ, Op.IN) else 1 - function = "hasAny" if isinstance(values, list) else "has" - return Condition( - Function( - function, - parameters=[ - all_values_for_tag_key( - key, Column(self.tag_key_alias), Column(self.tag_value_alias) - ), - values, - ], - ), - Op.EQ, - expected, - ) - - -class InvalidField(Field): - _operators = [Op.EQ, Op.NEQ, Op.IN, Op.NOT_IN] - _python_type = str - - def as_condition( - self, _: str, operator: Op, value: list[str] | str, is_wildcard: bool = False - ) -> Condition: - raise ParseError() - - def _wildcard_condition(self, operator: Op, value: str): - raise ParseError() - - def _has_condition( - self, - operator: Op, - value: list[str] | str, - ) -> Condition: - raise ParseError() - - def _has_any_condition( - self, - operator: Op, - values: list[str] | str, - ) -> Condition: - raise ParseError() - - -class QueryConfig: - def __init__(self, only: tuple[str] | None = None) -> None: - self.fields = {} - for field_name in only or self.__class__.__dict__: - field = getattr(self, field_name) - if isinstance(field, Field): - field.attribute_name = field_name - if field.field_alias: - self.insert(field.field_alias, field) - else: - self.insert(field_name, field) - - def get(self, field_name: str, default=None) -> Field: - return self.fields.get(field_name, default) - - def insert(self, field_name: str | None, value: Field) -> None: - if field_name is None: - return None - elif field_name in self.fields: - raise KeyError(f"Field already exists: {field_name}") - else: - self.fields[field_name] = value - - -# Implementation. 
- - -def generate_valid_conditions( - query: list[SearchFilter | ParenExpression | str], query_config: QueryConfig -) -> list[Expression]: - """Convert search filters to snuba conditions.""" - result: list[Expression] = [] - look_back = None - for search_filter in query: - # SearchFilters are appended to the result set. If they are top level filters they are - # implicitly And'ed in the WHERE/HAVING clause. - if isinstance(search_filter, SearchFilter): - condition = filter_to_condition(search_filter, query_config) - if look_back == "AND": - look_back = None - attempt_compressed_condition(result, condition, And) - elif look_back == "OR": - look_back = None - attempt_compressed_condition(result, condition, Or) - else: - result.append(condition) - # ParenExpressions are recursively computed. If more than one condition is returned then - # those conditions are And'ed. - elif isinstance(search_filter, ParenExpression): - conditions = generate_valid_conditions(search_filter.children, query_config) - if len(conditions) < 2: - result.extend(conditions) - else: - result.append(And(conditions)) - # String types are limited to AND and OR... I think? In the case where its not a valid - # look-back it is implicitly ignored. - elif isinstance(search_filter, str): - look_back = search_filter - - return result - - -def filter_to_condition(search_filter: SearchFilter, query_config: QueryConfig) -> Condition: - """Coerce SearchFilter syntax to snuba Condition syntax.""" - # Validate field exists and is filterable. - field_alias = search_filter.key.name - field = query_config.get(field_alias) - is_tag = field is None - - if is_tag: - field = query_config.get("*") - - if field is None: - raise ParseError(f"Invalid field specified: {field_alias}.") - if not field.is_filterable: - raise ParseError(f'"{field_alias}" is not filterable.') - - # Validate strategy is correct. - query_operator = search_filter.operator - operator, errors = field.deserialize_operator(query_operator) - if errors: - raise ParseError(f"Invalid operator specified: {field_alias}.") - - # Deserialize value to its correct type or error. - query_value = search_filter.value.value - value, errors = field.deserialize_value(query_value) - if errors: - raise ParseError(f"Invalid value specified: {field_alias}.") - - is_wildcard = search_filter.value.is_wildcard() - - if is_tag and field_alias.startswith("tags[") and field_alias.endswith("]"): - field_alias = field_alias[5:-1] - - return field.as_condition(field_alias, operator, value, is_wildcard) def attempt_compressed_condition( @@ -518,35 +25,6 @@ def attempt_compressed_condition( result.append(condition_type([result.pop(), condition])) -def get_valid_sort_commands( - sort: str | None, - default: OrderBy, - query_config: QueryConfig, -) -> list[OrderBy]: - if not sort: - return [default] - - if sort.startswith("-"): - strategy = Direction.DESC - field_name = sort[1:] - else: - strategy = Direction.ASC - field_name = sort - - field = query_config.get(field_name) - if not field: - raise ParseError(f"Invalid field specified: {field_name}.") - - if isinstance(field, InvalidField): - raise ParseError("field can't be used to sort query") - - else: - return [OrderBy(Column(field.query_alias or field.attribute_name), strategy)] - - -# Tag filtering behavior. 
- - def all_values_for_tag_key(key: str, tag_key_column: Column, tag_value_column: Column) -> Function: return Function( "arrayFilter", @@ -594,9 +72,6 @@ def _wildcard_search_function(value, identifier): ) -# Helpers - - def _transform_uuids(values: list[str]) -> list[str] | None: try: return [str(uuid.UUID(value)) for value in values] diff --git a/src/sentry/rules/actions/integrations/create_ticket/utils.py b/src/sentry/rules/actions/integrations/create_ticket/utils.py index 4d9476c8cb377c..67c2e826e802ce 100644 --- a/src/sentry/rules/actions/integrations/create_ticket/utils.py +++ b/src/sentry/rules/actions/integrations/create_ticket/utils.py @@ -9,9 +9,14 @@ from sentry.eventstore.models import GroupEvent from sentry.integrations.base import IntegrationInstallation from sentry.integrations.models.external_issue import ExternalIssue +from sentry.integrations.project_management.metrics import ( + ProjectManagementActionType, + ProjectManagementEvent, +) from sentry.integrations.services.integration.model import RpcIntegration from sentry.integrations.services.integration.service import integration_service from sentry.models.grouplink import GroupLink +from sentry.shared_integrations.exceptions import IntegrationFormError from sentry.silo.base import region_silo_function from sentry.types.rules import RuleFuture from sentry.utils import metrics @@ -114,27 +119,34 @@ def create_issue(event: GroupEvent, futures: Sequence[RuleFuture]) -> None: }, ) return - try: - response = installation.create_issue(data) - except Exception as e: - logger.info( - "%s.rule_trigger.create_ticket.failure", - provider, - extra={ - "rule_id": rule_id, - "provider": provider, - "integration_id": integration.id, - "error_message": str(e), - "exception_type": type(e).__name__, - }, - ) - metrics.incr( - f"{provider}.rule_trigger.create_ticket.failure", - tags={ - "provider": provider, - }, - ) - raise + with ProjectManagementEvent( + action_type=ProjectManagementActionType.CREATE_EXTERNAL_ISSUE, + integration=integration, + ).capture() as lifecycle: + lifecycle.add_extra("provider", provider) + lifecycle.add_extra("integration_id", integration.id) + lifecycle.add_extra("rule_id", rule_id) + + try: + response = installation.create_issue(data) + except Exception as e: + if isinstance(e, IntegrationFormError): + # Most of the time, these aren't explicit failures, they're + # some misconfiguration of an issue field - typically Jira. 
+ lifecycle.record_halt(str(e)) + else: + # Don't pass the full exception here, as it can contain a + # massive request response along with its stacktrace + lifecycle.record_failure(str(e)) + + metrics.incr( + f"{provider}.rule_trigger.create_ticket.failure", + tags={ + "provider": provider, + }, + ) + + raise create_link(integration, installation, event, response) diff --git a/src/sentry/rules/conditions/event_attribute.py b/src/sentry/rules/conditions/event_attribute.py index e5fb5699a1707b..0c630c418ead2f 100644 --- a/src/sentry/rules/conditions/event_attribute.py +++ b/src/sentry/rules/conditions/event_attribute.py @@ -63,8 +63,8 @@ def _handle(cls, path: list[str], event: GroupEvent) -> list[str]: "stacktrace.package": Columns.STACK_PACKAGE, "unreal.crashtype": Columns.UNREAL_CRASH_TYPE, "app.in_foreground": Columns.APP_IN_FOREGROUND, - "os.distribution.name": Columns.OS_DISTRIBUTION_NAME, - "os.distribution.version": Columns.OS_DISTRIBUTION_VERSION, + "os.distribution_name": Columns.OS_DISTRIBUTION_NAME, + "os.distribution_version": Columns.OS_DISTRIBUTION_VERSION, } @@ -418,21 +418,14 @@ def _handle(cls, path: list[str], event: GroupEvent) -> list[str]: @attribute_registry.register("os") class OsAttributeHandler(AttributeHandler): - minimum_path_length = 3 + minimum_path_length = 2 @classmethod def _handle(cls, path: list[str], event: GroupEvent) -> list[str]: - if path[1] in ("distribution"): - if path[2] in ("name", "version"): - contexts = event.data.get("contexts", {}) - os_context = contexts.get("os") - if os_context is None: - os_context = {} - - distribution = os_context.get(path[1]) - if distribution is None: - distribution = {} - - return [distribution.get(path[2])] - return [] + if path[1] in ("distribution_name", "distribution_version"): + contexts = event.data.get("contexts", {}) + os_context = contexts.get("os") + if os_context is None: + os_context = {} + return [os_context.get(path[1])] return [] diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py index 67c01edcab890d..6e24c0226b5d3c 100644 --- a/src/sentry/rules/processing/delayed_processing.py +++ b/src/sentry/rules/processing/delayed_processing.py @@ -207,17 +207,17 @@ def bulk_fetch_events(event_ids: list[str], project_id: int) -> dict[str, Event] def parse_rulegroup_to_event_data( rulegroup_to_event_data: dict[str, str] -) -> dict[tuple[str, str], dict[str, str]]: +) -> dict[tuple[int, int], dict[str, str]]: parsed_rulegroup_to_event_data = {} for rule_group, instance_data in rulegroup_to_event_data.items(): event_data = json.loads(instance_data) rule_id, group_id = rule_group.split(":") - parsed_rulegroup_to_event_data[(rule_id, group_id)] = event_data + parsed_rulegroup_to_event_data[(int(rule_id), int(group_id))] = event_data return parsed_rulegroup_to_event_data def build_group_to_groupevent( - parsed_rulegroup_to_event_data: dict[tuple[str, str], dict[str, str]], + parsed_rulegroup_to_event_data: dict[tuple[int, int], dict[str, str]], bulk_event_id_to_events: dict[str, Event], bulk_occurrence_id_to_occurrence: dict[str, IssueOccurrence], group_id_to_group: dict[int, Group], @@ -258,7 +258,7 @@ def build_group_to_groupevent( def get_group_to_groupevent( - parsed_rulegroup_to_event_data: dict[tuple[str, str], dict[str, str]], + parsed_rulegroup_to_event_data: dict[tuple[int, int], dict[str, str]], project_id: int, group_ids: set[int], ) -> dict[Group, GroupEvent]: @@ -413,7 +413,7 @@ def get_rules_to_fire( def fire_rules( rules_to_fire: 
DefaultDict[Rule, set[int]], - parsed_rulegroup_to_event_data: dict[tuple[str, str], dict[str, str]], + parsed_rulegroup_to_event_data: dict[tuple[int, int], dict[str, str]], alert_rules: list[Rule], project: Project, ) -> None: diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index ce55cc8a3e7902..5a2b9a947a8f4a 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -37,6 +37,7 @@ "transactions-subscription-results", "generic-metrics-subscription-results", "metrics-subscription-results", + "eap-spans-subscription-results", ] @@ -353,7 +354,10 @@ def devserver( # Create all topics if the Kafka eventstream is selected if kafka_consumers: - if "sentry_kafka" not in containers and "shared-kafka-kafka-1" not in containers: + kafka_container_name = ( + "kafka-kafka-1" if os.environ.get("USE_NEW_DEVSERVICES") == "1" else "sentry_kafka" + ) + if kafka_container_name not in containers: raise click.ClickException( f""" Devserver is configured to start some kafka consumers, but Kafka diff --git a/src/sentry/runner/commands/openai.py b/src/sentry/runner/commands/openai.py deleted file mode 100644 index 2152d8c894d555..00000000000000 --- a/src/sentry/runner/commands/openai.py +++ /dev/null @@ -1,29 +0,0 @@ -# The sentry utils json cannot pretty print -import json # noqa: S003 -from typing import IO - -import click - - -@click.command("openai") -@click.option("--event", type=click.File("r")) -@click.option("--model", default="gpt-3.5-turbo") -@click.option("--dump-prompt", is_flag=True) -def openai(event: IO[str], model: str, dump_prompt: bool) -> None: - """ - Runs the OpenAI assistent against a JSON event payload. - """ - from sentry.runner import configure - - configure() - - from sentry.api.endpoints.event_ai_suggested_fix import describe_event_for_ai, suggest_fix - - event_data = json.load(event) - if dump_prompt: - click.echo(json.dumps(describe_event_for_ai(event_data, model=model), indent=2)) - else: - resp = suggest_fix(event_data, stream=True, model=model) - for chunk in resp: - click.echo(chunk, nl=False) - click.echo() diff --git a/src/sentry/runner/commands/run.py b/src/sentry/runner/commands/run.py index 1c17cbf8d85712..73814b5e4e77d6 100644 --- a/src/sentry/runner/commands/run.py +++ b/src/sentry/runner/commands/run.py @@ -237,6 +237,7 @@ def worker(ignore_unknown_queues: bool, **options: Any) -> None: run_worker(**options) +@run.command() @click.option("--rpc-host", help="The hostname for the taskworker-rpc", default="127.0.0.1:50051") @click.option("--autoreload", is_flag=True, default=False, help="Enable autoreloading.") @click.option( @@ -253,6 +254,53 @@ def taskworker(rpc_host: str, max_task_count: int, **options: Any) -> None: raise SystemExit(exitcode) +@run.command() +@log_options() +@configuration +@click.option( + "--repeat", + type=int, + help="Number of messages to send to the kafka topic", + default=1, + show_default=True, +) +@click.option( + "--kwargs", + type=str, + help="Task function keyword arguments", +) +@click.option( + "--args", + type=str, + help="Task function arguments", +) +@click.option( + "--task-function-path", + type=str, + help="The path to the function name of the task to execute", + required=True, +) +def taskbroker_send_tasks( + task_function_path: str, + args: str, + kwargs: str, + repeat: int, +) -> None: + from sentry.utils.imports import import_string + + try: + func = import_string(task_function_path) + except Exception as e: + click.echo(f"Error: {e}") + 
raise click.Abort() + task_args = [] if not args else eval(args) + task_kwargs = {} if not kwargs else eval(kwargs) + + for _ in range(repeat): + func.delay(*task_args, **task_kwargs) + click.echo(message=f"Successfully sent {repeat} messages.") + + @run.command() @click.option( "--pidfile", diff --git a/src/sentry/runner/main.py b/src/sentry/runner/main.py index a24249045386f8..2c3517b94945ae 100644 --- a/src/sentry/runner/main.py +++ b/src/sentry/runner/main.py @@ -68,7 +68,6 @@ def cli(config: str) -> None: "sentry.runner.commands.performance.performance", "sentry.runner.commands.spans.spans", "sentry.runner.commands.spans.write_hashes", - "sentry.runner.commands.openai.openai", "sentry.runner.commands.llm.llm", "sentry.runner.commands.workstations.workstations", ), diff --git a/src/sentry/search/eap/columns.py b/src/sentry/search/eap/columns.py index 7664d981d5106c..58245dc36b872e 100644 --- a/src/sentry/search/eap/columns.py +++ b/src/sentry/search/eap/columns.py @@ -1,10 +1,13 @@ from collections.abc import Callable from dataclasses import dataclass -from typing import Any +from datetime import datetime +from typing import Any, Literal +from dateutil.tz import tz from sentry_protos.snuba.v1.trace_item_attribute_pb2 import ( AttributeAggregation, AttributeKey, + ExtrapolationMode, Function, VirtualColumnContext, ) @@ -16,16 +19,14 @@ from sentry.utils.validators import is_event_id, is_span_id -@dataclass(frozen=True) -class ResolvedColumn: +@dataclass(frozen=True, kw_only=True) +class ResolvedAttribute: # The alias for this column public_alias: ( str # `p95() as foo` has the public alias `foo` and `p95()` has the public alias `p95()` ) - # The internal rpc alias for this column - internal_name: str | Function.ValueType # The public type for this column - search_type: str + search_type: constants.SearchType # The internal rpc type for this column, optional as it can mostly be inferred from search_type internal_type: AttributeKey.Type.ValueType | None = None # Only for aggregates, we only support functions with 1 argument right now @@ -34,6 +35,9 @@ class ResolvedColumn: processor: Callable[[Any], Any] | None = None # Validator to check if the value in a query is correct validator: Callable[[Any], bool] | None = None + # Indicates this attribute is a secondary alias for the attribute. + # It exists for compatibility or convenience reasons and should NOT be preferred. 
+ secondary_alias: bool = False def process_column(self, value: Any) -> Any: """Given the value from results, return a processed value if a processor is defined otherwise return it""" @@ -46,21 +50,6 @@ def validate(self, value: Any) -> None: if not self.validator(value): raise InvalidSearchQuery(f"{value} is an invalid value for {self.public_alias}") - @property - def proto_definition(self) -> AttributeAggregation | AttributeKey: - """The definition of this function as needed by the RPC""" - if isinstance(self.internal_name, Function.ValueType): - return AttributeAggregation( - aggregate=self.internal_name, - key=self.argument, - label=self.public_alias, - ) - else: - return AttributeKey( - name=self.internal_name, - type=self.proto_type, - ) - @property def proto_type(self) -> AttributeKey.Type.ValueType: """The proto's AttributeKey type for this column""" @@ -69,20 +58,24 @@ def proto_type(self) -> AttributeKey.Type.ValueType: else: return constants.TYPE_MAP[self.search_type] + +@dataclass(frozen=True, kw_only=True) +class ResolvedColumn(ResolvedAttribute): + # The internal rpc alias for this column + internal_name: str + @property - def meta_type(self) -> str: - """This column's type for the meta response from the API""" - if self.search_type == "duration": - return "duration" - elif self.search_type == "number": - return "integer" - else: - return self.search_type + def proto_definition(self) -> AttributeKey: + """The definition of this function as needed by the RPC""" + return AttributeKey( + name=self.internal_name, + type=self.proto_type, + ) @dataclass class ArgumentDefinition: - argument_types: list[str] | None = None + argument_types: list[constants.SearchType] | None = None # The public alias for the default arg, the SearchResolver will resolve this value default_arg: str | None = None # Whether this argument is completely ignored, used for `count()` @@ -92,24 +85,74 @@ class ArgumentDefinition: @dataclass class FunctionDefinition: internal_function: Function.ValueType - # the search_type the argument should be + # The list of arguments for this function arguments: list[ArgumentDefinition] - # The public type for this column - search_type: str + # The search_type the argument should be the default type for this column + default_search_type: constants.SearchType + # Try to infer the search type from the function arguments + infer_search_type_from_arguments: bool = True # The internal rpc type for this function, optional as it can mostly be inferred from search_type internal_type: AttributeKey.Type.ValueType | None = None # Processor is the function run in the post process step to transform a row into the final result processor: Callable[[Any], Any] | None = None + # Whether to request extrapolation or not, should be true for all functions except for _sample functions for debugging + extrapolation: bool = True @property def required_arguments(self) -> list[ArgumentDefinition]: return [arg for arg in self.arguments if arg.default_arg is None and not arg.ignored] +@dataclass(frozen=True, kw_only=True) +class ResolvedFunction(ResolvedAttribute): + # The internal rpc alias for this column + internal_name: Function.ValueType + # Whether to enable extrapolation + extrapolation: bool = True + + @property + def proto_definition(self) -> AttributeAggregation: + """The definition of this function as needed by the RPC""" + return AttributeAggregation( + aggregate=self.internal_name, + key=self.argument, + label=self.public_alias, + extrapolation_mode=( + 
ExtrapolationMode.EXTRAPOLATION_MODE_SAMPLE_WEIGHTED + if self.extrapolation + else ExtrapolationMode.EXTRAPOLATION_MODE_NONE + ), + ) + + @property + def proto_type(self) -> AttributeKey.Type.ValueType: + """The rpc always returns functions as floats, especially count() even though it should be an integer + + see: https://www.notion.so/sentry/Should-count-return-an-int-in-the-v1-RPC-API-1348b10e4b5d80498bfdead194cc304e + """ + return constants.FLOAT + + def simple_sentry_field(field) -> ResolvedColumn: """For a good number of fields, the public alias matches the internal alias - This helper functions makes defining them easier""" - return ResolvedColumn(field, f"sentry.{field}", "string") + without the `sentry.` suffix. This helper functions makes defining them easier""" + return ResolvedColumn(public_alias=field, internal_name=f"sentry.{field}", search_type="string") + + +def simple_measurements_field( + field, search_type: constants.SearchType = "number" +) -> ResolvedColumn: + """For a good number of fields, the public alias matches the internal alias + with the `measurements.` prefix. This helper functions makes defining them easier""" + return ResolvedColumn( + public_alias=f"measurements.{field}", + internal_name=field, + search_type=search_type, + ) + + +def datetime_processor(datetime_string: str) -> str: + return datetime.fromisoformat(datetime_string).replace(tzinfo=tz.tzutc()).isoformat() SPAN_COLUMN_DEFINITIONS = { @@ -123,7 +166,7 @@ def simple_sentry_field(field) -> ResolvedColumn: ), ResolvedColumn( public_alias="parent_span", - internal_name="sentry.sentry,parent_span_id", + internal_name="sentry.parent_span_id", search_type="string", validator=is_span_id, ), @@ -143,6 +186,7 @@ def simple_sentry_field(field) -> ResolvedColumn: internal_name="sentry.project_id", internal_type=constants.INT, search_type="string", + secondary_alias=True, ), ResolvedColumn( public_alias="span.action", @@ -158,12 +202,14 @@ def simple_sentry_field(field) -> ResolvedColumn: public_alias="description", internal_name="sentry.name", search_type="string", + secondary_alias=True, ), # Message maps to description, this is to allow wildcard searching ResolvedColumn( public_alias="message", internal_name="sentry.name", search_type="string", + secondary_alias=True, ), ResolvedColumn( public_alias="span.domain", @@ -188,12 +234,12 @@ def simple_sentry_field(field) -> ResolvedColumn: ResolvedColumn( public_alias="span.self_time", internal_name="sentry.exclusive_time_ms", - search_type="duration", + search_type="millisecond", ), ResolvedColumn( public_alias="span.duration", internal_name="sentry.duration_ms", - search_type="duration", + search_type="millisecond", ), ResolvedColumn( public_alias="span.status", @@ -210,7 +256,16 @@ def simple_sentry_field(field) -> ResolvedColumn: public_alias="transaction", internal_name="sentry.segment_name", search_type="string", - validator=is_event_id, + ), + ResolvedColumn( + public_alias="transaction.span_id", + internal_name="sentry.segment_id", + search_type="string", + ), + ResolvedColumn( + public_alias="profile.id", + internal_name="sentry.profile_id", + search_type="string", ), ResolvedColumn( public_alias="replay.id", @@ -232,15 +287,45 @@ def simple_sentry_field(field) -> ResolvedColumn: internal_name="ai.total_cost", search_type="number", ), + ResolvedColumn( + public_alias="http.decoded_response_content_length", + internal_name="http.decoded_response_content_length", + search_type="byte", + ), + ResolvedColumn( + public_alias="http.response_content_length", 
+ internal_name="http.response_content_length", + search_type="byte", + ), + ResolvedColumn( + public_alias="http.response_transfer_size", + internal_name="http.response_transfer_size", + search_type="byte", + ), + ResolvedColumn( + public_alias="sampling_rate", + internal_name="sentry.sampling_factor", + search_type="percentage", + ), + ResolvedColumn( + public_alias="timestamp", + internal_name="sentry.timestamp", + search_type="string", + processor=datetime_processor, + ), simple_sentry_field("browser.name"), + simple_sentry_field("environment"), simple_sentry_field("messaging.destination.name"), simple_sentry_field("messaging.message.id"), + simple_sentry_field("platform"), simple_sentry_field("release"), simple_sentry_field("sdk.name"), + simple_sentry_field("sdk.version"), simple_sentry_field("span.status_code"), simple_sentry_field("span_id"), simple_sentry_field("trace.status"), simple_sentry_field("transaction.method"), + simple_sentry_field("transaction.op"), simple_sentry_field("user"), simple_sentry_field("user.email"), simple_sentry_field("user.geo.country_code"), @@ -248,10 +333,76 @@ def simple_sentry_field(field) -> ResolvedColumn: simple_sentry_field("user.id"), simple_sentry_field("user.ip"), simple_sentry_field("user.username"), + simple_measurements_field("app_start_cold", "millisecond"), + simple_measurements_field("app_start_warm", "millisecond"), + simple_measurements_field("frames_frozen"), + simple_measurements_field("frames_frozen_rate", "percentage"), + simple_measurements_field("frames_slow"), + simple_measurements_field("frames_slow_rate", "percentage"), + simple_measurements_field("frames_total"), + simple_measurements_field("time_to_initial_display", "millisecond"), + simple_measurements_field("time_to_full_display", "millisecond"), + simple_measurements_field("stall_count"), + simple_measurements_field("stall_percentage", "percentage"), + simple_measurements_field("stall_stall_longest_time"), + simple_measurements_field("stall_stall_total_time"), + simple_measurements_field("cls"), + simple_measurements_field("fcp", "millisecond"), + simple_measurements_field("fid", "millisecond"), + simple_measurements_field("fp", "millisecond"), + simple_measurements_field("inp", "millisecond"), + simple_measurements_field("lcp", "millisecond"), + simple_measurements_field("ttfb", "millisecond"), + simple_measurements_field("ttfb.requesttime", "millisecond"), + simple_measurements_field("score.cls"), + simple_measurements_field("score.fcp"), + simple_measurements_field("score.fid"), + simple_measurements_field("score.fp"), + simple_measurements_field("score.inp"), + simple_measurements_field("score.lcp"), + simple_measurements_field("score.ttfb"), + simple_measurements_field("score.total"), + simple_measurements_field("score.weight.cls"), + simple_measurements_field("score.weight.fcp"), + simple_measurements_field("score.weight.fid"), + simple_measurements_field("score.weight.fp"), + simple_measurements_field("score.weight.inp"), + simple_measurements_field("score.weight.lcp"), + simple_measurements_field("score.weight.ttfb"), + simple_measurements_field("cache.item_size"), + simple_measurements_field("messaging.message.body.size"), + simple_measurements_field("messaging.message.receive.latency"), + simple_measurements_field("messaging.message.retry.count"), ] } +INTERNAL_TO_PUBLIC_ALIAS_MAPPINGS: dict[Literal["string", "number"], dict[str, str]] = { + "string": { + definition.internal_name: definition.public_alias + for definition in SPAN_COLUMN_DEFINITIONS.values() + if 
not definition.secondary_alias and definition.search_type == "string" + } + | { + # sentry.service is the project id as a string, but map to project for convenience + "sentry.service": "project", + }, + "number": { + definition.internal_name: definition.public_alias + for definition in SPAN_COLUMN_DEFINITIONS.values() + if not definition.secondary_alias and definition.search_type != "string" + }, +} + + +def translate_internal_to_public_alias( + internal_alias: str, + type: Literal["string", "number"], +) -> str | None: + mappings = INTERNAL_TO_PUBLIC_ALIAS_MAPPINGS.get(type, {}) + return mappings.get(internal_alias) + + def project_context_constructor(column_name: str) -> Callable[[SnubaParams], VirtualColumnContext]: def context_constructor(params: SnubaParams) -> VirtualColumnContext: return VirtualColumnContext( @@ -290,77 +441,152 @@ def device_class_context_constructor(params: SnubaParams) -> VirtualColumnContex SPAN_FUNCTION_DEFINITIONS = { "sum": FunctionDefinition( internal_function=Function.FUNCTION_SUM, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) ], ), "avg": FunctionDefinition( internal_function=Function.FUNCTION_AVERAGE, - search_type="duration", + default_search_type="duration", + arguments=[ + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number", "percentage"], + default_arg="span.duration", + ) + ], + ), + "avg_sample": FunctionDefinition( + internal_function=Function.FUNCTION_AVERAGE, + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number", "percentage"], + default_arg="span.duration", + ) ], + extrapolation=False, ), "count": FunctionDefinition( internal_function=Function.FUNCTION_COUNT, - search_type="number", + infer_search_type_from_arguments=False, + default_search_type="integer", + arguments=[ + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) + ], + ), + "count_sample": FunctionDefinition( + internal_function=Function.FUNCTION_COUNT, + infer_search_type_from_arguments=False, + default_search_type="integer", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) ], + extrapolation=False, ), "p50": FunctionDefinition( internal_function=Function.FUNCTION_P50, - search_type="duration", + default_search_type="duration", + arguments=[ + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) + ], + ), + "p50_sample": FunctionDefinition( + internal_function=Function.FUNCTION_P50, + default_search_type="duration", + arguments=[ + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) + ], + extrapolation=False, + ), + "p75": FunctionDefinition( + internal_function=Function.FUNCTION_P75, + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", 
"number"], + default_arg="span.duration", + ) ], ), "p90": FunctionDefinition( internal_function=Function.FUNCTION_P90, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) ], ), "p95": FunctionDefinition( internal_function=Function.FUNCTION_P95, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) ], ), "p99": FunctionDefinition( internal_function=Function.FUNCTION_P99, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) ], ), "p100": FunctionDefinition( internal_function=Function.FUNCTION_MAX, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number"], + default_arg="span.duration", + ) ], ), "max": FunctionDefinition( internal_function=Function.FUNCTION_MAX, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number", "percentage"], + default_arg="span.duration", + ) ], ), "min": FunctionDefinition( internal_function=Function.FUNCTION_MIN, - search_type="duration", + default_search_type="duration", arguments=[ - ArgumentDefinition(argument_types=["duration", "number"], default_arg="span.duration") + ArgumentDefinition( + argument_types=["byte", "duration", "millisecond", "number", "percentage"], + default_arg="span.duration", + ) ], ), "count_unique": FunctionDefinition( internal_function=Function.FUNCTION_UNIQ, - search_type="number", + default_search_type="number", arguments=[ ArgumentDefinition( argument_types=["string"], diff --git a/src/sentry/search/eap/constants.py b/src/sentry/search/eap/constants.py index 860b0481c1005b..f1968b47dea9a1 100644 --- a/src/sentry/search/eap/constants.py +++ b/src/sentry/search/eap/constants.py @@ -1,3 +1,5 @@ +from typing import Literal + from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeKey from sentry_protos.snuba.v1.trace_item_filter_pb2 import ComparisonFilter @@ -13,6 +15,16 @@ } IN_OPERATORS = ["IN", "NOT IN"] +SearchType = Literal[ + "byte", + "duration", + "integer", + "millisecond", + "number", + "percentage", + "string", +] + STRING = AttributeKey.TYPE_STRING BOOLEAN = AttributeKey.TYPE_BOOLEAN FLOAT = AttributeKey.TYPE_FLOAT @@ -20,9 +32,13 @@ # TODO: we need a datetime type # Maps search types back to types for the proto -TYPE_MAP = { +TYPE_MAP: dict[SearchType, AttributeKey.Type.ValueType] = { + "byte": FLOAT, + "duration": FLOAT, + "integer": INT, + "millisecond": FLOAT, # TODO: need to update these to float once the proto supports float arrays "number": INT, - "duration": FLOAT, + "percentage": FLOAT, "string": STRING, } diff --git a/src/sentry/search/eap/spans.py 
b/src/sentry/search/eap/spans.py index d47d4bdb77b934..e1b477bfc528fb 100644 --- a/src/sentry/search/eap/spans.py +++ b/src/sentry/search/eap/spans.py @@ -29,6 +29,7 @@ SPAN_FUNCTION_DEFINITIONS, VIRTUAL_CONTEXTS, ResolvedColumn, + ResolvedFunction, ) from sentry.search.eap.types import SearchResolverConfig from sentry.search.events import constants as qb_constants @@ -46,7 +47,12 @@ class SearchResolver: params: SnubaParams config: SearchResolverConfig - resolved_columns: dict[str, ResolvedColumn] = field(default_factory=dict) + _resolved_attribute_cache: dict[str, tuple[ResolvedColumn, VirtualColumnContext | None]] = ( + field(default_factory=dict) + ) + _resolved_function_cache: dict[str, tuple[ResolvedFunction, VirtualColumnContext | None]] = ( + field(default_factory=dict) + ) def resolve_meta(self, referrer: str) -> RequestMeta: if self.params.organization_id is None: @@ -59,8 +65,59 @@ def resolve_meta(self, referrer: str) -> RequestMeta: end_timestamp=self.params.rpc_end_date, ) - def resolve_query(self, querystring: str) -> TraceItemFilter | None: + def resolve_query(self, querystring: str | None) -> TraceItemFilter | None: """Given a query string in the public search syntax eg. `span.description:foo` construct the TraceItemFilter""" + environment_query = self.__resolve_environment_query() + query = self.__resolve_query(querystring) + + # The RPC request meta does not contain the environment. + # So we have to inject it as a query condition. + # + # To do so, we want to AND it with the query. + # So if either one is not defined, we just use the other. + # But if both are defined, we AND them together. + + if not environment_query: + return query + + if not query: + return environment_query + + return TraceItemFilter( + and_filter=AndFilter( + filters=[ + environment_query, + query, + ] + ) + ) + + def __resolve_environment_query(self) -> TraceItemFilter | None: + resolved_column, _ = self.resolve_column("environment") + if not isinstance(resolved_column.proto_definition, AttributeKey): + return None + + # TODO: replace this with an IN condition when the RPC supports it + filters = [ + TraceItemFilter( + comparison_filter=ComparisonFilter( + key=resolved_column.proto_definition, + op=ComparisonFilter.OP_EQUALS, + value=AttributeValue(val_str=environment.name), + ) + ) + for environment in self.params.environments + if environment is not None + ] + + if not filters: + return None + + return TraceItemFilter(and_filter=AndFilter(filters=filters)) + + def __resolve_query(self, querystring: str | None) -> TraceItemFilter | None: + if querystring is None: + return None try: parsed_terms = event_search.parse_search_query( querystring, @@ -158,44 +215,7 @@ def _resolve_terms(self, terms: event_filter.ParsedTerms) -> TraceItemFilter | N parsed_terms = [] for item in terms: if isinstance(item, event_search.SearchFilter): - resolved_column, context = self.resolve_column(item.key.name) - raw_value = item.value.raw_value - if item.value.is_wildcard(): - if item.operator == "=": - operator = ComparisonFilter.OP_LIKE - elif item.operator == "!=": - operator = ComparisonFilter.OP_NOT_LIKE - else: - raise InvalidSearchQuery( - f"Cannot use a wildcard with a {item.operator} filter" - ) - # Slashes have to be double escaped so they are - # interpreted as a string literal. 
- raw_value = ( - str(item.value.raw_value) - .replace("\\", "\\\\") - .replace("%", "\\%") - .replace("_", "\\_") - .replace("*", "%") - ) - elif item.operator in constants.OPERATOR_MAP: - operator = constants.OPERATOR_MAP[item.operator] - else: - raise InvalidSearchQuery(f"Unknown operator: {item.operator}") - if isinstance(resolved_column.proto_definition, AttributeKey): - parsed_terms.append( - TraceItemFilter( - comparison_filter=ComparisonFilter( - key=resolved_column.proto_definition, - op=operator, - value=self._resolve_search_value( - resolved_column, item.operator, raw_value - ), - ) - ) - ) - else: - raise NotImplementedError("Can't filter on aggregates yet") + parsed_terms.append(self.resolve_term(cast(event_search.SearchFilter, item))) else: if self.config.use_aggregate_conditions: raise NotImplementedError("Can't filter on aggregates yet") @@ -207,6 +227,40 @@ def _resolve_terms(self, terms: event_filter.ParsedTerms) -> TraceItemFilter | N else: return None + def resolve_term(self, term: event_search.SearchFilter) -> TraceItemFilter: + resolved_column, context = self.resolve_column(term.key.name) + raw_value = term.value.raw_value + if term.value.is_wildcard(): + if term.operator == "=": + operator = ComparisonFilter.OP_LIKE + elif term.operator == "!=": + operator = ComparisonFilter.OP_NOT_LIKE + else: + raise InvalidSearchQuery(f"Cannot use a wildcard with a {term.operator} filter") + # Slashes have to be double escaped so they are + # interpreted as a string literal. + raw_value = ( + str(term.value.raw_value) + .replace("\\", "\\\\") + .replace("%", "\\%") + .replace("_", "\\_") + .replace("*", "%") + ) + elif term.operator in constants.OPERATOR_MAP: + operator = constants.OPERATOR_MAP[term.operator] + else: + raise InvalidSearchQuery(f"Unknown operator: {term.operator}") + if isinstance(resolved_column.proto_definition, AttributeKey): + return TraceItemFilter( + comparison_filter=ComparisonFilter( + key=resolved_column.proto_definition, + op=operator, + value=self._resolve_search_value(resolved_column, term.operator, raw_value), + ) + ) + else: + raise NotImplementedError("Can't filter on aggregates yet") + def _resolve_search_value( self, column: ResolvedColumn, @@ -237,7 +291,7 @@ def _resolve_search_value( raise InvalidSearchQuery( f"{value} is not a valid value for doing an IN filter" ) - elif isinstance(value, float): + elif isinstance(value, (float, int)): return AttributeValue(val_int=int(value)) elif column_type == constants.FLOAT: if operator in constants.IN_OPERATORS: @@ -273,7 +327,7 @@ def clean_contexts( def resolve_columns( self, selected_columns: list[str] - ) -> tuple[list[ResolvedColumn], list[VirtualColumnContext]]: + ) -> tuple[list[ResolvedColumn | ResolvedFunction], list[VirtualColumnContext]]: """Given a list of columns resolve them and get their context if applicable This function will also dedupe the virtual column contexts if necessary @@ -305,7 +359,7 @@ def resolve_columns( def resolve_column( self, column: str, match: Match | None = None - ) -> tuple[ResolvedColumn, VirtualColumnContext | None]: + ) -> tuple[ResolvedColumn | ResolvedFunction, VirtualColumnContext | None]: """Column is either an attribute or an aggregate, this function will determine which it is and call the relevant resolve function""" match = fields.is_function(column) @@ -314,10 +368,6 @@ def resolve_column( else: return self.resolve_attribute(column) - # TODO: Cache the column - # self.resolved_coluumn[alias] = ResolvedColumn() - # return ResolvedColumn() - def 
get_field_type(self, column: str) -> str: resolved_column, _ = self.resolve_column(column) return resolved_column.search_type @@ -338,6 +388,8 @@ def resolve_attribute(self, column: str) -> tuple[ResolvedColumn, VirtualColumnC """Attributes are columns that aren't 'functions' or 'aggregates', usually this means string or numeric attributes (aka. tags), but can also refer to fields like span.description""" # If a virtual context is defined the column definition is always the same + if column in self._resolved_attribute_cache: + return self._resolved_attribute_cache[column] if column in VIRTUAL_CONTEXTS: column_context = VIRTUAL_CONTEXTS[column](self.params) column_definition = ResolvedColumn( @@ -368,19 +420,21 @@ def resolve_attribute(self, column: str) -> tuple[ResolvedColumn, VirtualColumnC if field_type not in constants.TYPE_MAP: raise InvalidSearchQuery(f"Unsupported type {field_type} in {column}") + search_type = cast(constants.SearchType, field_type) column_definition = ResolvedColumn( - public_alias=column, internal_name=field, search_type=field_type + public_alias=column, internal_name=field, search_type=search_type ) column_context = None if column_definition: - return column_definition, column_context + self._resolved_attribute_cache[column] = (column_definition, column_context) + return self._resolved_attribute_cache[column] else: raise InvalidSearchQuery(f"Could not parse {column}") def resolve_aggregates( self, columns: list[str] - ) -> tuple[list[ResolvedColumn], list[VirtualColumnContext | None]]: + ) -> tuple[list[ResolvedFunction], list[VirtualColumnContext | None]]: """Helper function to resolve a list of aggregates instead of 1 attribute at a time""" resolved_aggregates, resolved_contexts = [], [] for column in columns: @@ -391,7 +445,9 @@ def resolve_aggregates( def resolve_aggregate( self, column: str, match: Match | None = None - ) -> tuple[ResolvedColumn, VirtualColumnContext | None]: + ) -> tuple[ResolvedFunction, VirtualColumnContext | None]: + if column in self._resolved_function_cache: + return self._resolved_function_cache[column] # Check if this is a valid function, parse the function name and args out if match is None: match = fields.is_function(column) @@ -441,23 +497,29 @@ def resolve_aggregate( # Proto doesn't support anything more than 1 argument yet if len(parsed_columns) > 1: raise InvalidSearchQuery("Cannot use more than one argument") - elif len(parsed_columns) == 1: - resolved_argument = ( - parsed_columns[0].proto_definition - if isinstance(parsed_columns[0].proto_definition, AttributeKey) - else None + elif len(parsed_columns) == 1 and isinstance( + parsed_columns[0].proto_definition, AttributeKey + ): + parsed_column = parsed_columns[0] + resolved_argument = parsed_column.proto_definition + search_type = ( + parsed_column.search_type + if function_definition.infer_search_type_from_arguments + else function_definition.default_search_type ) else: resolved_argument = None - - return ( - ResolvedColumn( - public_alias=alias, - internal_name=function_definition.internal_function, - search_type=function_definition.search_type, - internal_type=function_definition.internal_type, - processor=function_definition.processor, - argument=resolved_argument, - ), - None, + search_type = function_definition.default_search_type + + resolved_function = ResolvedFunction( + public_alias=alias, + internal_name=function_definition.internal_function, + search_type=search_type, + internal_type=function_definition.internal_type, + processor=function_definition.processor, + 
extrapolation=function_definition.extrapolation, + argument=resolved_argument, ) + resolved_context = None + self._resolved_function_cache[column] = (resolved_function, resolved_context) + return self._resolved_function_cache[column] diff --git a/src/sentry/search/eap/types.py b/src/sentry/search/eap/types.py index 3650bca03d4e59..949c23974678dc 100644 --- a/src/sentry/search/eap/types.py +++ b/src/sentry/search/eap/types.py @@ -1,4 +1,9 @@ from dataclasses import dataclass +from typing import Literal + +from sentry_protos.snuba.v1.trace_item_attribute_pb2 import Reliability + +from sentry.search.events.types import EventsResponse @dataclass(frozen=True) @@ -10,3 +15,15 @@ class SearchResolverConfig: # TODO: do we need parser_config_overrides? it looks like its just for alerts # Whether to process the results from snuba process_results: bool = True + + +CONFIDENCES: dict[Reliability.ValueType, Literal["low", "high"]] = { + Reliability.RELIABILITY_LOW: "low", + Reliability.RELIABILITY_HIGH: "high", +} +Confidence = Literal["low", "high"] | None +ConfidenceData = list[dict[str, Confidence]] + + +class EAPResponse(EventsResponse): + confidence: ConfidenceData diff --git a/src/sentry/search/events/builder/base.py b/src/sentry/search/events/builder/base.py index b02a6cf7b181fe..bcf70f7c87adac 100644 --- a/src/sentry/search/events/builder/base.py +++ b/src/sentry/search/events/builder/base.py @@ -1,6 +1,5 @@ from __future__ import annotations -import math from collections.abc import Callable, Mapping, Sequence from datetime import datetime, timedelta from re import Match @@ -70,6 +69,7 @@ is_numeric_measurement, is_percentage_measurement, is_span_op_breakdown, + process_value, raw_snql_query, resolve_column, ) @@ -87,10 +87,10 @@ class BaseQueryBuilder: organization_column: str = "organization.id" function_alias_prefix: str | None = None spans_metrics_builder = False - profile_functions_metrics_builder = False entity: Entity | None = None config_class: type[DatasetConfig] | None = None duration_fields: set[str] = set() + size_fields: dict[str, str] = {} uuid_fields: set[str] = set() span_id_fields: set[str] = set() @@ -1004,7 +1004,7 @@ def get_field_type(self, field: str) -> str | None: return self.meta_resolver_map[field] if is_percentage_measurement(field): return "percentage" - elif is_numeric_measurement(field): + if is_numeric_measurement(field): return "number" if ( @@ -1014,6 +1014,9 @@ def get_field_type(self, field: str) -> str | None: ): return "duration" + if unit := self.size_fields.get(field): + return unit + measurement = self.get_measurement_by_name(field) # let the caller decide what to do if measurement is None: @@ -1530,16 +1533,6 @@ def get_snql_query(self) -> Request: tenant_ids=self.tenant_ids, ) - @classmethod - def handle_invalid_float(cls, value: float | None) -> float | None: - if value is None: - return value - elif math.isnan(value): - return 0 - elif math.isinf(value): - return None - return value - def run_query( self, referrer: str | None, use_cache: bool = False, query_source: QuerySource | None = None ) -> Any: @@ -1592,18 +1585,7 @@ def process_results(self, results: Any) -> EventsResponse: def get_row(row: dict[str, Any]) -> dict[str, Any]: transformed = {} for key, value in row.items(): - if isinstance(value, float): - # 0 for nan, and none for inf were chosen arbitrarily, nan and inf are invalid json - # so needed to pick something valid to use instead - if math.isnan(value): - value = 0 - elif math.isinf(value): - value = None - value = 
self.handle_invalid_float(value) - if isinstance(value, list): - for index, item in enumerate(value): - if isinstance(item, float): - value[index] = self.handle_invalid_float(item) + value = process_value(value) if key in self.value_resolver_map: new_value = self.value_resolver_map[key](value) else: diff --git a/src/sentry/search/events/builder/errors.py b/src/sentry/search/events/builder/errors.py index 4869fe65ac191f..650d4963513009 100644 --- a/src/sentry/search/events/builder/errors.py +++ b/src/sentry/search/events/builder/errors.py @@ -88,7 +88,13 @@ def aliased_column(self, name: str) -> SelectType: aliased_col, exp=self._apply_column_entity(aliased_col.exp.name) ) elif isinstance(aliased_col, Column): - return self._apply_column_entity(aliased_col.name) + if self.config.use_entity_prefix_for_fields: + return self._apply_column_entity(aliased_col.name) + + # Map the column with the entity name back to the original resolved name + return AliasedExpression( + self._apply_column_entity(aliased_col.name), alias=aliased_col.name + ) raise NotImplementedError(f"{type(aliased_col)} not implemented in aliased_column") diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py index 23342295762102..d57893ae33b027 100644 --- a/src/sentry/search/events/builder/metrics.py +++ b/src/sentry/search/events/builder/metrics.py @@ -444,8 +444,6 @@ def use_case_id(self) -> UseCaseID: return UseCaseID.SPANS elif self.is_performance: return UseCaseID.TRANSACTIONS - elif self.profile_functions_metrics_builder: - return UseCaseID.PROFILES else: return UseCaseID.SESSIONS @@ -759,7 +757,7 @@ def resolve_metric_index(self, value: str) -> int | None: def resolve_tag_value(self, value: str) -> int | str | None: # We only use the indexer for alerts queries - if self.is_performance or self.use_metrics_layer or self.profile_functions_metrics_builder: + if self.is_performance or self.use_metrics_layer: return value return self.resolve_metric_index(value) diff --git a/src/sentry/search/events/builder/metrics_summaries.py b/src/sentry/search/events/builder/metrics_summaries.py deleted file mode 100644 index 5d2968ab2ba104..00000000000000 --- a/src/sentry/search/events/builder/metrics_summaries.py +++ /dev/null @@ -1,38 +0,0 @@ -from snuba_sdk import Entity, Flags, Query, Request - -from sentry.search.events.builder.base import BaseQueryBuilder -from sentry.search.events.datasets.metrics_summaries import MetricsSummariesDatasetConfig -from sentry.snuba.dataset import Dataset - - -class MetricsSummariesQueryBuilder(BaseQueryBuilder): - requires_organization_condition = False - config_class = MetricsSummariesDatasetConfig - - def get_field_type(self, field: str) -> str | None: - if field in ["min_metric", "max_metric", "sum_metric", "count_metric"]: - return "number" - return None - - def get_snql_query(self) -> Request: - self.validate_having_clause() - - return Request( - # the metrics summaries entity exists within the spans indexed dataset - dataset=Dataset.SpansIndexed.value, - app_id="default", - query=Query( - match=Entity(self.dataset.value, sample=self.sample_rate), - select=self.columns, - array_join=self.array_join, - where=self.where, - having=self.having, - groupby=self.groupby, - orderby=self.orderby, - limit=self.limit, - offset=self.offset, - limitby=self.limitby, - ), - flags=Flags(turbo=self.turbo), - tenant_ids=self.tenant_ids, - ) diff --git a/src/sentry/search/events/builder/profile_functions.py b/src/sentry/search/events/builder/profile_functions.py index 
c38bcaebb61483..08ee6e71c8520d 100644 --- a/src/sentry/search/events/builder/profile_functions.py +++ b/src/sentry/search/events/builder/profile_functions.py @@ -120,9 +120,13 @@ def time_column(self) -> SelectType: def process_results(self, results: Any) -> EventsResponse: # Calling `super().process_results(results)` on the timeseries data # mutates the data in such a way that breaks the zerofill later such - # as applying `transform_alias_to_input_format` setting. So skip it. + # as applying `transform_alias_to_input_format` setting. So only run + # it to get the correct meta. for row in results["data"]: self.process_profiling_function_columns(row) + + results["meta"] = super().process_results(results)["meta"] + return results diff --git a/src/sentry/search/events/builder/profile_functions_metrics.py b/src/sentry/search/events/builder/profile_functions_metrics.py deleted file mode 100644 index 43a956e256f26d..00000000000000 --- a/src/sentry/search/events/builder/profile_functions_metrics.py +++ /dev/null @@ -1,60 +0,0 @@ -from sentry.search.events.builder.metrics import ( - MetricsQueryBuilder, - TimeseriesMetricQueryBuilder, - TopMetricsQueryBuilder, -) -from sentry.search.events.datasets.profile_functions_metrics import ( - ProfileFunctionsMetricsDatasetConfig, -) -from sentry.search.events.types import SelectType - - -class ProfileFunctionsMetricsQueryBuilder(MetricsQueryBuilder): - requires_organization_condition = True - profile_functions_metrics_builder = True - config_class = ProfileFunctionsMetricsDatasetConfig - - column_remapping = { - # We want to remap `message` to `name` for the free - # text search use case so that it searches the `name` - # (function name) when the user performs a free text search - "message": "name", - } - default_metric_tags = { - "project_id", - "fingerprint", - "function", - "package", - "is_application", - "platform", - "environment", - "release", - } - - @property - def use_default_tags(self) -> bool: - return True - - def get_field_type(self, field: str) -> str | None: - if field in self.meta_resolver_map: - return self.meta_resolver_map[field] - return None - - def resolve_select( - self, selected_columns: list[str] | None, equations: list[str] | None - ) -> list[SelectType]: - if selected_columns and "transaction" in selected_columns: - self.has_transaction = True # if always true can we skip this? 
- return super().resolve_select(selected_columns, equations) - - -class TimeseriesProfileFunctionsMetricsQueryBuilder( - ProfileFunctionsMetricsQueryBuilder, TimeseriesMetricQueryBuilder -): - pass - - -class TopProfileFunctionsMetricsQueryBuilder( - TimeseriesProfileFunctionsMetricsQueryBuilder, TopMetricsQueryBuilder -): - pass diff --git a/src/sentry/search/events/builder/spans_indexed.py b/src/sentry/search/events/builder/spans_indexed.py index 853ed7ea307f15..79a82d81f46a1e 100644 --- a/src/sentry/search/events/builder/spans_indexed.py +++ b/src/sentry/search/events/builder/spans_indexed.py @@ -33,23 +33,24 @@ "segment_id", } +DURATION_FIELDS = { + "span.duration", + "span.self_time", +} -class SpansIndexedQueryBuilderMixin: - meta_resolver_map: dict[str, str] - - def get_field_type(self, field: str) -> str | None: - if field in self.meta_resolver_map: - return self.meta_resolver_map[field] - if field in ["span.duration", "span.self_time"]: - return "duration" - - return None +SIZE_FIELDS = { + "http.decoded_response_content_length": "byte", + "http.response_content_length": "byte", + "http.response_transfer_size": "byte", +} -class SpansIndexedQueryBuilder(SpansIndexedQueryBuilderMixin, BaseQueryBuilder): +class SpansIndexedQueryBuilder(BaseQueryBuilder): requires_organization_condition = False uuid_fields = SPAN_UUID_FIELDS span_id_fields = SPAN_ID_FIELDS + duration_fields = DURATION_FIELDS + size_fields = SIZE_FIELDS config_class = SpansIndexedDatasetConfig def __init__(self, *args, **kwargs): @@ -59,10 +60,12 @@ def __init__(self, *args, **kwargs): ) -class SpansEAPQueryBuilder(SpansIndexedQueryBuilderMixin, BaseQueryBuilder): +class SpansEAPQueryBuilder(BaseQueryBuilder): requires_organization_condition = True uuid_fields = SPAN_UUID_FIELDS span_id_fields = SPAN_ID_FIELDS + duration_fields = DURATION_FIELDS + size_fields = SIZE_FIELDS config_class = SpansEAPDatasetConfig def __init__(self, *args, **kwargs): @@ -106,10 +109,12 @@ def resolve_field(self, raw_field: str, alias: bool = False) -> Column: return field_col -class TimeseriesSpanIndexedQueryBuilder(SpansIndexedQueryBuilderMixin, TimeseriesQueryBuilder): +class TimeseriesSpanIndexedQueryBuilder(TimeseriesQueryBuilder): config_class = SpansIndexedDatasetConfig uuid_fields = SPAN_UUID_FIELDS span_id_fields = SPAN_ID_FIELDS + duration_fields = DURATION_FIELDS + size_fields = SIZE_FIELDS @property def time_column(self) -> SelectType: @@ -122,10 +127,12 @@ class TimeseriesSpanEAPIndexedQueryBuilder(SpansEAPQueryBuilder, TimeseriesQuery pass -class TopEventsSpanIndexedQueryBuilder(SpansIndexedQueryBuilderMixin, TopEventsQueryBuilder): +class TopEventsSpanIndexedQueryBuilder(TopEventsQueryBuilder): config_class = SpansIndexedDatasetConfig uuid_fields = SPAN_UUID_FIELDS span_id_fields = SPAN_ID_FIELDS + duration_fields = DURATION_FIELDS + size_fields = SIZE_FIELDS @property def time_column(self) -> SelectType: diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index b77093a6b9b06b..22cc901451e0a8 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -367,9 +367,6 @@ class ThresholdDict(TypedDict): "mobile.frames_delay": "g:spans/mobile.frames_delay@second", "messaging.message.receive.latency": SPAN_MESSAGING_LATENCY, } -PROFILE_METRICS_MAP = { - "function.duration": "d:profiles/function.duration@millisecond", -} # 50 to match the size of tables in the UI + 1 for pagination reasons METRICS_MAX_LIMIT = 101 diff --git 
a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py index f422ad4361b918..9178ae61de05cd 100644 --- a/src/sentry/search/events/datasets/discover.py +++ b/src/sentry/search/events/datasets/discover.py @@ -107,6 +107,7 @@ class DiscoverDatasetConfig(DatasetConfig): "user_misery()", } non_nullable_keys = {"event.type"} + use_entity_prefix_for_fields: bool = False def __init__(self, builder: BaseQueryBuilder): self.builder = builder diff --git a/src/sentry/search/events/datasets/profile_functions_metrics.py b/src/sentry/search/events/datasets/profile_functions_metrics.py deleted file mode 100644 index f05fb1d6fa4ae5..00000000000000 --- a/src/sentry/search/events/datasets/profile_functions_metrics.py +++ /dev/null @@ -1,518 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping -from datetime import datetime - -from snuba_sdk import Column, Function, OrderBy - -from sentry.api.event_search import SearchFilter -from sentry.exceptions import IncompatibleMetricsQuery, InvalidSearchQuery -from sentry.search.events import constants, fields -from sentry.search.events.builder import profile_functions_metrics -from sentry.search.events.constants import PROJECT_ALIAS, PROJECT_NAME_ALIAS -from sentry.search.events.datasets import field_aliases, filter_aliases, function_aliases -from sentry.search.events.datasets.base import DatasetConfig -from sentry.search.events.types import SelectType, WhereType - - -class ProfileFunctionsMetricsDatasetConfig(DatasetConfig): - missing_function_error = IncompatibleMetricsQuery - - def __init__(self, builder: profile_functions_metrics.ProfileFunctionsMetricsQueryBuilder): - self.builder = builder - - def resolve_mri(self, value: str) -> Column: - return Column(constants.PROFILE_METRICS_MAP[value]) - - @property - def search_filter_converter( - self, - ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]: - return { - PROJECT_ALIAS: self._project_slug_filter_converter, - PROJECT_NAME_ALIAS: self._project_slug_filter_converter, - } - - @property - def orderby_converter(self) -> Mapping[str, OrderBy]: - return {} - - @property - def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]: - return { - PROJECT_ALIAS: self._resolve_project_slug_alias, - PROJECT_NAME_ALIAS: self._resolve_project_slug_alias, - } - - def _project_slug_filter_converter(self, search_filter: SearchFilter) -> WhereType | None: - return filter_aliases.project_slug_converter(self.builder, search_filter) - - def _resolve_project_slug_alias(self, alias: str) -> SelectType: - return field_aliases.resolve_project_slug_alias(self.builder, alias) - - def resolve_metric(self, value: str) -> int: - # "function.duration" --> "d:profiles/function.duration@millisecond" - metric_id = self.builder.resolve_metric_index( - constants.PROFILE_METRICS_MAP.get(value, value) - ) - # If it's still None its not a custom measurement - if metric_id is None: - raise IncompatibleMetricsQuery(f"Metric: {value} could not be resolved") - self.builder.metric_ids.add(metric_id) - return metric_id - - def _resolve_avg(self, args, alias): - return Function( - "avgIf", - [ - Column("value"), - Function("equals", [Column("metric_id"), args["metric_id"]]), - ], - alias, - ) - - def _resolve_cpm( - self, - args: Mapping[str, str | Column | SelectType | int | float], - alias: str | None, - extra_condition: Function | None = None, - ) -> SelectType: - assert ( - self.builder.params.end is not None and self.builder.params.start is 
not None - ), f"params.end: {self.builder.params.end} - params.start: {self.builder.params.start}" - interval = (self.builder.params.end - self.builder.params.start).total_seconds() - - base_condition = Function( - "equals", - [ - Column("metric_id"), - self.resolve_metric("function.duration"), - ], - ) - if extra_condition: - condition = Function("and", [base_condition, extra_condition]) - else: - condition = base_condition - - return Function( - "divide", - [ - Function( - "countIf", - [ - Column("value"), - condition, - ], - ), - Function("divide", [interval, 60]), - ], - alias, - ) - - def _resolve_cpm_cond( - self, - args: Mapping[str, str | Column | SelectType | int | float | datetime], - cond: str, - alias: str | None, - ) -> SelectType: - timestmp = args["timestamp"] - if cond == "greater": - assert isinstance(self.builder.params.end, datetime) and isinstance( - timestmp, datetime - ), f"params.end: {self.builder.params.end} - timestmp: {timestmp}" - interval = (self.builder.params.end - timestmp).total_seconds() - # interval = interval - elif cond == "less": - assert isinstance(self.builder.params.start, datetime) and isinstance( - timestmp, datetime - ), f"params.start: {self.builder.params.start} - timestmp: {timestmp}" - interval = (timestmp - self.builder.params.start).total_seconds() - else: - raise InvalidSearchQuery(f"Unsupported condition for cpm: {cond}") - - metric_id_condition = Function( - "equals", [Column("metric_id"), self.resolve_metric("function.duration")] - ) - - return Function( - "divide", - [ - Function( - "countIf", - [ - Column("value"), - Function( - "and", - [ - metric_id_condition, - Function( - cond, - [ - Column("timestamp"), - args["timestamp"], - ], - ), - ], - ), # close and condition - ], - ), - Function("divide", [interval, 60]), - ], - alias, - ) - - def _resolve_cpm_delta( - self, - args: Mapping[str, str | Column | SelectType | int | float], - alias: str, - ) -> SelectType: - return Function( - "minus", - [ - self._resolve_cpm_cond(args, "greater", None), - self._resolve_cpm_cond(args, "less", None), - ], - alias, - ) - - def _resolve_regression_score( - self, - args: Mapping[str, str | Column | SelectType | int | float | datetime], - alias: str | None = None, - ) -> SelectType: - return Function( - "minus", - [ - Function( - "multiply", - [ - self._resolve_cpm_cond(args, "greater", None), - function_aliases.resolve_metrics_percentile( - args=args, - alias=None, - extra_conditions=[ - Function("greater", [Column("timestamp"), args["timestamp"]]) - ], - ), - ], - ), - Function( - "multiply", - [ - self._resolve_cpm_cond(args, "less", None), - function_aliases.resolve_metrics_percentile( - args=args, - alias=None, - extra_conditions=[ - Function("less", [Column("timestamp"), args["timestamp"]]) - ], - ), - ], - ), - ], - alias, - ) - - @property - def function_converter(self) -> Mapping[str, fields.MetricsFunction]: - """While the final functions in clickhouse must have their -Merge combinators in order to function, we don't - need to add them here since snuba has a FunctionMapper that will add it for us. 
Basically it turns expressions - like quantiles(0.9)(value) into quantilesMerge(0.9)(percentiles) - Make sure to update METRIC_FUNCTION_LIST_BY_TYPE when adding functions here, can't be a dynamic list since the - Metric Layer will actually handle which dataset each function goes to - """ - resolve_metric_id = { - "name": "metric_id", - "fn": lambda args: self.resolve_metric(args["column"]), - } - function_converter = { - function.name: function - for function in [ - fields.MetricsFunction( - "count", - snql_distribution=lambda args, alias: Function( - "countIf", - [ - Column("value"), - Function( - "equals", - [ - Column("metric_id"), - self.resolve_metric("function.duration"), - ], - ), - ], - alias, - ), - default_result_type="integer", - ), - fields.MetricsFunction( - "cpm", # calls per minute - snql_distribution=lambda args, alias: self._resolve_cpm(args, alias), - default_result_type="number", - ), - fields.MetricsFunction( - "cpm_before", - required_args=[fields.TimestampArg("timestamp")], - snql_distribution=lambda args, alias: self._resolve_cpm_cond( - args, "less", alias - ), - default_result_type="number", - ), - fields.MetricsFunction( - "cpm_after", - required_args=[fields.TimestampArg("timestamp")], - snql_distribution=lambda args, alias: self._resolve_cpm_cond( - args, "greater", alias - ), - default_result_type="number", - ), - fields.MetricsFunction( - "cpm_delta", - required_args=[fields.TimestampArg("timestamp")], - snql_distribution=self._resolve_cpm_delta, - default_result_type="number", - ), - fields.MetricsFunction( - "percentile", - required_args=[ - fields.with_default( - "function.duration", - fields.MetricArg("column", allowed_columns=["function.duration"]), - ), - fields.NumberRange("percentile", 0, 1), - ], - calculated_args=[resolve_metric_id], - snql_distribution=function_aliases.resolve_metrics_percentile, - is_percentile=True, - result_type_fn=self.reflective_result_type(), - default_result_type="duration", - ), - fields.MetricsFunction( - "p50", - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: function_aliases.resolve_metrics_percentile( - args=args, alias=alias, fixed_percentile=0.50 - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "p75", - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: function_aliases.resolve_metrics_percentile( - args=args, alias=alias, fixed_percentile=0.75 - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "p95", - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: function_aliases.resolve_metrics_percentile( - args=args, alias=alias, fixed_percentile=0.95 - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "p99", - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - 
allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: function_aliases.resolve_metrics_percentile( - args=args, alias=alias, fixed_percentile=0.99 - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "avg", - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_gauge=self._resolve_avg, - snql_distribution=self._resolve_avg, - result_type_fn=self.reflective_result_type(), - default_result_type="duration", - ), - fields.MetricsFunction( - "sum", - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: Function( - "sumIf", - [ - Column("value"), - Function("equals", [Column("metric_id"), args["metric_id"]]), - ], - alias, - ), - default_result_type="duration", - ), - fields.MetricsFunction( - "percentile_before", - required_args=[ - fields.TimestampArg("timestamp"), - fields.NumberRange("percentile", 0, 1), - ], - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: function_aliases.resolve_metrics_percentile( - args=args, - alias=alias, - extra_conditions=[ - Function("less", [Column("timestamp"), args["timestamp"]]) - ], - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "percentile_after", - required_args=[ - fields.TimestampArg("timestamp"), - fields.NumberRange("percentile", 0, 1), - ], - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: function_aliases.resolve_metrics_percentile( - args=args, - alias=alias, - extra_conditions=[ - Function("greater", [Column("timestamp"), args["timestamp"]]) - ], - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "percentile_delta", - required_args=[ - fields.TimestampArg("timestamp"), - fields.NumberRange("percentile", 0, 1), - ], - optional_args=[ - fields.with_default( - "function.duration", - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - ), - ], - calculated_args=[resolve_metric_id], - snql_distribution=lambda args, alias: Function( - "minus", - [ - function_aliases.resolve_metrics_percentile( - args=args, - alias=alias, - extra_conditions=[ - Function("greater", [Column("timestamp"), args["timestamp"]]) - ], - ), - function_aliases.resolve_metrics_percentile( - args=args, - alias=alias, - extra_conditions=[ - Function("less", [Column("timestamp"), args["timestamp"]]) - ], - ), - ], - alias, - ), - is_percentile=True, - default_result_type="duration", - ), - fields.MetricsFunction( - "regression_score", - required_args=[ - fields.MetricArg( - "column", - allowed_columns=["function.duration"], - allow_custom_measurements=False, - ), - fields.TimestampArg("timestamp"), - fields.NumberRange("percentile", 0, 1), - ], - 
calculated_args=[resolve_metric_id], - snql_distribution=self._resolve_regression_score, - default_result_type="number", - ), - ] - } - return function_converter diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py index a9b045ba4536bd..d32691f62e64b4 100644 --- a/src/sentry/search/events/datasets/spans_indexed.py +++ b/src/sentry/search/events/datasets/spans_indexed.py @@ -44,7 +44,7 @@ def __init__(self, builder: BaseQueryBuilder): @property def search_filter_converter( self, - ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]: + ) -> dict[str, Callable[[SearchFilter], WhereType | None]]: return { "message": self._message_filter_converter, constants.PROJECT_ALIAS: self._project_slug_filter_converter, @@ -620,10 +620,11 @@ def function_converter(self) -> dict[str, SnQLFunction]: snql_aggregate=lambda args, alias: function_aliases.resolve_epm( args, alias, self.builder ), + optional_args=[IntervalDefault("interval", 1, None)], default_result_type="rate", ), SnQLFunction( - "count", + "count_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -637,14 +638,14 @@ def function_converter(self) -> dict[str, SnQLFunction]: default_result_type="integer", ), SnQLFunction( - "sum", + "sum_sample", required_args=[NumericColumn("column", spans=True)], snql_aggregate=self._resolve_aggregate_if("sum"), result_type_fn=self.reflective_result_type(), default_result_type="duration", ), SnQLFunction( - "avg", + "avg_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -654,7 +655,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p50", + "p50_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -664,7 +665,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p75", + "p75_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -674,7 +675,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p90", + "p90_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -684,7 +685,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p95", + "p95_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -694,7 +695,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p99", + "p99_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -704,7 +705,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p100", + "p100_sample", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -730,7 +731,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "count_weighted", + "count", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -738,20 +739,14 @@ def function_converter(self) -> dict[str, SnQLFunction]: default_result_type="integer", ), SnQLFunction( - "count_unique_weighted", - required_args=[ColumnTagArg("column")], - snql_aggregate=self._resolve_aggregate_if("uniq"), - default_result_type="integer", - ), - 
SnQLFunction( - "sum_weighted", + "sum", required_args=[NumericColumn("column", spans=True)], result_type_fn=self.reflective_result_type(), snql_aggregate=lambda args, alias: self._resolve_sum_weighted(args, alias), default_result_type="duration", ), SnQLFunction( - "avg_weighted", + "avg", required_args=[NumericColumn("column", spans=True)], result_type_fn=self.reflective_result_type(), snql_aggregate=lambda args, alias: Function( @@ -765,7 +760,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: default_result_type="duration", ), SnQLFunction( - "percentile_weighted", + "percentile", required_args=[ NumericColumn("column", spans=True), NumberRange("percentile", 0, 1), @@ -776,7 +771,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p50_weighted", + "p50", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -788,7 +783,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p75_weighted", + "p75", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -800,7 +795,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p90_weighted", + "p90", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -812,7 +807,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p95_weighted", + "p95", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -824,7 +819,7 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p99_weighted", + "p99", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], @@ -836,29 +831,10 @@ def function_converter(self) -> dict[str, SnQLFunction]: redundant_grouping=True, ), SnQLFunction( - "p100_weighted", + "p100", optional_args=[ with_default("span.duration", NumericColumn("column", spans=True)), ], - snql_aggregate=lambda args, alias: self._resolve_percentile_weighted( - args, alias, 1.0 - ), - result_type_fn=self.reflective_result_type(), - default_result_type="duration", - redundant_grouping=True, - ), - # Min and Max are identical to their existing implementations - SnQLFunction( - "min_weighted", - required_args=[NumericColumn("column", spans=True)], - snql_aggregate=self._resolve_aggregate_if("min"), - result_type_fn=self.reflective_result_type(), - default_result_type="duration", - redundant_grouping=True, - ), - SnQLFunction( - "max_weighted", - required_args=[NumericColumn("column", spans=True)], snql_aggregate=self._resolve_aggregate_if("max"), result_type_fn=self.reflective_result_type(), default_result_type="duration", @@ -938,6 +914,14 @@ def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]: existing_field_aliases.update(field_alias_converter) return existing_field_aliases + @property + def search_filter_converter( + self, + ) -> dict[str, Callable[[SearchFilter], WhereType | None]]: + existing_search_filters = super().search_filter_converter + del existing_search_filters[constants.SPAN_STATUS] + return existing_search_filters + def _resolve_sum_weighted( self, args: Mapping[str, str | SelectType | int | float], @@ -1059,14 +1043,14 @@ def _query_total_counts(self) -> tuple[float | int, float | int]: dataset=self.builder.dataset, params={}, snuba_params=self.builder.params, - 
selected_columns=["count()", "count_weighted()"], + selected_columns=["count_sample()", "count()"], ) total_results = total_query.run_query(Referrer.API_SPANS_TOTAL_COUNT_FIELD.value) results = total_query.process_results(total_results) if len(results["data"]) != 1: raise Exception("Could not query population size") - self._cached_count = results["data"][0]["count"] - self._cached_count_weighted = results["data"][0]["count_weighted"] + self._cached_count = results["data"][0]["count_sample"] + self._cached_count_weighted = results["data"][0]["count"] return self._cached_count, self._cached_count_weighted @cached_property diff --git a/src/sentry/search/events/types.py b/src/sentry/search/events/types.py index 81992445893f47..f4b8e4e4672311 100644 --- a/src/sentry/search/events/types.py +++ b/src/sentry/search/events/types.py @@ -231,6 +231,8 @@ class QueryBuilderConfig: skip_field_validation_for_entity_subscription_deletion: bool = False allow_metric_aggregates: bool | None = False insights_metrics_override_metric_layer: bool = False + # Allow the errors query builder to use the entity prefix for fields + use_entity_prefix_for_fields: bool = False @dataclass(frozen=True) diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py index 9db410eb8fdd73..3e5d47527e06f5 100644 --- a/src/sentry/seer/similarity/utils.py +++ b/src/sentry/seer/similarity/utils.py @@ -1,8 +1,10 @@ import logging +from collections.abc import Mapping, Sequence +from enum import StrEnum from typing import Any, TypeVar from sentry import options -from sentry.eventstore.models import Event +from sentry.eventstore.models import Event, GroupEvent from sentry.killswitches import killswitch_matches_context from sentry.models.project import Project from sentry.utils import metrics @@ -15,6 +17,7 @@ FULLY_MINIFIED_STACKTRACE_MAX_FRAME_COUNT = 20 SEER_ELIGIBLE_PLATFORMS_EVENTS = frozenset( [ + "csharp", "go", "java", "javascript", @@ -24,17 +27,14 @@ "ruby", ] ) -SEER_ELIGIBLE_PLATFORMS = frozenset( +# An original set of platforms were backfilled allowing more than 30 system contributing frames +# being set to seer. Unfortunately, this can cause over grouping. We will need to reduce +# these set of platforms but for now we will blacklist them. 
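# (Editorial sketch, not part of the patch.) Condensed view of how the blacklist defined just
# below is wired into the frame-count check that this file adds inside get_stacktrace_string()
# further down: only platforms outside the blacklist, whose contributing frames were truncated
# at MAX_FRAME_COUNT and which have no in-app hash, raise TooManyOnlySystemFramesException.
# Names are taken from the surrounding hunks; the helper itself is hypothetical.
def _should_raise_for_system_frames(
    platform: str | None, is_frames_truncated: bool, app_hash: str | None
) -> bool:
    return (
        platform not in SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS
        and is_frames_truncated
        and not app_hash
    )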
+SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS = frozenset( [ - # "android", - # "android-profiling-onboarding-1-install", - # "android-profiling-onboarding-3-configure-profiling", - # "android-profiling-onboarding-4-upload", "bun", - # "dart", "deno", "django", - # "flutter", "go", "go-echo", "go-fasthttp", @@ -44,16 +44,6 @@ "go-iris", "go-martini", "go-negroni", - # "groovy", - "java", - "java-android", - # "java-appengine", - # "java-log4j", - # "java-log4j2", - # "java-logging", - "java-logback", - # "java-spring", - # "java-spring-boot", "javascript", "javascript-angular", "javascript-angularjs", @@ -143,6 +133,29 @@ "ruby-rails", ] ) +SEER_ELIGIBLE_PLATFORMS = SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS | frozenset( + [ + "android", + "android-profiling-onboarding-1-install", + "android-profiling-onboarding-3-configure-profiling", + "android-profiling-onboarding-4-upload", + "csharp", + "csharp-aspnetcore", + "dart", + "dotnet", + "flutter", + "groovy", + "java", + "java-android", + "java-appengine", + "java-log4j", + "java-log4j2", + "java-logging", + "java-logback", + "java-spring", + "java-spring-boot", + ] +) BASE64_ENCODED_PREFIXES = [ "data:text/html;base64", "data:text/javascript;base64", @@ -151,11 +164,20 @@ ] -def _get_value_if_exists(exception_value: dict[str, Any]) -> str: +class ReferrerOptions(StrEnum): + INGEST = "ingest" + BACKFILL = "backfill" + + +class TooManyOnlySystemFramesException(Exception): + pass + + +def _get_value_if_exists(exception_value: Mapping[str, Any]) -> str: return exception_value["values"][0] if exception_value.get("values") else "" -def get_stacktrace_string(data: dict[str, Any]) -> str: +def get_stacktrace_string(data: dict[str, Any], platform: str | None = None) -> str: """Format a stacktrace string from the grouping information.""" app_hash = get_path(data, "app", "hash") app_component = get_path(data, "app", "component", "values") @@ -177,6 +199,7 @@ def get_stacktrace_string(data: dict[str, Any]) -> str: frame_count = 0 html_frame_count = 0 # for a temporary metric + is_frames_truncated = False stacktrace_str = "" found_non_snipped_context_line = False @@ -185,22 +208,23 @@ def get_stacktrace_string(data: dict[str, Any]) -> str: def _process_frames(frames: list[dict[str, Any]]) -> list[str]: nonlocal frame_count nonlocal html_frame_count + nonlocal is_frames_truncated nonlocal found_non_snipped_context_line frame_strings = [] contributing_frames = [ frame for frame in frames if frame.get("id") == "frame" and frame.get("contributes") ] + if len(contributing_frames) + frame_count > MAX_FRAME_COUNT: + is_frames_truncated = True contributing_frames = _discard_excess_frames( contributing_frames, MAX_FRAME_COUNT, frame_count ) frame_count += len(contributing_frames) for frame in contributing_frames: - frame_dict = {"filename": "", "function": "", "context-line": ""} - for frame_values in frame.get("values", []): - if frame_values.get("id") in frame_dict: - frame_dict[frame_values["id"]] = _get_value_if_exists(frame_values) + frame_dict = extract_values_from_frame_values(frame.get("values", [])) + filename = extract_filename(frame_dict) or "None" if not _is_snipped_context_line(frame_dict["context-line"]): found_non_snipped_context_line = True @@ -213,22 +237,11 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]: if frame_dict["filename"].endswith("html") or "" in frame_dict["context-line"]: html_frame_count += 1 - # We want to skip frames with base64 encoded filenames since they can be large - # and not contain any usable information - 
base64_encoded = False - for base64_prefix in BASE64_ENCODED_PREFIXES: - if frame_dict["filename"].startswith(base64_prefix): - metrics.incr( - "seer.grouping.base64_encoded_filename", - sample_rate=options.get("seer.similarity.metrics_sample_rate"), - ) - base64_encoded = True - break - if base64_encoded: + if is_base64_encoded_frame(frame_dict): continue frame_strings.append( - f' File "{frame_dict["filename"]}", function {frame_dict["function"]}\n {frame_dict["context-line"]}\n' + f' File "{filename}", function {frame_dict["function"]}\n {frame_dict["context-line"]}\n' ) return frame_strings @@ -259,6 +272,13 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]: exc_value = _get_value_if_exists(exception_value) elif exception_value.get("id") == "stacktrace" and frame_count < MAX_FRAME_COUNT: frame_strings = _process_frames(exception_value["values"]) + if ( + platform not in SYSTEM_FRAME_CHECK_BLACKLIST_PLATFORMS + and is_frames_truncated + and not app_hash + ): + raise TooManyOnlySystemFramesException + # Only exceptions have the type and value properties, so we don't need to handle the threads # case here header = f"{exc_type}: {exc_value}\n" if exception["id"] == "exception" else "" @@ -294,7 +314,63 @@ def _process_frames(frames: list[dict[str, Any]]) -> list[str]: return stacktrace_str.strip() -def event_content_has_stacktrace(event: Event) -> bool: +def extract_values_from_frame_values(values: Sequence[Mapping[str, Any]]) -> dict[str, Any]: + frame_dict = {"filename": "", "function": "", "context-line": "", "module": ""} + for frame_values in values: + if frame_values.get("id") in frame_dict: + frame_dict[frame_values["id"]] = _get_value_if_exists(frame_values) + return frame_dict + + +def extract_filename(frame_dict: Mapping[str, Any]) -> str: + """ + Extract the filename from the frame dictionary. Fallback to module if filename is not present. + """ + filename = frame_dict["filename"] + if filename == "" and frame_dict["module"] != "": + filename = frame_dict["module"] + return filename + + +def is_base64_encoded_frame(frame_dict: Mapping[str, Any]) -> bool: + # We want to skip frames with base64 encoded filenames since they can be large + # and not contain any usable information + base64_encoded = False + for base64_prefix in BASE64_ENCODED_PREFIXES: + if frame_dict["filename"].startswith(base64_prefix): + base64_encoded = True + break + return base64_encoded + + +def get_stacktrace_string_with_metrics( + data: dict[str, Any], platform: str | None, referrer: ReferrerOptions +) -> str | None: + stacktrace_string = None + key = "grouping.similarity.did_call_seer" + sample_rate = options.get("seer.similarity.metrics_sample_rate") + try: + stacktrace_string = get_stacktrace_string(data, platform) + except TooManyOnlySystemFramesException: + platform = platform if platform else "unknown" + metrics.incr( + "grouping.similarity.over_threshold_only_system_frames", + sample_rate=sample_rate, + tags={"platform": platform, "referrer": referrer}, + ) + if referrer == ReferrerOptions.INGEST: + metrics.incr( + key, + sample_rate=sample_rate, + tags={"call_made": False, "blocker": "over-threshold-only-system-frames"}, + ) + except Exception: + logger.exception("Unexpected exception in stacktrace string formatting") + + return stacktrace_string + + +def event_content_has_stacktrace(event: GroupEvent | Event) -> bool: # If an event has no stacktrace, there's no data for Seer to analyze, so no point in making the # API call. 
If we ever start analyzing message-only events, we'll need to add `event.title in # PLACEHOLDER_EVENT_TITLES` to this check. @@ -304,7 +380,7 @@ def event_content_has_stacktrace(event: Event) -> bool: return exception_stacktrace or threads_stacktrace or only_stacktrace -def event_content_is_seer_eligible(event: Event) -> bool: +def event_content_is_seer_eligible(event: GroupEvent | Event) -> bool: """ Determine if an event's contents makes it fit for using with Seer's similar issues model. """ @@ -333,7 +409,7 @@ def event_content_is_seer_eligible(event: Event) -> bool: return True -def killswitch_enabled(project_id: int, event: Event | None = None) -> bool: +def killswitch_enabled(project_id: int, event: GroupEvent | Event | None = None) -> bool: """ Check both the global and similarity-specific Seer killswitches. """ diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py index efa42e1c3752ed..1ebe56f95d6374 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py @@ -14,6 +14,8 @@ from sentry.auth.staff import is_active_staff from sentry.auth.superuser import is_active_superuser from sentry.constants import SentryAppStatus +from sentry.db.models.manager.base_query_set import BaseQuerySet +from sentry.organizations.services.organization import organization_service from sentry.sentry_apps.api.bases.sentryapps import SentryAppsBaseEndpoint from sentry.sentry_apps.api.parsers.sentry_app import SentryAppParser from sentry.sentry_apps.api.serializers.sentry_app import ( @@ -46,25 +48,11 @@ def get(self, request: Request) -> Response: elif status == "unpublished": queryset = SentryApp.objects.filter(status=SentryAppStatus.UNPUBLISHED) if not elevated_user: - queryset = queryset.filter( - owner_id__in=[ - o.id - for o in user_service.get_organizations( - user_id=request.user.id, only_visible=True - ) - ] - ) + queryset = self._filter_queryset_for_user(queryset, request.user.id) elif status == "internal": queryset = SentryApp.objects.filter(status=SentryAppStatus.INTERNAL) if not elevated_user: - queryset = queryset.filter( - owner_id__in=[ - o.id - for o in user_service.get_organizations( - user_id=request.user.id, only_visible=True - ) - ] - ) + queryset = self._filter_queryset_for_user(queryset, request.user.id) else: if elevated_user: queryset = SentryApp.objects.all() @@ -168,6 +156,15 @@ def post(self, request: Request, organization) -> Response: analytics.record(name, **log_info) return Response(serializer.errors, status=400) + def _filter_queryset_for_user(self, queryset: BaseQuerySet[SentryApp, SentryApp], user_id: int): + owner_ids = [] + for o in user_service.get_organizations(user_id=user_id, only_visible=True): + org_context = organization_service.get_organization_by_id(id=o.id, user_id=user_id) + if org_context and org_context.member and "org:read" in org_context.member.scopes: + owner_ids.append(o.id) + + return queryset.filter(owner_id__in=owner_ids) + def _has_hook_events(self, request: Request): if not request.json_body.get("events"): return False diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py index d0ad44c5f50932..7f8aae8ad5f161 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py @@ -16,7 +16,7 @@ class SentryAppsStatsEndpoint(SentryAppsBaseEndpoint): owner = ApiOwner.INTEGRATIONS 
publish_status = { - "GET": ApiPublishStatus.UNKNOWN, + "GET": ApiPublishStatus.PRIVATE, } permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,) diff --git a/src/sentry/sentry_apps/external_requests/issue_link_requester.py b/src/sentry/sentry_apps/external_requests/issue_link_requester.py index f999dcd5d5f17c..50279b4d79f8ff 100644 --- a/src/sentry/sentry_apps/external_requests/issue_link_requester.py +++ b/src/sentry/sentry_apps/external_requests/issue_link_requester.py @@ -12,6 +12,7 @@ from sentry.models.group import Group from sentry.sentry_apps.external_requests.utils import send_and_save_sentry_app_request, validate from sentry.sentry_apps.services.app import RpcSentryAppInstallation +from sentry.users.models.user import User from sentry.users.services.user import RpcUser from sentry.utils import json @@ -54,7 +55,7 @@ class IssueLinkRequester: uri: str group: Group fields: dict[str, Any] - user: RpcUser + user: RpcUser | User action: str def run(self) -> dict[str, Any]: diff --git a/src/sentry/sentry_apps/external_requests/select_requester.py b/src/sentry/sentry_apps/external_requests/select_requester.py index b7e746811f3e7b..9df1240653f150 100644 --- a/src/sentry/sentry_apps/external_requests/select_requester.py +++ b/src/sentry/sentry_apps/external_requests/select_requester.py @@ -1,6 +1,7 @@ import logging +from collections.abc import Sequence from dataclasses import dataclass, field -from typing import Any +from typing import Annotated, Any, TypedDict from urllib.parse import urlencode, urlparse, urlunparse from uuid import uuid4 @@ -17,6 +18,12 @@ logger = logging.getLogger("sentry.sentry_apps.external_requests") +class SelectRequesterResult(TypedDict, total=False): + # Each contained Sequence of strings is of length 2 i.e ["label", "value"] + choices: Sequence[Annotated[Sequence[str], 2]] + defaultValue: str + + @dataclass class SelectRequester: """ @@ -34,7 +41,7 @@ class SelectRequester: query: str | None = field(default=None) dependent_data: str | None = field(default=None) - def run(self) -> dict[str, Any]: + def run(self) -> SelectRequesterResult: response: list[dict[str, str]] = [] try: url = self._build_url() @@ -71,7 +78,9 @@ def run(self) -> dict[str, Any]: message = "select-requester.request-failed" logger.info(message, extra=extra) - raise APIError from e + raise APIError( + f"Something went wrong while getting SelectFields from {self.sentry_app.slug}" + ) from e if not self._validate_response(response): logger.info( @@ -85,7 +94,7 @@ def run(self) -> dict[str, Any]: }, ) raise ValidationError( - f"Invalid response format for SelectField in {self.sentry_app} from uri: {self.uri}" + f"Invalid response format for SelectField in {self.sentry_app.slug} from uri: {self.uri}" ) return self._format_response(response) @@ -107,14 +116,16 @@ def _build_url(self) -> str: urlparts[4] = urlencode(query) return str(urlunparse(urlparts)) - def _validate_response(self, resp: list[dict[str, Any]]) -> bool: + # response format must be: + # https://docs.sentry.io/organization/integrations/integration-platform/ui-components/formfield/#uri-response-format + def _validate_response(self, resp: Sequence[dict[str, Any]]) -> bool: return validate(instance=resp, schema_type="select") - def _format_response(self, resp: list[dict[str, Any]]) -> dict[str, Any]: + def _format_response(self, resp: Sequence[dict[str, Any]]) -> SelectRequesterResult: # the UI expects the following form: # choices: [[label, value]] # default: [label, value] - response: dict[str, Any] = {} + response: 
SelectRequesterResult = {} choices: list[list[str]] = [] for option in resp: diff --git a/src/sentry/sentry_apps/models/sentry_app.py b/src/sentry/sentry_apps/models/sentry_app.py index 2693de9b2c7699..effdbaa0bbabb2 100644 --- a/src/sentry/sentry_apps/models/sentry_app.py +++ b/src/sentry/sentry_apps/models/sentry_app.py @@ -31,7 +31,7 @@ from sentry.hybridcloud.models.outbox import ControlOutbox, outbox_context from sentry.hybridcloud.outbox.category import OutboxCategory, OutboxScope from sentry.models.apiscopes import HasApiScopes -from sentry.types.region import find_all_region_names +from sentry.types.region import find_all_region_names, find_regions_for_sentry_app from sentry.utils import metrics # When a developer selects to receive " Webhooks" it really means @@ -234,6 +234,9 @@ def outboxes_for_update(self) -> list[ControlOutbox]: for region_name in find_all_region_names() ] + def regions_with_installations(self) -> set[str]: + return find_regions_for_sentry_app(self) + def delete(self, *args, **kwargs): from sentry.sentry_apps.models.sentry_app_avatar import SentryAppAvatar diff --git a/src/sentry/sentry_apps/services/app/impl.py b/src/sentry/sentry_apps/services/app/impl.py index 74f3a5fa033484..e266283516b35e 100644 --- a/src/sentry/sentry_apps/services/app/impl.py +++ b/src/sentry/sentry_apps/services/app/impl.py @@ -97,7 +97,7 @@ def get_sentry_app_by_slug(self, *, slug: str) -> RpcSentryApp | None: except SentryApp.DoesNotExist: return None - def get_installed_for_organization( + def get_installations_for_organization( self, *, organization_id: int ) -> list[RpcSentryAppInstallation]: installations = SentryAppInstallation.objects.get_installed_for_organization( diff --git a/src/sentry/sentry_apps/services/app/service.py b/src/sentry/sentry_apps/services/app/service.py index beac279d86361c..7d8d8f466fcc60 100644 --- a/src/sentry/sentry_apps/services/app/service.py +++ b/src/sentry/sentry_apps/services/app/service.py @@ -8,7 +8,7 @@ from typing import Any from sentry.auth.services.auth import AuthenticationContext -from sentry.hybridcloud.rpc.caching.service import back_with_silo_cache +from sentry.hybridcloud.rpc.caching.service import back_with_silo_cache, back_with_silo_cache_list from sentry.hybridcloud.rpc.filter_query import OpaqueSerializedResponse from sentry.hybridcloud.rpc.service import RpcService, rpc_method from sentry.sentry_apps.services.app import ( @@ -60,12 +60,20 @@ def find_installation_by_proxy_user( ) -> RpcSentryAppInstallation | None: pass + def installations_for_organization( + self, *, organization_id: int + ) -> list[RpcSentryAppInstallation]: + """ + Get a list of installations for an organization_id + + This is a cached wrapper around get_installations_for_organization + """ + return get_installations_for_organization(organization_id) + @rpc_method @abc.abstractmethod - def get_installed_for_organization( - self, - *, - organization_id: int, + def get_installations_for_organization( + self, *, organization_id: int ) -> list[RpcSentryAppInstallation]: pass @@ -199,6 +207,13 @@ def get_installation(id: int) -> RpcSentryAppInstallation | None: return app_service.get_installation_by_id(id=id) +@back_with_silo_cache_list( + "app_service.get_installed_for_organization", SiloMode.REGION, RpcSentryAppInstallation +) +def get_installations_for_organization(organization_id: int) -> list[RpcSentryAppInstallation]: + return app_service.get_installations_for_organization(organization_id=organization_id) + + 
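# A minimal, self-contained sketch (not part of this diff) of the response shape that the
# SelectRequesterResult TypedDict above captures: the UI expects `choices` as a list of
# [label, value] pairs plus an optional `defaultValue`. The helper name
# `format_select_response` and the sample payload are illustrative only.
from typing import Annotated, Any, TypedDict
from collections.abc import Sequence


class SelectResult(TypedDict, total=False):
    # Each inner sequence is a [label, value] pair.
    choices: Sequence[Annotated[Sequence[str], 2]]
    defaultValue: str


def format_select_response(options: Sequence[dict[str, Any]]) -> SelectResult:
    """Convert `[{"label": ..., "value": ..., "default": ...}]` into the UI shape."""
    result: SelectResult = {"choices": [[o["label"], o["value"]] for o in options]}
    for o in options:
        if o.get("default"):
            # Only the value is surfaced as the default selection in this sketch.
            result["defaultValue"] = o["value"]
    return result


if __name__ == "__main__":
    sample = [
        {"label": "Bug", "value": "bug", "default": True},
        {"label": "Feature", "value": "feature"},
    ]
    assert format_select_response(sample) == {
        "choices": [["Bug", "bug"], ["Feature", "feature"]],
        "defaultValue": "bug",
    }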
@back_with_silo_cache("app_service.get_by_application_id", SiloMode.REGION, RpcSentryApp) def get_by_application_id(application_id: int) -> RpcSentryApp | None: return app_service.find_service_hook_sentry_app(api_application_id=application_id) diff --git a/src/sentry/sentry_apps/services/app_request/__init__.py b/src/sentry/sentry_apps/services/app_request/__init__.py new file mode 100644 index 00000000000000..2a9746c30ef42c --- /dev/null +++ b/src/sentry/sentry_apps/services/app_request/__init__.py @@ -0,0 +1,2 @@ +from .model import * # noqa +from .service import * # noqa diff --git a/src/sentry/sentry_apps/services/app_request/impl.py b/src/sentry/sentry_apps/services/app_request/impl.py new file mode 100644 index 00000000000000..b791620fff09a2 --- /dev/null +++ b/src/sentry/sentry_apps/services/app_request/impl.py @@ -0,0 +1,30 @@ +from sentry.sentry_apps.services.app import app_service +from sentry.sentry_apps.services.app_request.model import ( + RpcSentryAppRequest, + SentryAppRequestFilterArgs, +) +from sentry.sentry_apps.services.app_request.serial import serialize_rpc_sentry_app_request +from sentry.sentry_apps.services.app_request.service import SentryAppRequestService +from sentry.utils.sentry_apps import SentryAppWebhookRequestsBuffer + + +class DatabaseBackedSentryAppRequestService(SentryAppRequestService): + def get_buffer_requests_for_region( + self, + *, + sentry_app_id: str, + region_name: str, + filter: SentryAppRequestFilterArgs | None = None, + ) -> list[RpcSentryAppRequest] | None: + sentry_app = app_service.get_sentry_app_by_id(id=sentry_app_id) + if not sentry_app: + return None + buffer = SentryAppWebhookRequestsBuffer(sentry_app) + + event = filter.get("event", None) if filter else None + errors_only = filter.get("errors_only", False) if filter else False + + return [ + serialize_rpc_sentry_app_request(req) + for req in buffer.get_requests(event=event, errors_only=errors_only) + ] diff --git a/src/sentry/sentry_apps/services/app_request/model.py b/src/sentry/sentry_apps/services/app_request/model.py new file mode 100644 index 00000000000000..c676f5eeb0af8e --- /dev/null +++ b/src/sentry/sentry_apps/services/app_request/model.py @@ -0,0 +1,21 @@ +from typing_extensions import TypedDict + +from sentry.hybridcloud.rpc import RpcModel + + +class RpcSentryAppOrganization(RpcModel): + name: str + slug: str + + +class RpcSentryAppRequest(RpcModel): + date: str + response_code: int + webhook_url: str + organization_id: int + event_type: str + + +class SentryAppRequestFilterArgs(TypedDict, total=False): + event: str + errors_only: bool diff --git a/src/sentry/sentry_apps/services/app_request/serial.py b/src/sentry/sentry_apps/services/app_request/serial.py new file mode 100644 index 00000000000000..87eff5f677a0c3 --- /dev/null +++ b/src/sentry/sentry_apps/services/app_request/serial.py @@ -0,0 +1,13 @@ +from typing import Any + +from sentry.sentry_apps.services.app_request.model import RpcSentryAppRequest + + +def serialize_rpc_sentry_app_request(request: dict[str, Any]) -> RpcSentryAppRequest: + return RpcSentryAppRequest( + date=request.get("date"), + response_code=request.get("response_code"), + webhook_url=request.get("webhook_url"), + organization_id=request.get("organization_id"), + event_type=request.get("event_type"), + ) diff --git a/src/sentry/sentry_apps/services/app_request/service.py b/src/sentry/sentry_apps/services/app_request/service.py new file mode 100644 index 00000000000000..88567a5ed34e3d --- /dev/null +++ 
b/src/sentry/sentry_apps/services/app_request/service.py @@ -0,0 +1,36 @@ +import abc + +from sentry.hybridcloud.rpc.resolvers import ByRegionName +from sentry.hybridcloud.rpc.service import RpcService, regional_rpc_method +from sentry.sentry_apps.services.app_request.model import ( + RpcSentryAppRequest, + SentryAppRequestFilterArgs, +) +from sentry.silo.base import SiloMode + + +class SentryAppRequestService(RpcService): + key = "sentry_app_request" + local_mode = SiloMode.REGION + + @classmethod + def get_local_implementation(cls) -> RpcService: + from sentry.sentry_apps.services.app_request.impl import ( + DatabaseBackedSentryAppRequestService, + ) + + return DatabaseBackedSentryAppRequestService() + + @regional_rpc_method(resolve=ByRegionName()) + @abc.abstractmethod + def get_buffer_requests_for_region( + self, + *, + sentry_app_id: str, + region_name: str, + filter: SentryAppRequestFilterArgs | None = None, + ) -> list[RpcSentryAppRequest] | None: + pass + + +app_request_service = SentryAppRequestService.create_delegation() diff --git a/src/sentry/sentry_apps/tasks/__init__.py b/src/sentry/sentry_apps/tasks/__init__.py index 62d7eee5405b7d..26909deb270a05 100644 --- a/src/sentry/sentry_apps/tasks/__init__.py +++ b/src/sentry/sentry_apps/tasks/__init__.py @@ -5,6 +5,7 @@ installation_webhook, process_resource_change_bound, send_alert_event, + send_alert_webhook, send_resource_change_webhook, workflow_notification, ) @@ -20,4 +21,5 @@ "send_resource_change_webhook", "workflow_notification", "process_service_hook", + "send_alert_webhook", ) diff --git a/src/sentry/sentry_apps/tasks/sentry_apps.py b/src/sentry/sentry_apps/tasks/sentry_apps.py index 0d2aef314790fd..62e35c5197cdaf 100644 --- a/src/sentry/sentry_apps/tasks/sentry_apps.py +++ b/src/sentry/sentry_apps/tasks/sentry_apps.py @@ -2,7 +2,7 @@ import logging from collections import defaultdict -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from typing import Any from celery import Task, current_task @@ -13,8 +13,10 @@ from sentry.api.serializers import serialize from sentry.constants import SentryAppInstallationStatus from sentry.db.models.base import Model -from sentry.eventstore.models import Event, GroupEvent +from sentry.eventstore.models import BaseEvent, Event, GroupEvent +from sentry.features.rollout import in_random_rollout from sentry.hybridcloud.rpc.caching import region_caching_service +from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.activity import Activity from sentry.models.group import Group from sentry.models.organization import Organization @@ -29,10 +31,12 @@ app_service, get_by_application_id, get_installation, + get_installations_for_organization, ) from sentry.shared_integrations.exceptions import ApiHostError, ApiTimeoutError, ClientError from sentry.silo.base import SiloMode from sentry.tasks.base import instrumented_task, retry +from sentry.types.rules import RuleFuture from sentry.users.services.user.model import RpcUser from sentry.users.services.user.service import user_service from sentry.utils import metrics @@ -74,6 +78,8 @@ def _webhook_event_data( event: Event | GroupEvent, group_id: int, project_id: int ) -> dict[str, Any]: + from sentry.api.serializers.rest_framework import convert_dict_key_case, snake_to_camel_case + project = Project.objects.get_from_cache(id=project_id) organization = Organization.objects.get_from_cache(id=project.organization_id) @@ -90,6 +96,10 @@ def _webhook_event_data( "sentry-organization-event-detail", 
args=[organization.slug, group_id, event.event_id] ) ) + if hasattr(event, "occurrence") and event.occurrence is not None: + event_context["occurrence"] = convert_dict_key_case( + event.occurrence.to_dict(), snake_to_camel_case + ) # The URL has a regex OR in it ("|") which means `reverse` cannot generate # a valid URL (it can't know which option to pick). We have to manually @@ -99,6 +109,104 @@ def _webhook_event_data( return event_context +@instrumented_task(name="sentry.sentry_apps.tasks.sentry_apps.send_alert_webhook", **TASK_OPTIONS) +@retry_decorator +def send_alert_webhook( + rule: str, + sentry_app_id: int, + instance_id: str, + group_id: int, + occurrence_id: str, + additional_payload_key: str | None = None, + additional_payload: Mapping[str, Any] | None = None, + **kwargs: Any, +): + group = Group.objects.get_from_cache(id=group_id) + assert group, "Group must exist to get related attributes" + project = Project.objects.get_from_cache(id=group.project_id) + organization = Organization.objects.get_from_cache(id=project.organization_id) + extra = { + "sentry_app_id": sentry_app_id, + "project_slug": project.slug, + "organization_slug": organization.slug, + "rule": rule, + } + + sentry_app = app_service.get_sentry_app_by_id(id=sentry_app_id) + if sentry_app is None: + logger.info("event_alert_webhook.missing_sentry_app", extra=extra) + return + + installations = app_service.get_many( + filter=dict( + organization_id=organization.id, + app_ids=[sentry_app.id], + status=SentryAppInstallationStatus.INSTALLED, + ) + ) + if not installations: + logger.info("event_alert_webhook.missing_installation", extra=extra) + return + (install,) = installations + + nodedata = nodestore.backend.get( + BaseEvent.generate_node_id(project_id=project.id, event_id=instance_id) + ) + + if not nodedata: + extra = { + "event_id": instance_id, + "occurrence_id": occurrence_id, + "rule": rule, + "sentry_app": sentry_app.slug, + "group_id": group_id, + } + logger.info("send_alert_event.missing_event", extra=extra) + return + + occurrence = None + if occurrence_id: + occurrence = IssueOccurrence.fetch(occurrence_id, project_id=project.id) + + if not occurrence: + logger.info( + "send_alert_event.missing_occurrence", + extra={"occurrence_id": occurrence_id, "project_id": project.id}, + ) + return + + group_event = GroupEvent( + project_id=project.id, + event_id=instance_id, + group=group, + data=nodedata, + occurrence=occurrence, + ) + + event_context = _webhook_event_data(group_event, group.id, project.id) + + data = {"event": event_context, "triggered_rule": rule} + + # Attach extra payload to the webhook + if additional_payload_key and additional_payload: + data[additional_payload_key] = additional_payload + + request_data = AppPlatformEvent( + resource="event_alert", action="triggered", install=install, data=data + ) + + send_and_save_webhook_request(sentry_app, request_data) + + # On success, record analytic event for Alert Rule UI Component + if request_data.data.get("issue_alert"): + analytics.record( + "alert_rule_ui_component_webhook.sent", + organization_id=organization.id, + sentry_app_id=sentry_app_id, + event=f"{request_data.resource}.{request_data.action}", + ) + + @instrumented_task(name="sentry.sentry_apps.tasks.sentry_apps.send_alert_event", **TASK_OPTIONS) @retry_decorator def send_alert_event( @@ -226,9 +334,10 @@ def _process_resource_change( id=Project.objects.get_from_cache(id=instance.project_id).organization_id ) assert org, "organization must exist to get related sentry app 
installations" - installations: list[RpcSentryAppInstallation] = [ + + installations = [ installation - for installation in app_service.get_installed_for_organization(organization_id=org.id) + for installation in app_service.installations_for_organization(organization_id=org.id) if event in installation.sentry_app.events ] @@ -314,6 +423,10 @@ def clear_region_cache(sentry_app_id: int, region_name: str) -> None: organization_id__in=list(install_map.keys()), region_name=region_name ).values("organization_id") for region_row in region_query: + region_caching_service.clear_key( + key=get_installations_for_organization.key_from(region_row["organization_id"]), + region_name=region_name, + ) installs = install_map[region_row["organization_id"]] for install_id in installs: region_caching_service.clear_key( @@ -420,7 +533,7 @@ def send_resource_change_webhook( metrics.incr("resource_change.processed", sample_rate=1.0, tags={"change_event": event}) -def notify_sentry_app(event: Event | GroupEvent, futures): +def notify_sentry_app(event: GroupEvent, futures: Sequence[RuleFuture]): for f in futures: if not f.kwargs.get("sentry_app"): continue @@ -440,12 +553,22 @@ def notify_sentry_app(event: Event | GroupEvent, futures): "settings": settings, } - send_alert_event.delay( - event=event, - rule=f.rule.label, - sentry_app_id=f.kwargs["sentry_app"].id, - **extra_kwargs, - ) + if in_random_rollout("sentryapps.send_alert_event.use-eventid"): + send_alert_webhook.delay( + instance_id=event.event_id, + group_id=event.group_id, + occurrence_id=event.occurrence_id if hasattr(event, "occurrence_id") else None, + rule=f.rule.label, + sentry_app_id=f.kwargs["sentry_app"].id, + **extra_kwargs, + ) + else: + send_alert_event.delay( + event=event, + rule=f.rule.label, + sentry_app_id=f.kwargs["sentry_app"].id, + **extra_kwargs, + ) def send_webhooks(installation: RpcSentryAppInstallation, event: str, **kwargs: Any) -> None: diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py deleted file mode 100644 index ef305548c71a71..00000000000000 --- a/src/sentry/sentry_metrics/querying/samples_list.py +++ /dev/null @@ -1,1225 +0,0 @@ -from abc import ABC, abstractmethod -from bisect import bisect -from collections.abc import Callable -from dataclasses import dataclass -from datetime import datetime -from typing import Any, Literal, TypedDict, cast - -from snuba_sdk import And, Column, Condition, Function, Op, Or - -from sentry import options -from sentry.api.event_search import SearchFilter, SearchKey, SearchValue -from sentry.search.events.builder.base import BaseQueryBuilder -from sentry.search.events.builder.discover import DiscoverQueryBuilder -from sentry.search.events.builder.metrics_summaries import MetricsSummariesQueryBuilder -from sentry.search.events.builder.spans_indexed import SpansIndexedQueryBuilder -from sentry.search.events.types import QueryBuilderConfig, SelectType, SnubaParams -from sentry.snuba.dataset import Dataset -from sentry.snuba.metrics.naming_layer.mri import ( - SpanMRI, - TransactionMRI, - is_custom_metric, - is_measurement, - parse_mri, -) -from sentry.snuba.referrer import Referrer -from sentry.utils.numbers import clip - - -@dataclass(frozen=True) -class SpanKey: - group: str - timestamp: str - span_id: str - - -class Summary(TypedDict): - min: float - max: float - sum: float - count: int - - -class AbstractSamplesListExecutor(ABC): - # picking 30 samples gives a decent chance to surface a few samples from the higher percentiles 
- num_samples = 30 - - sortable_columns: set[str] - - def __init__( - self, - *, - mri: str, - snuba_params: SnubaParams, - referrer: Referrer, - fields: list[str], - operation: str | None = None, - query: str | None = None, - min: float | None = None, - max: float | None = None, - sort: str | None = None, - rollup: int | None = None, - ): - self.mri = mri - self.snuba_params = snuba_params - self.fields = fields - self.operation = operation - self.query = query - self.min = min - self.max = max - self.sort = sort - self.rollup = rollup - self.referrer = referrer - - @classmethod - @abstractmethod - def supports_mri(cls, mri: str) -> bool: - raise NotImplementedError - - @classmethod - def supports_sort(cls, column: str) -> bool: - return column in cls.sortable_columns - - @abstractmethod - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - raise NotImplementedError - - @abstractmethod - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - raise NotImplementedError - - def get_matching_spans(self, offset, limit): - assert self.rollup is not None - - if self.sort is None: - execute_fn = self.get_matching_spans_unsorted - else: - execute_fn = self.get_matching_spans_sorted - return execute_fn(offset, limit) - - @abstractmethod - def get_matching_spans_sorted(self, offset, limit): - raise NotImplementedError - - @abstractmethod - def get_matching_spans_unsorted(self, offset, limit): - raise NotImplementedError - - def get_spans_by_key( - self, - span_keys: list[SpanKey], - additional_fields: list[str] | None = None, - ): - if not span_keys: - return {"data": []} - - fields = self.fields[:] - if additional_fields is not None: - fields.extend(additional_fields) - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - selected_columns=fields, - limit=len(span_keys), - offset=0, - ) - - # This are the additional conditions to better take advantage of the ORDER BY - # on the spans table. This creates a list of conditions to be `OR`ed together - # that can will be used by ClickHouse to narrow down the granules. - # - # The span ids are not in this condition because they are more effective when - # specified within the `PREWHERE` clause. So, it's in a separate condition. - conditions = [ - And( - [ - Condition(builder.column("span.group"), Op.EQ, key.group), - Condition( - builder.column("timestamp"), Op.EQ, datetime.fromisoformat(key.timestamp) - ), - ] - ) - for key in span_keys - ] - - if len(conditions) == 1: - order_by_condition = conditions[0] - else: - order_by_condition = Or(conditions) - - # Using `IN` combined with putting the list in a SnQL "tuple" triggers an optimizer - # in snuba where it - # 1. moves the condition into the `PREWHERE` clause - # 2. maps the ids to the underlying UInt64 and uses the bloom filter index - # - # NOTE: the "tuple" here is critical as without it, snuba will not correctly - # rewrite the condition and keep it in the WHERE and as a hexidecimal. 
- span_id_condition = Condition( - builder.column("id"), - Op.IN, - Function("tuple", [key.span_id for key in span_keys]), - ) - - builder.add_conditions([order_by_condition, span_id_condition]) - - query_results = builder.run_query(self.referrer.value) - return builder.process_results(query_results) - - -class SegmentsSamplesListExecutor(AbstractSamplesListExecutor): - sortable_columns = {"timestamp", "span.duration", "summary"} - - SORT_MAPPING = { - "span.duration": "transaction.duration", - "timestamp": "timestamp", - } - - @classmethod - @abstractmethod - def mri_to_column(cls, mri: str) -> str | None: - raise NotImplementedError - - @classmethod - def convert_sort(cls, sort: str, mri: str) -> tuple[Literal["", "-"], str] | None: - direction: Literal["", "-"] = "" - - if sort.startswith("-"): - direction = "-" - sort = sort[1:] - - if sort in cls.SORT_MAPPING: - return direction, cls.SORT_MAPPING[sort] - - if sort == "summary": - column = cls.mri_to_column(mri) - if column is not None: - return direction, column - - return None - - @classmethod - def supports_mri(cls, mri: str) -> bool: - return cls.mri_to_column(mri) is not None - - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - column = self.mri_to_column(self.mri) - assert column - - builder = SpansIndexedQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["trace", "timestamp"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. - orderby=None, - limit=limit, - limitby=("trace", 1), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - trace_ids = [row["trace"] for row in results["data"]] - timestamps = [datetime.fromisoformat(row["timestamp"]) for row in results["data"]] - return trace_ids, timestamps - - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - column = self.mri_to_column(self.mri) - assert column is not None - - builder = SpansIndexedQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["timestamp", "span_id"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. 
- orderby=None, - limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", max_spans_per_trace), - ) - - trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions( - [ - trace_id_condition, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - return [ - SpanKey( - group="00", # all segments have a group of `00` currently - timestamp=row["timestamp"], - span_id=row["span_id"], - ) - for row in results["data"] - ] - - def _get_spans( - self, - span_keys: list[SpanKey], - summaries: dict[str, Summary], - ): - result = self.get_spans_by_key( - span_keys, - # force `id` to be one of the fields - additional_fields=["id"], - ) - - # if there is a sort, we want to preserve the result in the same - # order as the span keys which we can do by checking the span ids - if self.sort: - order = {key.span_id: i for i, key in enumerate(span_keys)} - result["data"].sort(key=lambda row: order[row["id"]]) - - # if `id` wasn't initially there, we should remove it - should_pop_id = "id" not in self.fields - - for row in result["data"]: - span_id = row.pop("id") if should_pop_id else row["id"] - row["summary"] = summaries[span_id] - - return result - - def get_matching_spans_sorted(self, offset, limit): - span_keys, summaries = self.get_sorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_sorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - """ - When getting examples for a segment, it's actually much faster to read it - from the transactions dataset compared to the spans dataset as it's a much - smaller dataset. - - One consideration here is that there is an one to one mapping between a - transaction to a segment today. If this relationship changes, we'll have to - rethink how to fetch segment samples a little as the transactions dataset - may not contain all the necessary data. - """ - assert self.sort - sort = self.convert_sort(self.sort, self.mri) - assert sort is not None - direction, sort_column = sort - - mri_column = self.mri_to_column(self.mri) - assert mri_column is not None - - fields = ["span_id", "timestamp"] - if sort_column not in fields: - fields.append(sort_column) - if mri_column not in fields: - fields.append(mri_column) - - builder = DiscoverQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=fields, - orderby=f"{direction}{sort_column}", - limit=limit, - offset=offset, - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.column(mri_column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - span_keys = [ - SpanKey( - group="00", # all segments have a group of `00` currently - timestamp=row["timestamp"], - span_id=row["span_id"], - ) - for row in result["data"] - ] - - """ - Because transaction level measurements currently do not get - propagated to the spans dataset, we have to query them here, - generate the summary for it here, and propagate it to the - results of the next stage. 
- - Once we start writing transaction level measurements to the - indexed spans dataset, we can stop doing this and read the - value directly from the indexed spans dataset. - - For simplicity, all transaction based metrics use this approach. - """ - summaries = { - cast(str, row["span_id"]): cast( - Summary, - { - "min": row[mri_column], - "max": row[mri_column], - "sum": row[mri_column], - "count": 1, - }, - ) - for row in result["data"] - } - - return span_keys, summaries - - def get_matching_spans_unsorted(self, offset, limit): - span_keys, summaries = self.get_unsorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_unsorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - """ - When getting examples for a segment, it's actually much faster to read it - from the transactions dataset compared to the spans dataset as it's a much - smaller dataset. - - One consideration here is that there is an one to one mapping between a - transaction to a segment today. If this relationship changes, we'll have to - rethink how to fetch segment samples a little as the transactions dataset - may not contain all the necessary data. - """ - column = self.mri_to_column(self.mri) - assert column is not None - - builder = DiscoverQueryBuilder( - Dataset.Transactions, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=[ - f"rounded_timestamp({self.rollup})", - f"examples({column}, {self.num_samples}) AS examples", - ], - limit=limit, - offset=offset, - sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "examples"]), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.column(column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - metric_key = lambda example: example[2] # sort by metric - for row in result["data"]: - row["examples"] = pick_samples(row["examples"], metric_key=metric_key) - - span_keys = [ - SpanKey( - group="00", # all segments have a group of `00` currently - timestamp=example[0], - span_id=example[1], - ) - for row in result["data"] - for example in row["examples"] - ][:limit] - - """ - Because transaction level measurements currently do not get - propagated to the spans dataset, we have to query them here, - generate the summary for it here, and propagate it to the - results of the next stage. - - Once we start writing transaction level measurements to the - indexed spans dataset, we can stop doing this and read the - value directly from the indexed spans dataset. - - For simplicity, all transaction based metrics use this approach. 
- """ - summaries = { - cast(str, example[1]): cast( - Summary, - { - "min": example[2], - "max": example[2], - "sum": example[2], - "count": 1, - }, - ) - for row in result["data"] - for example in row["examples"] - } - - return span_keys, summaries - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - raise NotImplementedError - - def get_min_max_conditions(self, column: Column) -> list[Condition]: - conditions = [] - - if self.min is not None: - conditions.append(Condition(column, Op.GTE, self.min)) - if self.max is not None: - conditions.append(Condition(column, Op.LTE, self.max)) - - return conditions - - -class TransactionDurationSamplesListExecutor(SegmentsSamplesListExecutor): - @classmethod - def mri_to_column(cls, mri: str) -> str | None: - if mri == TransactionMRI.DURATION.value: - # Because we read this from the transactions dataset, - # we use the name for the transactions dataset instead. - return "transaction.duration" - return None - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - return [] - - -class TransactionMeasurementsSamplesListExecutor(SegmentsSamplesListExecutor): - @classmethod - def mri_to_column(cls, mri) -> str | None: - name = cls.mri_to_measurement_name(mri) - if name is not None: - return f"measurements.{name}" - - return None - - @classmethod - def mri_to_measurement_name(cls, mri) -> str | None: - parsed_mri = parse_mri(mri) - if parsed_mri is not None and is_measurement(parsed_mri): - return parsed_mri.name[len("measurements:") :] - return None - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - name = self.mri_to_measurement_name(self.mri) - return [Condition(Function("has", [Column("measurements.key"), name]), Op.EQ, 1)] - - -class SpansSamplesListExecutor(AbstractSamplesListExecutor): - sortable_columns = {"timestamp", "span.duration", "span.self_time", "summary"} - - @classmethod - @abstractmethod - def mri_to_column(cls, mri) -> str | None: - raise NotImplementedError - - @classmethod - def convert_sort(cls, sort: str, mri: str) -> tuple[Literal["", "-"], str] | None: - direction: Literal["", "-"] = "" - - if sort.startswith("-"): - direction = "-" - sort = sort[1:] - - if sort == "summary": - column = cls.mri_to_column(mri) - if column is not None: - return direction, column - - if sort in cls.sortable_columns: - return direction, sort - - return None - - @classmethod - def supports_mri(cls, mri: str) -> bool: - return cls.mri_to_column(mri) is not None - - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - column = self.mri_to_column(self.mri) - assert column is not None - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["trace", "timestamp"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. 
- orderby=None, - limit=limit, - limitby=("trace", 1), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - trace_ids = [row["trace"] for row in results["data"]] - timestamps = [datetime.fromisoformat(row["timestamp"]) for row in results["data"]] - return trace_ids, timestamps - - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - column = self.mri_to_column(self.mri) - assert column is not None - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["span.group", "timestamp", "id"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. - orderby=None, - limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", max_spans_per_trace), - ) - - trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - builder.add_conditions( - [ - trace_id_condition, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - return [ - SpanKey( - group=row["span.group"], - timestamp=row["timestamp"], - span_id=row["id"], - ) - for row in results["data"] - ] - - def get_matching_spans_sorted(self, offset, limit): - """ - Since we're already querying the spans table sorted on some column, - there's no reason to split this into 2 queries. We can go ahead and - just do it all in a single query. 
- """ - assert self.sort - sort = self.convert_sort(self.sort, self.mri) - assert sort is not None - direction, sort_column = sort - - fields = self.fields[:] - if sort_column not in fields: - fields.append(sort_column) - - column = self.mri_to_column(self.mri) - assert column is not None - if column not in fields: - fields.append(column) - - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - selected_columns=fields, - orderby=f"{direction}{sort_column}", - limit=limit, - offset=0, - ) - - additional_conditions = self.get_additional_conditions(builder) - - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - should_pop_column = column not in self.fields - - for row in result["data"]: - value = row.pop(column) if should_pop_column else row[column] - row["summary"] = { - "min": value, - "max": value, - "sum": value, - "count": 1, - } - - return result - - def get_matching_spans_unsorted(self, offset, limit): - span_keys = self.get_unsorted_span_keys(offset, limit) - - column = self.mri_to_column(self.mri) - assert column is not None # should always resolve to a column here - - result = self.get_spans_by_key(span_keys, additional_fields=[column]) - - should_pop_column = column not in self.fields - - for row in result["data"]: - value = row.pop(column) if should_pop_column else row[column] - row["summary"] = { - "min": value, - "max": value, - "sum": value, - "count": 1, - } - - return result - - def get_unsorted_span_keys(self, offset: int, limit: int) -> list[SpanKey]: - column = self.mri_to_column(self.mri) - - for dataset_segmentation_condition_fn in self.dataset_segmentation_conditions(): - builder = SpansIndexedQueryBuilder( - Dataset.SpansIndexed, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=[ - f"rounded_timestamp({self.rollup})", - f"examples({column}, {self.num_samples}) AS examples", - ], - limit=limit, - offset=offset, - sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "examples"]), - ) - - segmentation_conditions = dataset_segmentation_condition_fn(builder) - - additional_conditions = self.get_additional_conditions(builder) - - assert column is not None - min_max_conditions = self.get_min_max_conditions(builder.resolve_column(column)) - - builder.add_conditions( - [ - *segmentation_conditions, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - if not result["data"]: - continue - - metric_key = lambda example: example[3] # sort by metric - for row in result["data"]: - row["examples"] = pick_samples(row["examples"], metric_key=metric_key) - - return [ - SpanKey( - group=example[0], - timestamp=example[1], - span_id=example[2], - ) - for row in result["data"] - for example in row["examples"] - ][:limit] - - return [] - - @abstractmethod - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - raise NotImplementedError - - def dataset_segmentation_conditions( - self, - ) -> list[Callable[[BaseQueryBuilder], list[Condition]]]: - return [lambda builder: []] - - def get_min_max_conditions(self, column: SelectType) -> list[Condition]: - conditions = [] - - if 
self.min is not None: - conditions.append(Condition(column, Op.GTE, self.min)) - if self.max is not None: - conditions.append(Condition(column, Op.LTE, self.max)) - - return conditions - - -class SpansTimingsSamplesListExecutor(SpansSamplesListExecutor): - MRI_MAPPING = { - SpanMRI.DURATION.value: "span.duration", - SpanMRI.SELF_TIME.value: "span.self_time", - } - - @classmethod - def mri_to_column(cls, mri) -> str | None: - return cls.MRI_MAPPING.get(mri) - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - return [] - - def dataset_segmentation_conditions( - self, - ) -> list[Callable[[BaseQueryBuilder], list[Condition]]]: - return [ - # This grouping makes the assumption that spans are divided into 2 groups right now. - # Those that are classified with a non zero group, and those that are unclassified - # with a zero group. - # - # In the future, if all span groups are classified, this segmentation should change - # to reflect that. - lambda builder: [ - # The `00` group is used for spans not used within the - # new starfish experience. It's effectively the group - # for other. It is a massive group, so we've chosen - # to exclude it here. - Condition(builder.column("span.group"), Op.NEQ, "00"), - ], - lambda builder: [ - # If the previous query contained no results, we'll - # have to search the `00` group which is slower but - # unfortunately necessary here. - Condition(builder.column("span.group"), Op.EQ, "00"), - ], - ] - - -class SpansMeasurementsSamplesListExecutor(SpansSamplesListExecutor): - # These are some hard coded metrics in the spans name space that can be - # queried in the measurements of the indexed spans dataset - MRI_MAPPING = { - SpanMRI.RESPONSE_CONTENT_LENGTH.value: "http.response_content_length", - SpanMRI.DECODED_RESPONSE_CONTENT_LENGTH.value: "http.decoded_response_content_length", - SpanMRI.RESPONSE_TRANSFER_SIZE.value: "http.response_transfer_size", - SpanMRI.AI_TOTAL_TOKENS.value: "ai_total_tokens_used", - SpanMRI.AI_TOTAL_COST.value: "ai_total_cost", - SpanMRI.CACHE_ITEM_SIZE.value: "cache.item_size", - SpanMRI.MOBILE_SLOW_FRAMES.value: "frames.slow", - SpanMRI.MOBILE_FROZEN_FRAMES.value: "frames.frozen", - SpanMRI.MOBILE_TOTAL_FRAMES.value: "frames.total", - SpanMRI.MOBILE_FRAMES_DELAY.value: "frames.delay", - SpanMRI.MESSAGE_RECEIVE_LATENCY.value: "messaging.message.receive.latency", - } - - @classmethod - def mri_to_column(cls, mri) -> str | None: - name = cls.mri_measurement_name(mri) - if name is not None: - return f"measurements.{name}" - - return None - - @classmethod - def mri_measurement_name(cls, mri) -> str | None: - if name := cls.MRI_MAPPING.get(mri): - return name - - # some web vitals exist on spans - parsed_mri = parse_mri(mri) - if ( - parsed_mri is not None - and parsed_mri.namespace == "spans" - and parsed_mri.name.startswith("webvital.") - ): - return parsed_mri.name[len("webvital:") :] - - return None - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - name = self.mri_measurement_name(self.mri) - return [Condition(Function("has", [Column("measurements.key"), name]), Op.EQ, 1)] - - -class CustomSamplesListExecutor(AbstractSamplesListExecutor): - sortable_columns = {"timestamp", "span.duration", "summary"} - - SORT_MAPPING = { - "span.duration": "span.duration", - "timestamp": "timestamp", - } - - OPERATION_COLUMN_MAPPING = { - "min": "min_metric", - "max": "max_metric", - "count": "count_metric", - } - - # refer to the definition of `examples()` in the metrics summary 
dataset - EXAMPLES_SORT_KEY = { - "min": 3, - "max": 4, - "count": 6, - } - - @classmethod - def convert_sort(cls, sort: str, operation: str | None) -> tuple[Literal["", "-"], str] | None: - direction: Literal["", "-"] = "" - - if sort.startswith("-"): - direction = "-" - sort = sort[1:] - - if sort in cls.SORT_MAPPING: - return direction, cls.SORT_MAPPING[sort] - - if sort == "summary": - return direction, cls.OPERATION_COLUMN_MAPPING.get(operation or "", "avg_metric") - - return None - - @classmethod - def supports_mri(cls, mri: str) -> bool: - parsed_mri = parse_mri(mri) - if parsed_mri is not None and is_custom_metric(parsed_mri): - return True - return False - - def get_matching_traces(self, limit: int) -> tuple[list[str], list[datetime]]: - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["trace", "timestamp"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. - orderby=None, - limit=limit, - limitby=("trace", 1), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - trace_ids = [row["trace"] for row in results["data"]] - timestamps = [datetime.fromisoformat(row["timestamp"]) for row in results["data"]] - return trace_ids, timestamps - - def get_matching_spans_from_traces( - self, - trace_ids: list[str], - max_spans_per_trace: int, - ) -> list[SpanKey]: - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=["span.group", "timestamp", "id"], - # The orderby is intentionally `None` here as this query is much faster - # if we let Clickhouse decide which order to return the results in. - # This also means we cannot order by any columns or paginate. 
- orderby=None, - limit=len(trace_ids) * max_spans_per_trace, - limitby=("trace", max_spans_per_trace), - ) - - trace_id_condition = Condition(Column("trace_id"), Op.IN, trace_ids) - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions( - [ - trace_id_condition, - *additional_conditions, - *min_max_conditions, - ] - ) - - query_results = builder.run_query(self.referrer.value) - results = builder.process_results(query_results) - - return [ - SpanKey( - group=row["span.group"], - timestamp=row["timestamp"], - span_id=row["id"], - ) - for row in results["data"] - ] - - def _get_spans( - self, - span_keys: list[SpanKey], - summaries: dict[str, Summary], - ): - result = self.get_spans_by_key(span_keys, additional_fields=["id"]) - - # if there is a sort, we want to preserve the result in the same - # order as the span keys which we can do by checking the span ids - if self.sort: - order = {key.span_id: i for i, key in enumerate(span_keys)} - result["data"].sort(key=lambda row: order[row["id"]]) - - should_pop_id = "id" not in self.fields - - for row in result["data"]: - span_id = row.pop("id") if should_pop_id else row["id"] - row["summary"] = summaries[span_id] - - return result - - def get_matching_spans_sorted(self, offset, limit): - span_keys, summaries = self.get_sorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_sorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - assert self.sort - sort = self.convert_sort(self.sort, self.operation) - assert sort is not None - direction, sort_column = sort - - fields = [ - "id", - "timestamp", - "span.group", - "min_metric", - "max_metric", - "sum_metric", - "count_metric", - ] - if sort_column not in fields: - fields.append(sort_column) - - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=fields, - orderby=f"{direction}{sort_column}", - limit=limit, - offset=offset, - # This table has a poor SAMPLE BY so DO NOT use it for now - # sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "example"]), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - span_keys = [ - SpanKey( - group=row["span.group"], - timestamp=row["timestamp"], - span_id=row["id"], - ) - for row in result["data"] - ] - - """ - The indexed spans dataset does not contain any metric related - data. To propagate these values, we read it from the metric - summaries table, and copy them to the results in the next step. 
- """ - summaries = { - cast(str, row["id"]): cast( - Summary, - { - "min": row["min_metric"], - "max": row["max_metric"], - "sum": row["sum_metric"], - "count": row["count_metric"], - }, - ) - for row in result["data"] - } - - return span_keys, summaries - - def get_matching_spans_unsorted(self, offset, limit): - span_keys, summaries = self.get_unsorted_span_keys(offset, limit) - return self._get_spans(span_keys, summaries) - - def get_unsorted_span_keys( - self, - offset: int, - limit: int, - ) -> tuple[list[SpanKey], dict[str, Summary]]: - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params={}, - snuba_params=self.snuba_params, - query=self.query, - selected_columns=[ - f"rounded_timestamp({self.rollup})", - f"examples({self.num_samples}) AS examples", - ], - limit=limit, - offset=offset, - # This table has a poor SAMPLE BY so DO NOT use it for now - # sample_rate=options.get("metrics.sample-list.sample-rate"), - config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "examples"]), - ) - - additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions(builder) - builder.add_conditions([*additional_conditions, *min_max_conditions]) - - query_results = builder.run_query(self.referrer.value) - result = builder.process_results(query_results) - - # 7 here refers to the avg value which is the default - # if the operaton doesn't have metric it should sort by - index = self.EXAMPLES_SORT_KEY.get(self.operation or "", 7) # sort by metric - metric_key = lambda example: example[index] - - for row in result["data"]: - row["examples"] = pick_samples(row["examples"], metric_key=metric_key) - - span_keys = [ - SpanKey( - group=example[0], - timestamp=example[1], - span_id=example[2], - ) - for row in result["data"] - for example in row["examples"] - ][:limit] - - """ - The indexed spans dataset does not contain any metric related - data. To propagate these values, we read it from the metric - summaries table, and copy them to the results in the next step. 
- """ - summaries = { - cast(str, example[2]): cast( - Summary, - { - "min": example[3], - "max": example[4], - "sum": example[5], - "count": example[6], - }, - ) - for row in result["data"] - for example in row["examples"] - } - - return span_keys, summaries - - def get_additional_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - return [ - builder.convert_search_filter_to_condition( - SearchFilter(SearchKey("metric"), "=", SearchValue(self.mri)), - ) - ] - - def get_min_max_conditions(self, builder: BaseQueryBuilder) -> list[Condition]: - conditions = [] - - column = builder.resolve_column( - self.OPERATION_COLUMN_MAPPING.get(self.operation or "", "avg_metric") - ) - - if self.min is not None: - conditions.append(Condition(column, Op.GTE, self.min)) - if self.max is not None: - conditions.append(Condition(column, Op.LTE, self.max)) - - return conditions - - -SAMPLE_LIST_EXECUTORS = [ - TransactionDurationSamplesListExecutor, - TransactionMeasurementsSamplesListExecutor, - SpansTimingsSamplesListExecutor, - SpansMeasurementsSamplesListExecutor, - CustomSamplesListExecutor, -] - - -def get_sample_list_executor_cls(mri) -> type[AbstractSamplesListExecutor] | None: - for executor_cls in SAMPLE_LIST_EXECUTORS: - if executor_cls.supports_mri(mri): - return executor_cls - return None - - -def pick_samples( - samples: list[Any], - metric_key: Callable[[Any], float], -) -> list[Any]: - # if there are at most 3 samples, there's no picking needed - # as we want to return at most 3 from the list provided - if len(samples) <= 3: - return samples - - samples.sort(key=metric_key) - - keys = [metric_key(sample) for sample in samples] - - # first element is the one near the average - # but must not be the first or last element - avg_m = sum(keys) / len(keys) - idx_m = bisect(keys, avg_m) - # ensure there is at least 1 element on both sides - # of the middle element we just picked - # i.e. should not pick index 0 and len(keys) - 1 - idx_m = clip(idx_m, 1, len(keys) - 2) - - # second element is near the average of first - # split, but must not be the split element - avg_l = sum(keys[:idx_m]) / idx_m - idx_l = bisect(keys, avg_l, hi=idx_m - 1) - idx_l += 1 # push it closer to the middle - # ensure this is not the same as middle element - idx_l = clip(idx_l, 0, idx_m - 1) - - # third element is near the average of second - # split, but must not be the split element - avg_r = sum(keys[idx_m + 1 :]) / (len(keys) - idx_m - 1) - idx_r = bisect(keys, avg_r, lo=idx_m + 1) - idx_r -= 1 # push it closer to the middle - # ensure this is not the same as middle element - idx_r = clip(idx_r, idx_m + 1, len(keys) - 1) - - return [samples[idx_m], samples[idx_l], samples[idx_r]] diff --git a/src/sentry/snuba/dataset.py b/src/sentry/snuba/dataset.py index 799029ea0e7b19..e2ab7d47fd87f5 100644 --- a/src/sentry/snuba/dataset.py +++ b/src/sentry/snuba/dataset.py @@ -54,12 +54,6 @@ class Dataset(Enum): EventsAnalyticsPlatform = "events_analytics_platform" - MetricsSummaries = "metrics_summaries" - """ - Summaries of all metrics within a span. Used to correlate indexed - spans to a metric. 
- """ - @unique class EntityKey(Enum): @@ -79,7 +73,6 @@ class EntityKey(Enum): GenericOrgMetricsCounters = "generic_org_metrics_counters" IssuePlatform = "search_issues" Functions = "functions" - MetricsSummaries = "metrics_summaries" @unique diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index 99e57559448d85..dbc94b6c85af38 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -20,12 +20,7 @@ TimeseriesQueryBuilder, TopEventsQueryBuilder, ) -from sentry.search.events.fields import ( - FIELD_ALIASES, - get_function_alias, - get_json_meta_type, - is_function, -) +from sentry.search.events.fields import FIELD_ALIASES, get_function_alias, is_function from sentry.search.events.types import ( EventsResponse, HistogramParams, @@ -312,6 +307,7 @@ def timeseries_query( dataset: Dataset = Dataset.Discover, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. @@ -337,6 +333,8 @@ def timeseries_query( allow_metric_aggregates - Ignored here, only used in metric enhanced performance fallback_to_transactions - Whether to fallback to the transactions dataset if the query fails in metrics enhanced requests. To be removed once the discover dataset is split. + transform_alias_to_input_format - Whether aggregate columns should be returned in the originally + requested function format. """ assert dataset in [ Dataset.Discover, @@ -356,6 +354,7 @@ def timeseries_query( config=QueryBuilderConfig( functions_acl=functions_acl, has_metrics=has_metrics, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) query_list = [base_builder] @@ -411,20 +410,10 @@ def timeseries_query( compared_value = compared_row.get(col_name, 0) row["comparisonCount"] = compared_value - result = results[0] + result = base_builder.process_results(results[0]) return SnubaTSResult( - { - "data": result["data"], - "meta": { - "fields": { - value["name"]: get_json_meta_type( - value["name"], value.get("type"), base_builder - ) - for value in result["meta"] - } - }, - }, + {"data": result["data"], "meta": result["meta"]}, snuba_params.start_date, snuba_params.end_date, rollup, diff --git a/src/sentry/snuba/entity_subscription.py b/src/sentry/snuba/entity_subscription.py index e4731e6baf73ef..08c7319179d86f 100644 --- a/src/sentry/snuba/entity_subscription.py +++ b/src/sentry/snuba/entity_subscription.py @@ -213,6 +213,7 @@ def build_query_builder( skip_time_conditions=True, parser_config_overrides=parser_config_overrides, skip_field_validation_for_entity_subscription_deletion=skip_field_validation_for_entity_subscription_deletion, + use_entity_prefix_for_fields=True, ), ) @@ -671,6 +672,9 @@ def get_entity_key_from_snuba_query( project_id: int, skip_field_validation_for_entity_subscription_deletion: bool = False, ) -> EntityKey: + query_dataset = Dataset(snuba_query.dataset) + if query_dataset == Dataset.EventsAnalyticsPlatform: + return EntityKey.EAPSpans entity_subscription = get_entity_subscription_from_snuba_query( snuba_query, organization_id, diff --git a/src/sentry/snuba/errors.py b/src/sentry/snuba/errors.py index 6e3de25c26257c..8466a1f40c2c78 100644 --- a/src/sentry/snuba/errors.py +++ b/src/sentry/snuba/errors.py @@ -14,7 +14,6 @@ ErrorsTimeseriesQueryBuilder, ErrorsTopEventsQueryBuilder, ) -from sentry.search.events.fields import get_json_meta_type from sentry.search.events.types import 
EventsResponse, QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset from sentry.snuba.discover import OTHER_KEY, create_result_key, transform_tips, zerofill @@ -105,6 +104,7 @@ def timeseries_query( on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ): with sentry_sdk.start_span(op="errors", name="timeseries.filter_transform"): @@ -121,6 +121,7 @@ def timeseries_query( functions_acl=functions_acl, has_metrics=has_metrics, parser_config_overrides=PARSER_CONFIG_OVERRIDES, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) query_list = [base_builder] @@ -175,19 +176,12 @@ def timeseries_query( cmp_result_val = cmp_result.get(col_name, 0) result["comparisonCount"] = cmp_result_val - result = results[0] + result = base_builder.process_results(results[0]) return SnubaTSResult( { "data": result["data"], - "meta": { - "fields": { - value["name"]: get_json_meta_type( - value["name"], value.get("type"), base_builder - ) - for value in result["meta"] - } - }, + "meta": result["meta"], }, snuba_params.start_date, snuba_params.end_date, diff --git a/src/sentry/snuba/events.py b/src/sentry/snuba/events.py index 3b201dc4dcf1d8..24718ed5a41150 100644 --- a/src/sentry/snuba/events.py +++ b/src/sentry/snuba/events.py @@ -601,20 +601,20 @@ class Columns(Enum): alias="app.in_foreground", ) OS_DISTRIBUTION_NAME = Column( - group_name="events.contexts[os.distribution.name]", - event_name="contexts[os.distribution.name]", - transaction_name="contexts[os.distribution.name]", - discover_name="contexts[os.distribution.name]", - issue_platform_name="contexts[os.distribution.name]", - alias="os.distribution.name", + group_name="events.contexts[os.distribution_name]", + event_name="contexts[os.distribution_name]", + transaction_name="contexts[os.distribution_name]", + discover_name="contexts[os.distribution_name]", + issue_platform_name="contexts[os.distribution_name]", + alias="os.distribution_name", ) OS_DISTRIBUTION_VERSION = Column( - group_name="events.contexts[os.distribution.version]", - event_name="contexts[os.distribution.version]", - transaction_name="contexts[os.distribution.version]", - discover_name="contexts[os.distribution.version]", - issue_platform_name="contexts[os.distribution.version]", - alias="os.distribution.version", + group_name="events.contexts[os.distribution_version]", + event_name="contexts[os.distribution_version]", + transaction_name="contexts[os.distribution_version]", + discover_name="contexts[os.distribution_version]", + issue_platform_name="contexts[os.distribution_version]", + alias="os.distribution_version", ) # Transactions specific columns TRANSACTION_OP = Column( diff --git a/src/sentry/snuba/functions.py b/src/sentry/snuba/functions.py index 8c17391226de11..c5641706f9f449 100644 --- a/src/sentry/snuba/functions.py +++ b/src/sentry/snuba/functions.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import timedelta from typing import Any import sentry_sdk @@ -10,7 +10,6 @@ ProfileFunctionsTimeseriesQueryBuilder, ProfileTopFunctionsTimeseriesQueryBuilder, ) -from sentry.search.events.fields import get_json_meta_type from sentry.search.events.types import QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset from sentry.snuba.discover import transform_tips, zerofill @@ -82,7 +81,7 @@ def timeseries_query( rollup: int, referrer: str = "", zerofill_results: bool = 
True, - comparison_delta: datetime | None = None, + comparison_delta: timedelta | None = None, functions_acl: list[str] | None = None, allow_metric_aggregates: bool = False, has_metrics: bool = False, @@ -91,6 +90,7 @@ def timeseries_query( on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> Any: builder = ProfileFunctionsTimeseriesQueryBuilder( @@ -102,6 +102,7 @@ def timeseries_query( selected_columns=selected_columns, config=QueryBuilderConfig( functions_acl=functions_acl, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) results = builder.run_query(referrer=referrer, query_source=query_source) @@ -121,12 +122,7 @@ def timeseries_query( if zerofill_results else results["data"] ), - "meta": { - "fields": { - value["name"]: get_json_meta_type(value["name"], value.get("type"), builder) - for value in results["meta"] - } - }, + "meta": results["meta"], }, snuba_params.start_date, snuba_params.end_date, @@ -274,14 +270,7 @@ def format_top_events_timeseries_results( else item["data"] ), "order": item["order"], - "meta": { - "fields": { - value["name"]: get_json_meta_type( - value["name"], value.get("type"), query_builder - ) - for value in result["meta"] - } - }, + "meta": result["meta"], }, snuba_params.start_date, snuba_params.end_date, diff --git a/src/sentry/snuba/issue_platform.py b/src/sentry/snuba/issue_platform.py index 7f4cb3162f3605..c4b26427682e2c 100644 --- a/src/sentry/snuba/issue_platform.py +++ b/src/sentry/snuba/issue_platform.py @@ -7,7 +7,6 @@ from sentry.exceptions import InvalidSearchQuery from sentry.search.events.builder.discover import DiscoverQueryBuilder from sentry.search.events.builder.issue_platform import IssuePlatformTimeseriesQueryBuilder -from sentry.search.events.fields import get_json_meta_type from sentry.search.events.types import EventsResponse, QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset from sentry.snuba.discover import transform_tips, zerofill @@ -121,6 +120,7 @@ def timeseries_query( on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ): """ High-level API for doing arbitrary user timeseries queries against events. 
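The comparison_delta annotation corrected above (datetime → timedelta, in functions.py and again in profiles.py below) reflects how the value is used: it is an offset applied to the query window, not an absolute time. The spans_rpc.py changes later in this diff subtract it from both ends of the window and report the shifted series as comparisonCount. A minimal sketch of the window arithmetic, with an illustrative helper name that is not part of the codebase:

    from datetime import datetime, timedelta, timezone

    def comparison_window(start: datetime, end: datetime, delta: timedelta):
        # Shift the whole window back by the delta; the same query is then
        # run over this earlier range to produce comparisonCount values.
        return start - delta, end - delta

    end = datetime(2024, 11, 19, 12, 0, tzinfo=timezone.utc)
    start = end - timedelta(hours=1)
    comp_start, comp_end = comparison_window(start, end, timedelta(days=1))
    assert comp_start == datetime(2024, 11, 18, 11, 0, tzinfo=timezone.utc)
    assert comp_end == datetime(2024, 11, 18, 12, 0, tzinfo=timezone.utc)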
@@ -159,6 +159,7 @@ def timeseries_query( config=QueryBuilderConfig( functions_acl=functions_acl, has_metrics=has_metrics, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) query_list = [base_builder] @@ -211,19 +212,12 @@ def timeseries_query( cmp_result_val = cmp_result.get(col_name, 0) result["comparisonCount"] = cmp_result_val - result = results[0] + result = base_builder.process_results(results[0]) return SnubaTSResult( { "data": result["data"], - "meta": { - "fields": { - value["name"]: get_json_meta_type( - value["name"], value.get("type"), base_builder - ) - for value in result["meta"] - } - }, + "meta": result["meta"], }, snuba_params.start_date, snuba_params.end_date, diff --git a/src/sentry/snuba/metrics_enhanced_performance.py b/src/sentry/snuba/metrics_enhanced_performance.py index d9e9248839e572..2273781467e894 100644 --- a/src/sentry/snuba/metrics_enhanced_performance.py +++ b/src/sentry/snuba/metrics_enhanced_performance.py @@ -139,6 +139,7 @@ def timeseries_query( on_demand_metrics_type=None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. @@ -163,6 +164,7 @@ def timeseries_query( on_demand_metrics_enabled=on_demand_metrics_enabled, on_demand_metrics_type=on_demand_metrics_type, query_source=query_source, + transform_alias_to_input_format=transform_alias_to_input_format, ) # raise Invalid Queries since the same thing will happen with discover except InvalidSearchQuery: @@ -191,6 +193,7 @@ def timeseries_query( functions_acl=functions_acl, has_metrics=has_metrics, query_source=query_source, + transform_alias_to_input_format=transform_alias_to_input_format, ) return SnubaTSResult( { diff --git a/src/sentry/snuba/metrics_performance.py b/src/sentry/snuba/metrics_performance.py index fc586c49bccc38..68e00c52bfccf8 100644 --- a/src/sentry/snuba/metrics_performance.py +++ b/src/sentry/snuba/metrics_performance.py @@ -258,6 +258,7 @@ def timeseries_query( groupby: Column | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. 
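The recurring hunk pattern in these dataset modules, replacing the hand-built get_json_meta_type mapping with base_builder.process_results(results[0]) and forwarding result["meta"], centralizes meta handling in the query builder. A rough sketch of the shapes involved (the field name and type below are illustrative, not taken from a real query):

    # Shape of the raw snuba meta vs. what process_results() hands back.
    raw_meta = [{"name": "p95_transaction_duration", "type": "Float64"}]

    # Before: each dataset module mapped snuba types to JSON meta types itself,
    # roughly equivalent to:
    meta_before = {"fields": {col["name"]: "duration" for col in raw_meta}}

    # After: the builder's process_results() already returns meta in that
    # nested form, so the SnubaTSResult can pass result["meta"] straight through.
    result = {"data": [], "meta": {"fields": {"p95_transaction_duration": "duration"}}}
    assert result["meta"]["fields"] == meta_before["fields"]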
@@ -282,6 +283,7 @@ def run_metrics_query(inner_params: SnubaParams): use_metrics_layer=use_metrics_layer, on_demand_metrics_enabled=on_demand_metrics_enabled, on_demand_metrics_type=on_demand_metrics_type, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) metrics_referrer = referrer + ".metrics-enhanced" diff --git a/src/sentry/snuba/metrics_summaries.py b/src/sentry/snuba/metrics_summaries.py deleted file mode 100644 index 3abff797de7237..00000000000000 --- a/src/sentry/snuba/metrics_summaries.py +++ /dev/null @@ -1,60 +0,0 @@ -from sentry.search.events.builder.metrics_summaries import MetricsSummariesQueryBuilder -from sentry.search.events.types import QueryBuilderConfig -from sentry.snuba.dataset import Dataset -from sentry.snuba.metrics.extraction import MetricSpecType -from sentry.snuba.query_sources import QuerySource - - -def query( - selected_columns, - query, - params, - snuba_params=None, - equations=None, - orderby=None, - offset=None, - limit=50, - referrer=None, - auto_fields=False, - auto_aggregations=False, - include_equation_fields=False, - allow_metric_aggregates=False, - use_aggregate_conditions=False, - conditions=None, - functions_acl=None, - transform_alias_to_input_format=False, - sample=None, - has_metrics=False, - use_metrics_layer=False, - skip_tag_resolution=False, - extra_columns=None, - on_demand_metrics_enabled=False, - on_demand_metrics_type: MetricSpecType | None = None, - fallback_to_transactions=False, - query_source: QuerySource | None = None, -): - builder = MetricsSummariesQueryBuilder( - Dataset.MetricsSummaries, - params, - snuba_params=snuba_params, - query=query, - selected_columns=selected_columns, - equations=equations, - orderby=orderby, - limit=limit, - offset=offset, - sample_rate=sample, - config=QueryBuilderConfig( - has_metrics=has_metrics, - transform_alias_to_input_format=transform_alias_to_input_format, - skip_tag_resolution=skip_tag_resolution, - equation_config={"auto_add": include_equation_fields}, - auto_fields=auto_fields, - auto_aggregations=auto_aggregations, - use_aggregate_conditions=use_aggregate_conditions, - functions_acl=functions_acl, - ), - ) - - result = builder.process_results(builder.run_query(referrer, query_source=query_source)) - return result diff --git a/src/sentry/snuba/profile_functions_metrics.py b/src/sentry/snuba/profile_functions_metrics.py deleted file mode 100644 index eae59d3a28d9db..00000000000000 --- a/src/sentry/snuba/profile_functions_metrics.py +++ /dev/null @@ -1,299 +0,0 @@ -import logging -from datetime import timedelta - -from snuba_sdk import Column, Condition - -import sentry.models -from sentry.search.events.builder.profile_functions_metrics import ( - ProfileFunctionsMetricsQueryBuilder, - TimeseriesProfileFunctionsMetricsQueryBuilder, - TopProfileFunctionsMetricsQueryBuilder, -) -from sentry.search.events.types import EventsResponse, QueryBuilderConfig, SnubaParams -from sentry.snuba import discover -from sentry.snuba.dataset import Dataset -from sentry.snuba.metrics.extraction import MetricSpecType -from sentry.snuba.query_sources import QuerySource -from sentry.utils.snuba import SnubaTSResult - -logger = logging.getLogger(__name__) - - -def query( - selected_columns: list[str], - query: str, - referrer: str, - snuba_params: SnubaParams | None = None, - equations: list[str] | None = None, - orderby: list[str] | None = None, - offset: int | None = None, - limit: int = 50, - auto_fields: bool = False, - auto_aggregations: bool = False, - include_equation_fields: bool = 
False, - allow_metric_aggregates: bool = False, - use_aggregate_conditions: bool = False, - conditions: list[Condition] | None = None, - functions_acl: list[str] | None = None, - transform_alias_to_input_format: bool = False, - sample: float | None = None, - has_metrics: bool = False, - use_metrics_layer: bool = False, - skip_tag_resolution: bool = False, - extra_columns: list[Column] | None = None, - on_demand_metrics_enabled: bool = False, - on_demand_metrics_type: MetricSpecType | None = None, - fallback_to_transactions: bool = False, - query_source: QuerySource | None = None, -): - builder = ProfileFunctionsMetricsQueryBuilder( - dataset=Dataset.PerformanceMetrics, - params={}, - snuba_params=snuba_params, - query=query, - selected_columns=selected_columns, - equations=equations, - orderby=orderby, - limit=limit, - offset=offset, - sample_rate=sample, - config=QueryBuilderConfig( - auto_fields=auto_fields, - auto_aggregations=auto_aggregations, - use_aggregate_conditions=use_aggregate_conditions, - functions_acl=functions_acl, - equation_config={"auto_add": include_equation_fields}, - has_metrics=has_metrics, - use_metrics_layer=use_metrics_layer, - transform_alias_to_input_format=transform_alias_to_input_format, - skip_tag_resolution=skip_tag_resolution, - ), - ) - - result = builder.process_results(builder.run_query(referrer, query_source=query_source)) - return result - - -def timeseries_query( - selected_columns: list[str], - query: str, - snuba_params: SnubaParams, - rollup: int, - referrer: str, - zerofill_results: bool = True, - allow_metric_aggregates=True, - comparison_delta: timedelta | None = None, - functions_acl: list[str] | None = None, - has_metrics: bool = True, - use_metrics_layer: bool = False, - on_demand_metrics_enabled: bool = False, - on_demand_metrics_type: MetricSpecType | None = None, - groupby: Column | None = None, - query_source: QuerySource | None = None, - fallback_to_transactions: bool = False, -) -> SnubaTSResult: - """ - High-level API for doing arbitrary user timeseries queries against events. 
- this API should match that of sentry.snuba.discover.timeseries_query - """ - - metrics_query = TimeseriesProfileFunctionsMetricsQueryBuilder( - {}, - rollup, - snuba_params=snuba_params, - dataset=Dataset.PerformanceMetrics, - query=query, - selected_columns=selected_columns, - groupby=groupby, - config=QueryBuilderConfig( - functions_acl=functions_acl, - allow_metric_aggregates=allow_metric_aggregates, - use_metrics_layer=use_metrics_layer, - ), - ) - result = metrics_query.run_query(referrer, query_source=query_source) - - result = metrics_query.process_results(result) - result["data"] = ( - discover.zerofill( - result["data"], - snuba_params.start_date, - snuba_params.end_date, - rollup, - ["time"], - ) - if zerofill_results - else result["data"] - ) - - result["meta"]["isMetricsData"] = True - - return SnubaTSResult( - { - "data": result["data"], - "isMetricsData": True, - "meta": result["meta"], - }, - snuba_params.start_date, - snuba_params.end_date, - rollup, - ) - - -def top_events_timeseries( - timeseries_columns: list[str], - selected_columns: list[str], - user_query: str, - snuba_params: SnubaParams, - orderby: list[str], - rollup: int, - limit: int, - referrer: str, - organization: sentry.models.Organization, - equations: list[str] | None = None, - top_events: EventsResponse | None = None, - allow_empty: bool = True, - zerofill_results: bool = True, - include_other: bool = False, - functions_acl: list[str] | None = None, - on_demand_metrics_enabled: bool = False, - on_demand_metrics_type: MetricSpecType | None = None, - fallback_to_transactions: bool = False, -): - """ - High-level API for doing arbitrary user timeseries queries for a limited number of top events - - Returns a dictionary of SnubaTSResult objects that have been zerofilled in - case of gaps. Each value of the dictionary should match the result of a timeseries query - - timeseries_columns (Sequence[str]) List of public aliases to fetch for the timeseries query, - usually matches the y-axis of the graph - selected_columns (Sequence[str]) List of public aliases to fetch for the events query, - this is to determine what the top events are - user_query (str) Filter query string to create conditions from. needs to be user_query - to not conflict with the function query - params (Dict[str, str]) Filtering parameters with start, end, project_id, environment, - orderby (Sequence[str]) The fields to order results by. - rollup (int) The bucket width in seconds - limit (int) The number of events to get timeseries for - organization (Organization) Used to map group ids to short ids - referrer (str|None) A referrer string to help locate the origin of this query. - top_events (dict|None) A dictionary with a 'data' key containing a list of dictionaries that - represent the top events matching the query. Useful when you have found - the top events earlier and want to save a query. 
- """ - - if top_events is None: - top_events = query( - selected_columns, - query=user_query, - snuba_params=snuba_params, - equations=equations, - orderby=orderby, - limit=limit, - referrer=referrer, - auto_aggregations=True, - use_aggregate_conditions=True, - include_equation_fields=True, - skip_tag_resolution=True, - ) - - top_events_builder = TopProfileFunctionsMetricsQueryBuilder( - Dataset.PerformanceMetrics, - {}, - rollup, - top_events["data"], - snuba_params=snuba_params, - other=False, - query=user_query, - selected_columns=selected_columns, - timeseries_columns=timeseries_columns, - config=QueryBuilderConfig( - functions_acl=functions_acl, - skip_tag_resolution=True, - ), - ) - if len(top_events["data"]) == limit and include_other: - other_events_builder = TopProfileFunctionsMetricsQueryBuilder( - Dataset.PerformanceMetrics, - {}, - rollup, - top_events["data"], - snuba_params=snuba_params, - other=True, - query=user_query, - selected_columns=selected_columns, - timeseries_columns=timeseries_columns, - ) - - # TODO: use bulk_snuba_queries - other_result = other_events_builder.run_query(referrer) - result = top_events_builder.run_query(referrer) - else: - result = top_events_builder.run_query(referrer) - other_result = {"data": []} - if ( - not allow_empty - and not len(result.get("data", [])) - and not len(other_result.get("data", [])) - ): - return SnubaTSResult( - { - "data": ( - discover.zerofill( - [], snuba_params.start_date, snuba_params.end_date, rollup, ["time"] - ) - if zerofill_results - else [] - ), - }, - snuba_params.start_date, - snuba_params.end_date, - rollup, - ) - - result = top_events_builder.process_results(result) - - translated_groupby = top_events_builder.translated_groupby - - results = ( - {discover.OTHER_KEY: {"order": limit, "data": other_result["data"]}} - if len(other_result.get("data", [])) - else {} - ) - # Using the top events add the order to the results - for index, item in enumerate(top_events["data"]): - result_key = discover.create_result_key(item, translated_groupby, {}) - results[result_key] = {"order": index, "data": []} - for row in result["data"]: - result_key = discover.create_result_key(row, translated_groupby, {}) - if result_key in results: - results[result_key]["data"].append(row) - else: - logger.warning( - "profile_functions_metrics.top-events.timeseries.key-mismatch", - extra={"result_key": result_key, "top_event_keys": list(results.keys())}, - ) - snuba_ts_result: dict[str, SnubaTSResult] = {} - for key, item in results.items(): - snuba_ts_result[key] = SnubaTSResult( - { - "data": ( - discover.zerofill( - item["data"], - snuba_params.start_date, - snuba_params.end_date, - rollup, - ["time"], - ) - if zerofill_results - else item["data"] - ), - "order": item["order"], - }, - snuba_params.start_date, - snuba_params.end_date, - rollup, - ) - - return snuba_ts_result diff --git a/src/sentry/snuba/profiles.py b/src/sentry/snuba/profiles.py index 8e6b92c1163b04..41b44a5e49d64d 100644 --- a/src/sentry/snuba/profiles.py +++ b/src/sentry/snuba/profiles.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import timedelta from typing import Any from sentry.exceptions import InvalidSearchQuery @@ -6,7 +6,6 @@ ProfilesQueryBuilder, ProfilesTimeseriesQueryBuilder, ) -from sentry.search.events.fields import get_json_meta_type from sentry.search.events.types import QueryBuilderConfig, SnubaParams from sentry.snuba.dataset import Dataset from sentry.snuba.discover import transform_tips, zerofill @@ -69,7 +68,7 @@ def 
timeseries_query( rollup: int, referrer: str = "", zerofill_results: bool = True, - comparison_delta: datetime | None = None, + comparison_delta: timedelta | None = None, functions_acl: list[str] | None = None, allow_metric_aggregates: bool = False, has_metrics: bool = False, @@ -78,6 +77,7 @@ def timeseries_query( on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> Any: builder = ProfilesTimeseriesQueryBuilder( dataset=Dataset.Profiles, @@ -88,9 +88,11 @@ def timeseries_query( selected_columns=selected_columns, config=QueryBuilderConfig( functions_acl=functions_acl, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) results = builder.run_query(referrer=referrer, query_source=query_source) + results = builder.process_results(results) return SnubaTSResult( { @@ -105,12 +107,7 @@ def timeseries_query( if zerofill_results else results["data"] ), - "meta": { - "fields": { - value["name"]: get_json_meta_type(value["name"], value.get("type"), builder) - for value in results["meta"] - } - }, + "meta": results["meta"], }, snuba_params.start_date, snuba_params.end_date, diff --git a/src/sentry/snuba/query_subscriptions/constants.py b/src/sentry/snuba/query_subscriptions/constants.py index b04cf11f619fc6..f692db46d6d9c5 100644 --- a/src/sentry/snuba/query_subscriptions/constants.py +++ b/src/sentry/snuba/query_subscriptions/constants.py @@ -7,6 +7,7 @@ Dataset.Transactions: "transactions-subscription-results", Dataset.PerformanceMetrics: "generic-metrics-subscription-results", Dataset.Metrics: "metrics-subscription-results", + Dataset.EventsAnalyticsPlatform: "eap-spans-subscription-results", } topic_to_dataset = { diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py index bd9141627d5a99..667d39232b72dc 100644 --- a/src/sentry/snuba/referrer.py +++ b/src/sentry/snuba/referrer.py @@ -165,12 +165,8 @@ class Referrer(Enum): API_ORGANIZATION_METRICS_DATA = "api.organization.metrics-data" API_ORGANIZATION_METRICS_ESTIMATION_STATS = "api.organization-metrics-estimation-stats" API_ORGANIZATION_METRICS_METADATA_FETCH_SPANS = "api.organization.metrics-metadata.fetch-spans" - API_ORGANIZATION_METRICS_METADATA_FETCH_METRICS_SUMMARIES = ( - "api.organization.metrics-metadata.fetch-metrics-summaries" - ) API_ORGANIZATION_METRICS_QUERY = "api.organization.metrics-query" API_ORGANIZATION_METRICS_EAP_QUERY = "api.organization.metrics-eap-query" - API_ORGANIZATION_METRICS_SAMPLES = "api.organization.metrics-samples" API_ORGANIZATION_ISSUE_REPLAY_COUNT = "api.organization-issue-replay-count" API_ORGANIZATION_SDK_UPDATES = "api.organization-sdk-updates" API_ORGANIZATION_SPANS_HISTOGRAM_MIN_MAX = "api.organization-spans-histogram-min-max" @@ -324,6 +320,12 @@ class Referrer(Enum): API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_TTID = ( "api.performance.generic-widget-chart.slow-screens-by-ttid" ) + API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_COLD_START = ( + "api.performance.generic-widget-chart.slow-screens-by-cold-start" + ) + API_PERFORMANCE_GENERIC_WIDGET_CHART_SLOW_SCREENS_BY_WARM_START = ( + "api.performance.generic-widget-chart.slow-screens-by-warm-start" + ) API_PERFORMANCE_GENERIC_WIDGET_CHART_TPM_AREA_METRICS_ENHANCED = ( "api.performance.generic-widget-chart.tpm-area.metrics-enhanced" ) @@ -410,7 +412,6 @@ class Referrer(Enum): API_PROFILING_PROFILE_SUMMARY_TOTALS = "api.profiling.profile-summary-totals" 
API_PROFILING_PROFILE_SUMMARY_TABLE = "api.profiling.profile-summary-table" API_PROFILING_PROFILE_SUMMARY_FUNCTIONS_TABLE = "api.profiling.profile-summary-functions-table" - API_PROFILING_PROFILE_FLAMEGRAPH = "api.profiling.profile-flamegraph" API_PROFILING_PROFILE_FLAMEGRAPH_TRANSACTION_CANDIDATES = ( "api.profiling.profile-flamegraph-transaction-candidates" ) diff --git a/src/sentry/snuba/spans_eap.py b/src/sentry/snuba/spans_eap.py index bf6cfeb665f2aa..5356f2fd29f64e 100644 --- a/src/sentry/snuba/spans_eap.py +++ b/src/sentry/snuba/spans_eap.py @@ -97,6 +97,7 @@ def timeseries_query( dataset: Dataset = Dataset.Discover, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. @@ -114,6 +115,7 @@ def timeseries_query( selected_columns=columns, config=QueryBuilderConfig( functions_acl=functions_acl, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) result = querybuilder.run_query(referrer, query_source=query_source) diff --git a/src/sentry/snuba/spans_indexed.py b/src/sentry/snuba/spans_indexed.py index c49021e507b884..602e0e5c256b83 100644 --- a/src/sentry/snuba/spans_indexed.py +++ b/src/sentry/snuba/spans_indexed.py @@ -92,6 +92,7 @@ def timeseries_query( on_demand_metrics_type: MetricSpecType | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. @@ -109,6 +110,7 @@ def timeseries_query( selected_columns=columns, config=QueryBuilderConfig( functions_acl=functions_acl, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) result = query.run_query(referrer, query_source=query_source) diff --git a/src/sentry/snuba/spans_metrics.py b/src/sentry/snuba/spans_metrics.py index 0358beb2153ae9..d2f41948938910 100644 --- a/src/sentry/snuba/spans_metrics.py +++ b/src/sentry/snuba/spans_metrics.py @@ -93,6 +93,7 @@ def timeseries_query( groupby: Column | None = None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. 
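transform_alias_to_input_format is now threaded from every timeseries entry point into QueryBuilderConfig. In broad terms, the flag decides whether result keys come back as the resolved snake_case aliases or as the expressions the caller originally supplied; the sketch below illustrates that mapping under an assumed alias scheme and is not the builder's actual implementation:

    def to_alias(expr: str) -> str:
        """Illustrative alias scheme: 'p95(transaction.duration)' -> 'p95_transaction_duration'."""
        return expr.replace("(", "_").replace(")", "").replace(".", "_").rstrip("_")

    selected = ["count()", "p95(transaction.duration)"]
    alias_to_input = {to_alias(expr): expr for expr in selected}

    row = {"time": 1700000000, "count": 42, "p95_transaction_duration": 187.5}
    transformed = {alias_to_input.get(key, key): value for key, value in row.items()}
    # -> {"time": 1700000000, "count()": 42, "p95(transaction.duration)": 187.5}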
@@ -111,6 +112,7 @@ def timeseries_query( functions_acl=functions_acl, allow_metric_aggregates=allow_metric_aggregates, use_metrics_layer=use_metrics_layer, + transform_alias_to_input_format=transform_alias_to_input_format, ), ) result = metrics_query.run_query(referrer=referrer, query_source=query_source) diff --git a/src/sentry/snuba/spans_rpc.py b/src/sentry/snuba/spans_rpc.py index a7ad32ec292ad8..7b01cd43bef5b6 100644 --- a/src/sentry/snuba/spans_rpc.py +++ b/src/sentry/snuba/spans_rpc.py @@ -1,26 +1,32 @@ import logging +from datetime import timedelta from typing import Any -from sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeriesRequest +from sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeries, TimeSeriesRequest from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import Column, TraceItemTableRequest from sentry_protos.snuba.v1.trace_item_attribute_pb2 import AttributeAggregation, AttributeKey +from sentry_protos.snuba.v1.trace_item_filter_pb2 import AndFilter, OrFilter, TraceItemFilter -from sentry.search.eap.columns import ResolvedColumn +from sentry.api.event_search import SearchFilter, SearchKey, SearchValue +from sentry.exceptions import InvalidSearchQuery +from sentry.search.eap.columns import ResolvedColumn, ResolvedFunction from sentry.search.eap.constants import FLOAT, INT, STRING from sentry.search.eap.spans import SearchResolver -from sentry.search.eap.types import SearchResolverConfig -from sentry.search.events.types import EventsMeta, EventsResponse, SnubaData, SnubaParams +from sentry.search.eap.types import CONFIDENCES, ConfidenceData, EAPResponse, SearchResolverConfig +from sentry.search.events.fields import get_function_alias, is_function +from sentry.search.events.types import EventsMeta, SnubaData, SnubaParams +from sentry.snuba.discover import OTHER_KEY, create_result_key, zerofill from sentry.utils import snuba_rpc +from sentry.utils.snuba import SnubaTSResult, process_value logger = logging.getLogger("sentry.snuba.spans_rpc") -def categorize_column(column: ResolvedColumn) -> Column: - proto_definition = column.proto_definition - if isinstance(proto_definition, AttributeAggregation): - return Column(aggregation=proto_definition, label=column.public_alias) +def categorize_column(column: ResolvedColumn | ResolvedFunction) -> Column: + if isinstance(column, ResolvedFunction): + return Column(aggregation=column.proto_definition, label=column.public_alias) else: - return Column(key=proto_definition, label=column.public_alias) + return Column(key=column.proto_definition, label=column.public_alias) def run_table_query( @@ -32,24 +38,40 @@ def run_table_query( limit: int, referrer: str, config: SearchResolverConfig, -) -> EventsResponse: + search_resolver: SearchResolver | None = None, +) -> EAPResponse: """Make the query""" - resolver = SearchResolver(params=params, config=config) + resolver = ( + SearchResolver(params=params, config=config) if search_resolver is None else search_resolver + ) meta = resolver.resolve_meta(referrer=referrer) query = resolver.resolve_query(query_string) columns, contexts = resolver.resolve_columns(selected_columns) + # We allow orderby function_aliases if they're a selected_column + # eg. 
can orderby sum_span_self_time, assuming sum(span.self_time) is selected + orderby_aliases = { + get_function_alias(column_name): resolved_column + for resolved_column, column_name in zip(columns, selected_columns) + } # Orderby is only applicable to TraceItemTableRequest - resolved_orderby = ( - [ + resolved_orderby = [] + orderby_columns = orderby if orderby is not None else [] + for orderby_column in orderby_columns: + stripped_orderby = orderby_column.lstrip("-") + if stripped_orderby in orderby_aliases: + resolved_column = orderby_aliases[stripped_orderby] + else: + resolved_column = resolver.resolve_column(stripped_orderby)[0] + resolved_orderby.append( TraceItemTableRequest.OrderBy( - column=categorize_column(resolver.resolve_column(orderby_column.lstrip("-"))[0]), + column=categorize_column(resolved_column), descending=orderby_column.startswith("-"), ) - for orderby_column in orderby - ] - if orderby - else [] + ) + has_aggregations = any( + col for col in columns if isinstance(col.proto_definition, AttributeAggregation) ) + labeled_columns = [categorize_column(col) for col in columns] """Run the query""" @@ -57,18 +79,24 @@ def run_table_query( meta=meta, filter=query, columns=labeled_columns, - group_by=[ - col.proto_definition - for col in columns - if isinstance(col.proto_definition, AttributeKey) - ], + group_by=( + [ + col.proto_definition + for col in columns + if isinstance(col.proto_definition, AttributeKey) + ] + if has_aggregations + else [] + ), order_by=resolved_orderby, + limit=limit, virtual_column_contexts=[context for context in contexts if context is not None], ) rpc_response = snuba_rpc.table_rpc(rpc_request) """Process the results""" final_data: SnubaData = [] + final_confidence: ConfidenceData = [] final_meta: EventsMeta = EventsMeta(fields={}) # Mapping from public alias to resolved column so we know type etc. 
columns_by_name = {col.public_alias: col for col in columns} @@ -82,10 +110,18 @@ def run_table_query( ) continue resolved_column = columns_by_name[attribute] - final_meta["fields"][attribute] = resolved_column.meta_type + final_meta["fields"][attribute] = resolved_column.search_type + + # When there's no aggregates reliabilities is an empty array + has_reliability = len(column_value.reliabilities) > 0 + if has_reliability: + assert len(column_value.results) == len(column_value.reliabilities), Exception( + "Length of rpc results do not match length of rpc reliabilities" + ) while len(final_data) < len(column_value.results): final_data.append({}) + final_confidence.append({}) for index, result in enumerate(column_value.results): result_value: str | int | float @@ -95,9 +131,14 @@ def run_table_query( result_value = result.val_int elif resolved_column.proto_type == FLOAT: result_value = result.val_float + result_value = process_value(result_value) final_data[index][attribute] = resolved_column.process_column(result_value) + if has_reliability: + final_confidence[index][attribute] = CONFIDENCES.get( + column_value.reliabilities[index], None + ) - return {"data": final_data, "meta": final_meta} + return {"data": final_data, "meta": final_meta, "confidence": final_confidence} def get_timeseries_query( @@ -108,12 +149,18 @@ def get_timeseries_query( referrer: str, config: SearchResolverConfig, granularity_secs: int, + extra_conditions: TraceItemFilter | None = None, ) -> TimeSeriesRequest: resolver = SearchResolver(params=params, config=config) meta = resolver.resolve_meta(referrer=referrer) query = resolver.resolve_query(query_string) (aggregations, _) = resolver.resolve_aggregates(y_axes) (groupbys, _) = resolver.resolve_columns(groupby) + if extra_conditions is not None: + if query is not None: + query = TraceItemFilter(and_filter=AndFilter(filters=[query, extra_conditions])) + else: + query = extra_conditions return TimeSeriesRequest( meta=meta, @@ -136,54 +183,243 @@ def run_timeseries_query( params: SnubaParams, query_string: str, y_axes: list[str], - groupby: list[str], -) -> Any: - pass + referrer: str, + granularity_secs: int, + config: SearchResolverConfig, + comparison_delta: timedelta | None = None, +) -> SnubaTSResult: """Make the query""" - # maker = SearchResolver(params) - # groupby, contexts = maker.resolve_columns(groupby) - # yaxes = maker.resolve_aggregate(y_axes) - # query = maker.resolve_query(query_string) + rpc_request = get_timeseries_query( + params, query_string, y_axes, [], referrer, config, granularity_secs + ) """Run the query""" - # rpc = timeseries_RPC(columns=[column.proto_definition for column in groupby], query=query) - # result = rpc.run() + rpc_response = snuba_rpc.timeseries_rpc(rpc_request) """Process the results""" - # return _process_timeseries(result, columns) + result: SnubaData = [] + confidences: SnubaData = [] + for timeseries in rpc_response.result_timeseries: + processed, confidence = _process_timeseries(timeseries, params, granularity_secs) + if len(result) == 0: + result = processed + confidences = confidence + else: + for existing, new in zip(result, processed): + existing.update(new) + for existing, new in zip(confidences, confidence): + existing.update(new) + if len(result) == 0: + # The rpc only zerofills for us when there are results, if there aren't any we have to do it ourselves + result = zerofill( + [], + params.start_date, + params.end_date, + granularity_secs, + ["time"], + ) + + if comparison_delta is not None: + if 
len(rpc_request.aggregations) != 1: + raise InvalidSearchQuery("Only one column can be selected for comparison queries") + + comp_query_params = params.copy() + assert comp_query_params.start is not None, "start is required" + assert comp_query_params.end is not None, "end is required" + comp_query_params.start = comp_query_params.start_date - comparison_delta + comp_query_params.end = comp_query_params.end_date - comparison_delta + + comp_rpc_request = get_timeseries_query( + comp_query_params, query_string, y_axes, [], referrer, config, granularity_secs + ) + comp_rpc_response = snuba_rpc.timeseries_rpc(comp_rpc_request) + + if comp_rpc_response.result_timeseries: + timeseries = comp_rpc_response.result_timeseries[0] + processed, _ = _process_timeseries(timeseries, params, granularity_secs) + label = get_function_alias(timeseries.label) + for existing, new in zip(result, processed): + existing["comparisonCount"] = new[label] + else: + for existing in result: + existing["comparisonCount"] = 0 + + return SnubaTSResult( + {"data": result, "confidence": confidences}, params.start, params.end, granularity_secs + ) + + +def build_top_event_conditions( + resolver: SearchResolver, top_events: EAPResponse, groupby_columns: list[str] +) -> Any: + conditions = [] + other_conditions = [] + for event in top_events["data"]: + row_conditions = [] + other_row_conditions = [] + for key in groupby_columns: + if key == "project.id": + value = resolver.params.project_slug_map[ + event.get("project", event.get("project.slug")) + ] + else: + value = event[key] + resolved_term = resolver.resolve_term( + SearchFilter( + key=SearchKey(name=key), + operator="=", + value=SearchValue(raw_value=value), + ) + ) + if resolved_term is not None: + row_conditions.append(resolved_term) + other_term = resolver.resolve_term( + SearchFilter( + key=SearchKey(name=key), + operator="!=", + value=SearchValue(raw_value=value), + ) + ) + if other_term is not None: + other_row_conditions.append(other_term) + conditions.append(TraceItemFilter(and_filter=AndFilter(filters=row_conditions))) + other_conditions.append(TraceItemFilter(or_filter=OrFilter(filters=other_row_conditions))) + return ( + TraceItemFilter(or_filter=OrFilter(filters=conditions)), + TraceItemFilter(and_filter=AndFilter(filters=other_conditions)), + ) def run_top_events_timeseries_query( params: SnubaParams, query_string: str, y_axes: list[str], - groupby: list[str], - orderby: list[str], + raw_groupby: list[str], + orderby: list[str] | None, + limit: int, + referrer: str, + granularity_secs: int, + config: SearchResolverConfig, ) -> Any: """We intentionally duplicate run_timeseries_query code here to reduce the complexity of needing multiple helper functions that both would call This is because at time of writing, the query construction is very straightforward, if that changes perhaps we can change this""" - pass + """Make a table query first to get what we need to filter by""" + search_resolver = SearchResolver(params, config) + top_events = run_table_query( + params, + query_string, + raw_groupby + y_axes, + orderby, + 0, + limit, + referrer, + config, + search_resolver=search_resolver, + ) + # Need to change the project slug columns to project.id because timeseries requests don't take virtual_column_contexts + groupby_columns = [col for col in raw_groupby if not is_function(col)] + groupby_columns_without_project = [ + col if col not in ["project", "project.name"] else "project.id" for col in groupby_columns + ] + top_conditions, other_conditions = 
build_top_event_conditions( + search_resolver, top_events, groupby_columns_without_project + ) """Make the query""" - # maker = SearchResolver(params) - # top_events = run_table_query() with process_results off - # new_conditions = construct conditions based on top_events - # resolved_query = And(new_conditions, maker.resolve_query(query_string)) - # groupby, contexts = maker.resolve_columns(groupby) - # yaxes = maker.resolve_aggregate(y_axes) + rpc_request = get_timeseries_query( + params, + query_string, + y_axes, + groupby_columns_without_project, + referrer, + config, + granularity_secs, + extra_conditions=top_conditions, + ) + other_request = get_timeseries_query( + params, + query_string, + y_axes, + groupby_columns_without_project, + referrer, + config, + granularity_secs, + extra_conditions=other_conditions, + ) """Run the query""" - # rpc = timeseries_RPC(columns=[column.proto_definition for column in groupby], query=query) + rpc_response = snuba_rpc.timeseries_rpc(rpc_request) + other_response = snuba_rpc.timeseries_rpc(other_request) """Process the results""" - # result = rpc.run() - # return _process_timeseries(result, columns) + map_result_key_to_timeseries = {} + for timeseries in rpc_response.result_timeseries: + groupby_attributes = timeseries.group_by_attributes + remapped_groupby = {} + # Remap internal attrs back to public ones + for col in groupby_columns: + if col in ["project", "project.slug"]: + resolved_groupby, _ = search_resolver.resolve_attribute("project.id") + remapped_groupby[col] = params.project_id_map[ + int(groupby_attributes[resolved_groupby.internal_name]) + ] + else: + resolved_groupby, _ = search_resolver.resolve_attribute(col) + remapped_groupby[col] = groupby_attributes[resolved_groupby.internal_name] + result_key = create_result_key(remapped_groupby, groupby_columns, {}) + map_result_key_to_timeseries[result_key] = timeseries + final_result = {} + # Top Events actually has the order, so we need to iterate through it, regenerate the result keys + for index, row in enumerate(top_events["data"]): + result_key = create_result_key(row, groupby_columns, {}) + result_data, result_confidence = _process_timeseries( + map_result_key_to_timeseries[result_key], + params, + granularity_secs, + ) + final_result[result_key] = SnubaTSResult( + { + "data": result_data, + "confidence": result_confidence, + "order": index, + }, + params.start, + params.end, + granularity_secs, + ) + if other_response.result_timeseries: + result_data, result_confidence = _process_timeseries( + other_response.result_timeseries[0], + params, + granularity_secs, + ) + final_result[OTHER_KEY] = SnubaTSResult( + { + "data": result_data, + "confidence": result_confidence, + "order": limit, + }, + params.start, + params.end, + granularity_secs, + ) + return final_result + +def _process_timeseries( + timeseries: TimeSeries, params: SnubaParams, granularity_secs: int, order: int | None = None +) -> tuple[SnubaData, SnubaData]: + result: SnubaData = [] + confidence: SnubaData = [] + # Timeseries serialization expects the function alias (eg. 
`count` not `count()`) + label = get_function_alias(timeseries.label) + if len(result) < len(timeseries.buckets): + for bucket in timeseries.buckets: + result.append({"time": bucket.seconds}) + confidence.append({"time": bucket.seconds}) + for index, data_point in enumerate(timeseries.data_points): + result[index][label] = process_value(data_point.data) + confidence[index][label] = CONFIDENCES.get(data_point.reliability, None) -def _process_timeseries(result, columns): - pass - # for row in result: - # for column in columns: - # column.process(row) - # return result + return result, confidence diff --git a/src/sentry/snuba/transactions.py b/src/sentry/snuba/transactions.py index 6a59ac6738bda2..85396238623389 100644 --- a/src/sentry/snuba/transactions.py +++ b/src/sentry/snuba/transactions.py @@ -89,6 +89,7 @@ def timeseries_query( on_demand_metrics_type=None, query_source: QuerySource | None = None, fallback_to_transactions: bool = False, + transform_alias_to_input_format: bool = False, ) -> SnubaTSResult: """ High-level API for doing arbitrary user timeseries queries against events. @@ -110,6 +111,7 @@ def timeseries_query( on_demand_metrics_type=on_demand_metrics_type, dataset=Dataset.Transactions, query_source=query_source, + transform_alias_to_input_format=transform_alias_to_input_format, ) diff --git a/src/sentry/snuba/utils.py b/src/sentry/snuba/utils.py index 8b31efcf74a8b3..c48bcfdbbd819e 100644 --- a/src/sentry/snuba/utils.py +++ b/src/sentry/snuba/utils.py @@ -9,7 +9,6 @@ issue_platform, metrics_enhanced_performance, metrics_performance, - profile_functions_metrics, profiles, spans_eap, spans_indexed, @@ -32,7 +31,6 @@ "spansIndexed": spans_indexed, "spansMetrics": spans_metrics, "transactions": transactions, - "profileFunctionsMetrics": profile_functions_metrics, } DATASET_LABELS = {value: key for key, value in DATASET_OPTIONS.items()} diff --git a/src/sentry/spans/consumers/process/factory.py b/src/sentry/spans/consumers/process/factory.py index 4f76b504ab073f..2ae1701f3af752 100644 --- a/src/sentry/spans/consumers/process/factory.py +++ b/src/sentry/spans/consumers/process/factory.py @@ -2,6 +2,7 @@ import logging from collections import defaultdict from collections.abc import Mapping +from datetime import datetime from typing import Any import orjson @@ -15,7 +16,15 @@ from arroyo.processing.strategies.produce import Produce from arroyo.processing.strategies.run_task import RunTask from arroyo.processing.strategies.unfold import Unfold -from arroyo.types import FILTERED_PAYLOAD, BrokerValue, Commit, FilteredPayload, Message, Partition +from arroyo.types import ( + FILTERED_PAYLOAD, + BrokerValue, + Commit, + FilteredPayload, + Message, + Partition, + Value, +) from sentry_kafka_schemas.codecs import Codec from sentry_kafka_schemas.schema_types.snuba_spans_v1 import SpanEvent @@ -196,7 +205,7 @@ def batch_write_to_redis( def _expand_segments(should_process_segments: list[ProcessSegmentsContext]): with sentry_sdk.start_transaction(op="process", name="spans.process.expand_segments") as txn: - buffered_segments: list[KafkaPayload | FilteredPayload] = [] + buffered_segments: list[Value] = [] for result in should_process_segments: timestamp = result.timestamp @@ -235,7 +244,13 @@ def _expand_segments(should_process_segments: list[ProcessSegmentsContext]): metrics.incr("performance.buffered_segments.max_payload_size_exceeded") continue - buffered_segments.append(KafkaPayload(None, payload_data, [])) + buffered_segments.append( + Value( + KafkaPayload(None, payload_data, []), + {}, 
+ datetime.fromtimestamp(timestamp), + ) + ) return buffered_segments diff --git a/src/sentry/static/sentry/images/logos/logo-cloudflare-worker.svg b/src/sentry/static/sentry/images/logos/logo-cloudflare-worker.svg new file mode 100644 index 00000000000000..d54d7c285bc94d --- /dev/null +++ b/src/sentry/static/sentry/images/logos/logo-cloudflare-worker.svg @@ -0,0 +1 @@ + diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index 0f13ae3272a148..8f0bf74cb0c826 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -6,6 +6,7 @@ from datetime import timedelta, timezone from typing import Any +import sentry_sdk from dateutil.parser import parse as parse_datetime from django.core.cache import cache from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME @@ -309,11 +310,21 @@ def __get_tag_keys_for_projects( # Cause there's rounding to create this cache suffix, we want to update the query end so results match end = snuba.quantize_time(end, key_hash) cache_key += f":{duration}@{end.isoformat()}" - result = cache.get(cache_key, None) - if result is not None: - metrics.incr("testing.tagstore.cache_tag_key.hit") - else: - metrics.incr("testing.tagstore.cache_tag_key.miss") + + with sentry_sdk.start_span( + op="cache.get", name="sentry.tagstore.cache.__get_tag_keys_for_projects" + ) as span: + result = cache.get(cache_key, None) + + span.set_data("cache.key", [cache_key]) + + if result is not None: + span.set_data("cache.hit", True) + span.set_data("cache.item_size", len(str(result))) + metrics.incr("testing.tagstore.cache_tag_key.hit") + else: + span.set_data("cache.hit", False) + metrics.incr("testing.tagstore.cache_tag_key.miss") if result is None: result = snuba.query( @@ -330,8 +341,13 @@ def __get_tag_keys_for_projects( **kwargs, ) if should_cache: - cache.set(cache_key, result, 300) - metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) + with sentry_sdk.start_span( + op="cache.put", name="sentry.tagstore.cache.__get_tag_keys_for_projects" + ) as span: + cache.set(cache_key, result, 300) + span.set_data("cache.key", [cache_key]) + span.set_data("cache.item_size", len(str(result))) + metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) if group is None: ctor = TagKey @@ -435,12 +451,12 @@ def get_tag_keys_for_projects( # We want to disable FINAL in the snuba query to reduce load. optimize_kwargs = {"turbo": True} - # Add static sample amount to the query. Turbo will sample at 10% by - # default, but organizations with many events still get timeouts. A - # static sample creates more consistent performance. organization_id = get_organization_id_from_project_ids(projects) organization = Organization.objects.get_from_cache(id=organization_id) if features.has("organizations:tag-key-sample-n", organization): + # Add static sample amount to the query. Turbo will sample at 10% by + # default, but organizations with many events still get timeouts. A + # static sample creates more consistent performance. optimize_kwargs["sample"] = options.get("visibility.tag-key-sample-size") # If we are fetching less than max_unsampled_projects, then disable @@ -449,7 +465,7 @@ def get_tag_keys_for_projects( # that we don't cause performance issues for Snuba. # We also see issues with long timeranges in large projects, # So only disable sampling if the timerange is short enough. 
- if len(projects) <= max_unsampled_projects and end - start <= timedelta(days=14): + elif len(projects) <= max_unsampled_projects and end - start <= timedelta(days=14): optimize_kwargs["sample"] = 1 # Replays doesn't support sampling. diff --git a/src/sentry/tasks/derive_code_mappings.py b/src/sentry/tasks/derive_code_mappings.py index 009d8793bb83d1..ca79f41e311a65 100644 --- a/src/sentry/tasks/derive_code_mappings.py +++ b/src/sentry/tasks/derive_code_mappings.py @@ -2,6 +2,7 @@ import logging from collections.abc import Mapping +from enum import StrEnum from typing import TYPE_CHECKING, Any from sentry_sdk import set_tag, set_user @@ -9,8 +10,13 @@ from sentry import features from sentry.constants import ObjectStatus from sentry.db.models.fields.node import NodeData +from sentry.integrations.github.integration import GitHubIntegration from sentry.integrations.models.repository_project_path_config import RepositoryProjectPathConfig from sentry.integrations.services.integration import RpcOrganizationIntegration, integration_service +from sentry.integrations.source_code_management.metrics import ( + SCMIntegrationInteractionEvent, + SCMIntegrationInteractionType, +) from sentry.integrations.utils.code_mapping import CodeMapping, CodeMappingTreesHelper from sentry.locks import locks from sentry.models.organization import Organization @@ -29,6 +35,12 @@ from sentry.integrations.base import IntegrationInstallation +class DeriveCodeMappingsErrorReason(StrEnum): + UNEXPECTED_ERROR = "Unexpected error type while calling `get_trees_for_org()`." + LOCK_FAILED = "Failed to acquire lock" + EMPTY_TREES = "The trees are empty." + + def process_error(error: ApiError, extra: dict[str, str]) -> None: """Log known issues and report unknown ones""" if error.json: @@ -115,24 +127,30 @@ def derive_code_mappings( # Acquire the lock for a maximum of 10 minutes lock = locks.get(key=f"get_trees_for_org:{org.slug}", duration=60 * 10, name="process_pending") - try: - with lock.acquire(): - # This method is specific to the GithubIntegration - trees = installation.get_trees_for_org() # type: ignore[attr-defined] - except ApiError as error: - process_error(error, extra) - return - except UnableToAcquireLock as error: - extra["error"] = error - logger.warning("derive_code_mappings.getting_lock_failed", extra=extra) - return - except Exception: - logger.exception("Unexpected error type while calling `get_trees_for_org()`.", extra=extra) - return + with SCMIntegrationInteractionEvent( + SCMIntegrationInteractionType.DERIVE_CODEMAPPINGS, provider_key=installation.model.provider + ).capture() as lifecycle: + try: + with lock.acquire(): + # This method is specific to the GithubIntegration + if not isinstance(installation, GitHubIntegration): + return + trees = installation.get_trees_for_org() + except ApiError as error: + process_error(error, extra) + lifecycle.record_halt(error, extra) + return + except UnableToAcquireLock as error: + extra["error"] = error + lifecycle.record_failure(error, extra) + return + except Exception: + lifecycle.record_failure(DeriveCodeMappingsErrorReason.UNEXPECTED_ERROR, extra=extra) + return - if not trees: - logger.warning("The trees are empty.", extra=extra) - return + if not trees: + lifecycle.record_halt(DeriveCodeMappingsErrorReason.EMPTY_TREES, extra=extra) + return trees_helper = CodeMappingTreesHelper(trees) code_mappings = trees_helper.generate_code_mappings(stacktrace_paths) diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py 
index 7dd7b95c129981..b4606837d64c9e 100644 --- a/src/sentry/tasks/embeddings_grouping/utils.py +++ b/src/sentry/tasks/embeddings_grouping/utils.py @@ -32,9 +32,10 @@ SimilarHashNotFoundError, ) from sentry.seer.similarity.utils import ( + ReferrerOptions, event_content_has_stacktrace, filter_null_from_string, - get_stacktrace_string, + get_stacktrace_string_with_metrics, ) from sentry.snuba.dataset import Dataset from sentry.snuba.referrer import Referrer @@ -355,8 +356,10 @@ def get_events_from_nodestore( event._project_cache = project if event and event.data and event_content_has_stacktrace(event): grouping_info = get_grouping_info(None, project=project, event=event) - stacktrace_string = get_stacktrace_string(grouping_info) - if stacktrace_string == "": + stacktrace_string = get_stacktrace_string_with_metrics( + grouping_info, event.platform, ReferrerOptions.BACKFILL + ) + if not stacktrace_string: invalid_event_group_ids.append(group_id) continue primary_hash = event.get_primary_hash() diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index dc0cf1bf94794c..e5b0cfc2c320e5 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -13,9 +13,10 @@ from django.utils import timezone from google.api_core.exceptions import ServiceUnavailable -from sentry import features, projectoptions +from sentry import features, options, projectoptions from sentry.eventstream.types import EventStreamEventType from sentry.exceptions import PluginError +from sentry.features.rollout import in_rollout_group from sentry.issues.grouptype import GroupCategory from sentry.issues.issue_occurrence import IssueOccurrence from sentry.killswitches import killswitch_matches_context @@ -30,6 +31,7 @@ from sentry.utils import json, metrics from sentry.utils.cache import cache from sentry.utils.event_frames import get_sdk_name +from sentry.utils.event_tracker import TransactionStageStatus, track_sampled_event from sentry.utils.locking import UnableToAcquireLock from sentry.utils.locking.backends import LockBackend from sentry.utils.locking.manager import LockManager @@ -480,6 +482,17 @@ def should_update_escalating_metrics(event: Event, is_transaction_event: bool) - ) +def _get_event_id_from_cache_key(cache_key: str) -> str | None: + """ + format is "e:{}:{}",event_id,project_id + """ + + try: + return cache_key.split(":")[1] + except IndexError: + return None + + @instrumented_task( name="sentry.tasks.post_process.post_process_group", time_limit=120, @@ -501,6 +514,7 @@ def post_process_group( """ Fires post processing hooks for a group. """ + from sentry.ingest.types import ConsumerType from sentry.utils import snuba with snuba.options_override({"consistent": True}): @@ -527,6 +541,18 @@ def post_process_group( # need to rewind history. 
data = processing_store.get(cache_key) if not data: + event_id = _get_event_id_from_cache_key(cache_key) + if event_id: + if in_rollout_group( + "transactions.do_post_process_in_save", + event_id, + ): + # if we're doing the work for transactions in save_event_transaction + # instead of here, this is expected, so simply increment a metric + # instead of logging + metrics.incr("post_process.skipped_do_post_process_in_save") + return + logger.info( "post_process.skipped", extra={"cache_key": cache_key, "reason": "missing_cache"}, @@ -534,7 +560,12 @@ def post_process_group( return with metrics.timer("tasks.post_process.delete_event_cache"): processing_store.delete_by_key(cache_key) - + if eventstream_type == EventStreamEventType.Transaction.value: + track_sampled_event( + data["event_id"], + ConsumerType.Transactions, + TransactionStageStatus.REDIS_DELETED, + ) occurrence = None event = process_event(data, group_id) else: @@ -619,6 +650,11 @@ def get_event_raise_exception() -> Event: project=event.project, event=event, ) + track_sampled_event( + event.event_id, + ConsumerType.Transactions, + TransactionStageStatus.POST_PROCESS_FINISHED, + ) metric_tags = {} if group_id: @@ -1205,8 +1241,10 @@ def process_plugins(job: PostProcessJob) -> None: def process_similarity(job: PostProcessJob) -> None: - if job["is_reprocessed"] or features.has( - "projects:similarity-embeddings", job["event"].group.project + if not options.get("sentry.similarity.indexing.enabled"): + return + if job["is_reprocessed"] or job["event"].group.project.get_option( + "sentry:similarity_backfill_completed" ): return diff --git a/src/sentry/tasks/relocation.py b/src/sentry/tasks/relocation.py index 545ec775a1f14a..96ca59b860b630 100644 --- a/src/sentry/tasks/relocation.py +++ b/src/sentry/tasks/relocation.py @@ -24,6 +24,7 @@ from sentry.api.helpers.slugs import validate_sentry_slug from sentry.api.serializers.rest_framework.base import camel_to_snake_case, convert_dict_key_case from sentry.backup.crypto import ( + EncryptorDecryptorPair, GCPKMSDecryptor, GCPKMSEncryptor, LocalFileEncryptor, @@ -72,6 +73,7 @@ TASK_TO_STEP, LoggingPrinter, OrderedTask, + StorageBackedCheckpointExporter, create_cloudbuild_yaml, fail_relocation, get_relocations_bucket_name, @@ -306,12 +308,12 @@ def uploading_start(uuid: UUID, replying_region_name: str | None, org_slug: str max_retries=4, retry_backoff=30, retry_backoff_jitter=True, - # Setting `acks_late` + `task_reject_on_worker_lost` here allows us to retry the potentially + # Setting `acks_late` + `reject_on_worker_lost` here allows us to retry the potentially # long-lived task if the k8s pod of the worker received SIGKILL/TERM/QUIT (or we ran out of some # other resource, leading to the same outcome). We have a timeout check at the very start of the # task itself to make sure it does not loop indefinitely. acks_late=True, - task_reject_on_worker_lost=True, + reject_on_worker_lost=True, # 10 minutes per try. 
soft_time_limit=60 * 10, silo_mode=SiloMode.REGION, @@ -376,6 +378,16 @@ def fulfill_cross_region_export_request( encryptor=LocalFileEncryptor(BytesIO(encrypt_with_public_key)), org_filter={org_slug}, printer=LoggingPrinter(uuid), + checkpointer=StorageBackedCheckpointExporter( + crypto=EncryptorDecryptorPair( + encryptor=GCPKMSEncryptor.from_crypto_key_version(get_default_crypto_key_version()), + decryptor=GCPKMSDecryptor.from_bytes( + json.dumps(get_default_crypto_key_version()).encode("utf-8") + ), + ), + uuid=uuid, + storage=relocation_storage, + ), ) logger.info( "fulfill_cross_region_export_request: exported", diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py index 3390242d43441c..acc63ab4a7a6d6 100644 --- a/src/sentry/tasks/statistical_detectors.py +++ b/src/sentry/tasks/statistical_detectors.py @@ -587,6 +587,9 @@ def emit_function_regression_issue( } ) + if not payloads: + return 0 + response = get_from_profiling_service(method="POST", path="/regressed", json_data=payloads) if response.status != 200: return 0 diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index 64e030b597bb0f..fa110fa856b386 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -16,6 +16,7 @@ from sentry.constants import DEFAULT_STORE_NORMALIZER_ARGS from sentry.datascrubbing import scrub_data from sentry.eventstore import processing +from sentry.features.rollout import in_rollout_group from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, create_feedback_issue from sentry.ingest.types import ConsumerType from sentry.killswitches import killswitch_matches_context @@ -26,6 +27,7 @@ from sentry.stacktraces.processing import process_stacktraces, should_process_for_stacktraces from sentry.tasks.base import instrumented_task from sentry.utils import metrics +from sentry.utils.event_tracker import TransactionStageStatus, track_sampled_event from sentry.utils.safe import safe_execute from sentry.utils.sdk import set_current_event_project @@ -582,6 +584,16 @@ def _do_save_event( raise finally: + if ( + consumer_type == ConsumerType.Transactions + and event_id + and in_rollout_group("transactions.do_post_process_in_save", event_id) + ): + # we won't use the transaction data in post_process + # so we can delete it from the cache now. 
+ if cache_key: + processing_store.delete_by_key(cache_key) + reprocessing2.mark_event_reprocessed(data) if cache_key and has_attachments: attachment_cache.delete(cache_key) @@ -639,6 +651,10 @@ def save_event_transaction( project_id: int | None = None, **kwargs: Any, ) -> None: + if event_id: + track_sampled_event( + event_id, ConsumerType.Transactions, TransactionStageStatus.SAVE_TXN_STARTED + ) _do_save_event( cache_key, data, @@ -648,6 +664,10 @@ def save_event_transaction( consumer_type=ConsumerType.Transactions, **kwargs, ) + if event_id: + track_sampled_event( + event_id, ConsumerType.Transactions, TransactionStageStatus.SAVE_TXN_FINISHED + ) @instrumented_task( diff --git a/src/sentry/tasks/symbolication.py b/src/sentry/tasks/symbolication.py index 7060b18de49a4f..24f66936734649 100644 --- a/src/sentry/tasks/symbolication.py +++ b/src/sentry/tasks/symbolication.py @@ -58,7 +58,7 @@ def get_symbolication_platforms( platforms = [] - if is_jvm_event(data): + if is_jvm_event(data, stacktraces): platforms.append(SymbolicatorPlatform.jvm) if is_js_event(data, stacktraces): platforms.append(SymbolicatorPlatform.js) diff --git a/src/sentry/taskworker/client.py b/src/sentry/taskworker/client.py new file mode 100644 index 00000000000000..bb1549d4d3aa4a --- /dev/null +++ b/src/sentry/taskworker/client.py @@ -0,0 +1,65 @@ +import logging + +import grpc +from sentry_protos.sentry.v1.taskworker_pb2 import ( + GetTaskRequest, + SetTaskStatusRequest, + TaskActivation, + TaskActivationStatus, +) +from sentry_protos.sentry.v1.taskworker_pb2_grpc import ConsumerServiceStub + +logger = logging.getLogger("sentry.taskworker.client") + + +class TaskworkerClient: + """ + Taskworker RPC client wrapper + """ + + def __init__(self, host: str) -> None: + self._host = host + + # TODO(taskworker) Need to support xds bootstrap file + self._channel = grpc.insecure_channel(self._host) + self._stub = ConsumerServiceStub(self._channel) + + def get_task(self) -> TaskActivation | None: + """ + Fetch a pending task + + Will return None when there are no tasks to fetch + """ + request = GetTaskRequest() + try: + response = self._stub.GetTask(request) + except grpc.RpcError as err: + if err.code() == grpc.StatusCode.NOT_FOUND: + return None + raise + if response.HasField("task"): + return response.task + return None + + def update_task( + self, task_id: str, status: TaskActivationStatus.ValueType, fetch_next: bool = True + ) -> TaskActivation | None: + """ + Update the status for a given task activation. + + The return value is the next task that should be executed. 
+ """ + request = SetTaskStatusRequest( + id=task_id, + status=status, + fetch_next=fetch_next, + ) + try: + response = self._stub.SetTaskStatus(request) + except grpc.RpcError as err: + if err.code() == grpc.StatusCode.NOT_FOUND: + return None + raise + if response.HasField("task"): + return response.task + return None diff --git a/src/sentry/taskworker/registry.py b/src/sentry/taskworker/registry.py index 1765b39ca570ee..33423dbf2f3150 100644 --- a/src/sentry/taskworker/registry.py +++ b/src/sentry/taskworker/registry.py @@ -16,6 +16,7 @@ from sentry.taskworker.retry import Retry from sentry.taskworker.router import TaskRouter from sentry.taskworker.task import P, R, Task +from sentry.utils import metrics from sentry.utils.imports import import_string from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition @@ -70,19 +71,44 @@ def register( retry: Retry | None = None, expires: int | datetime.timedelta | None = None, processing_deadline_duration: int | datetime.timedelta | None = None, + at_most_once: bool = False, ) -> Callable[[Callable[P, R]], Task[P, R]]: - """register a task, used as a decorator""" + """ + Register a task. + + Applied as a decorator to functions to enable them to be run + asynchronously via taskworkers. + + Parameters + + name: str + The name of the task. This is serialized and must be stable across deploys. + retry: Retry | None + The retry policy for the task. If none and at_most_once is not enabled + the Task namespace default retry policy will be used. + expires: int | datetime.timedelta + The number of seconds a task activation is valid for. After this + duration the activation will be discarded and not executed. + at_most_once : bool + Enable at-most-once execution. Tasks with `at_most_once` cannot + define retry policies, and use a worker side idempotency key to + prevent processing deadline based retries. + """ def wrapped(func: Callable[P, R]) -> Task[P, R]: + task_retry = retry + if not at_most_once: + task_retry = retry or self.default_retry task = Task( name=name, func=func, namespace=self, - retry=retry or self.default_retry, + retry=task_retry, expires=expires or self.default_expires, processing_deadline_duration=( processing_deadline_duration or self.default_processing_deadline_duration ), + at_most_once=at_most_once, ) # TODO(taskworker) tasks should be registered into the registry # so that we can ensure task names are globally unique @@ -92,6 +118,7 @@ def wrapped(func: Callable[P, R]) -> Task[P, R]: return wrapped def send_task(self, activation: TaskActivation) -> None: + metrics.incr("taskworker.registry.send_task", tags={"namespace": activation.namespace}) # TODO(taskworker) producer callback handling self.producer.produce( ArroyoTopic(name=self.topic.value), diff --git a/src/sentry/taskworker/retry.py b/src/sentry/taskworker/retry.py index 7915d09ebaad65..d9e80bfb3f5016 100644 --- a/src/sentry/taskworker/retry.py +++ b/src/sentry/taskworker/retry.py @@ -35,8 +35,10 @@ def __init__( self._times_exceeded = times_exceeded def should_retry(self, state: RetryState, exc: Exception) -> bool: - # No more attempts left - if state.attempts >= self._times: + # No more attempts left. + # We subtract one, as attempts starts at 0, but `times` + # starts at 1. + if state.attempts >= (self._times - 1): return False # Explicit RetryError with attempts left. 
diff --git a/src/sentry/taskworker/service/client.py b/src/sentry/taskworker/service/client.py deleted file mode 100644 index c8099084ea0d75..00000000000000 --- a/src/sentry/taskworker/service/client.py +++ /dev/null @@ -1,29 +0,0 @@ -import logging - -from sentry_protos.sentry.v1.taskworker_pb2 import TaskActivation, TaskActivationStatus - -logger = logging.getLogger("sentry.taskworker.client") - - -class TaskClient: - """ - Taskworker RPC client wrapper - - TODO(taskworker): Implement gRPC client logic. - """ - - def __init__(self, host: str) -> None: - self._host = host - - def get_task(self) -> TaskActivation | None: - return None - - def update_task( - self, task_id: str, status: TaskActivationStatus.ValueType - ) -> TaskActivation | None: - """ - Update the status for a given task activation. - - The return value is the next task that should be executed. - """ - return None diff --git a/src/sentry/taskworker/task.py b/src/sentry/taskworker/task.py index 0d60884d850406..9b7525b0205046 100644 --- a/src/sentry/taskworker/task.py +++ b/src/sentry/taskworker/task.py @@ -32,16 +32,25 @@ def __init__( retry: Retry | None = None, expires: int | datetime.timedelta | None = None, processing_deadline_duration: int | datetime.timedelta | None = None, + at_most_once: bool = False, ): - # TODO(taskworker) Implement task execution deadlines self.name = name self._func = func self._namespace = namespace - self._retry = retry self._expires = expires self._processing_deadline_duration = ( processing_deadline_duration or DEFAULT_PROCESSING_DEADLINE ) + if at_most_once and retry: + raise AssertionError( + """ + You cannot enable at_most_once and have retries defined. + Having retries enabled means that a task may be executed + more than once, which conflicts with at-most-once execution. + """ + ) + self._retry = retry + self.at_most_once = at_most_once update_wrapper(self, func) @property @@ -85,13 +94,15 @@ def create_activation(self, *args: P.args, **kwargs: P.kwargs) -> TaskActivation def _create_retry_state(self) -> RetryState: retry = self.retry or self._namespace.default_retry or None - if not retry: + if not retry or self.at_most_once: # If the task and namespace have no retry policy, - # make a single attempt and then discard the task. + # or can only be attempted once, make a single + # attempt and then discard the task.
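# Illustrative note (not part of this change): for a task registered with
# at_most_once=True, e.g.
#
#     @exampletasks.register(name="examples.send_once", at_most_once=True)
#     def send_once() -> None: ...
#
# this branch is taken even when the namespace defines a default retry policy, so the
# activation is created with a RetryState that permits exactly one attempt
# (attempts=0, discard_after_attempt=1, at_most_once=True) and is then discarded.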
return RetryState( attempts=0, kind="sentry.taskworker.retry.Retry", discard_after_attempt=1, + at_most_once=self.at_most_once, ) return retry.initial_state() diff --git a/src/sentry/taskworker/tasks/examples.py b/src/sentry/taskworker/tasks/examples.py new file mode 100644 index 00000000000000..2e077d70e9b88b --- /dev/null +++ b/src/sentry/taskworker/tasks/examples.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +import logging + +from sentry.taskworker.registry import taskregistry + +logger = logging.getLogger(__name__) +exampletasks = taskregistry.create_namespace(name="examples") + + +@exampletasks.register(name="examples.say_hello") +def say_hello(name: str) -> None: + print(f"Hello {name}") # noqa diff --git a/src/sentry/taskworker/worker.py b/src/sentry/taskworker/worker.py index fda5b4055e9def..bef131a61298de 100644 --- a/src/sentry/taskworker/worker.py +++ b/src/sentry/taskworker/worker.py @@ -11,6 +11,7 @@ import grpc import orjson from django.conf import settings +from django.core.cache import cache from sentry_protos.sentry.v1.taskworker_pb2 import ( TASK_ACTIVATION_STATUS_COMPLETE, TASK_ACTIVATION_STATUS_FAILURE, @@ -18,8 +19,10 @@ TaskActivation, ) +from sentry.taskworker.client import TaskworkerClient from sentry.taskworker.registry import taskregistry -from sentry.taskworker.service.client import TaskClient +from sentry.taskworker.task import Task +from sentry.utils import metrics logger = logging.getLogger("sentry.taskworker.worker") @@ -34,6 +37,14 @@ def _process_activation( taskregistry.get(namespace).get(task_name)(*args, **kwargs) +AT_MOST_ONCE_TIMEOUT = 60 * 60 * 24 # 1 day + + +def get_at_most_once_key(namespace: str, taskname: str, task_id: str) -> str: + # tw:amo -> taskworker:at_most_once + return f"tw:amo:{namespace}:{taskname}:{task_id}" + + class TaskWorker: """ A TaskWorker fetches tasks from a taskworker RPC host and handles executing task activations. @@ -52,7 +63,7 @@ def __init__( self._execution_count = 0 self._worker_id = uuid4().hex self._max_task_count = max_task_count - self.client = TaskClient(rpc_host) + self.client = TaskworkerClient(rpc_host) self._pool: Pool | None = None self._build_pool() @@ -89,6 +100,7 @@ def start(self) -> int: task = self.fetch_task() if not task: + metrics.incr("taskworker.worker.no_task.pause") time.sleep(1) continue @@ -97,6 +109,10 @@ def start(self) -> int: self._max_task_count is not None and self._max_task_count <= self._execution_count ): + metrics.incr( + "taskworker.worker.max_task_count_reached", + tags={"count": self._execution_count}, + ) logger.info("Max task execution count reached. Terminating") return 0 @@ -110,22 +126,25 @@ def fetch_task(self) -> TaskActivation | None: try: activation = self.client.get_task() except grpc.RpcError: + metrics.incr("taskworker.worker.get_task.failed") logger.info("get_task failed. 
Retrying in 1 second") return None if not activation: + metrics.incr("taskworker.worker.get_task.not_found") logger.info("No task fetched") return None + metrics.incr("taskworker.worker.get_task.success") return activation - def _known_task(self, activation: TaskActivation) -> bool: + def _get_known_task(self, activation: TaskActivation) -> Task[Any, Any] | None: if not taskregistry.contains(activation.namespace): logger.error( "taskworker.invalid_namespace", extra={"namespace": activation.namespace, "taskname": activation.taskname}, ) - return False + return None namespace = taskregistry.get(activation.namespace) if not namespace.contains(activation.taskname): @@ -133,19 +152,35 @@ def _known_task(self, activation: TaskActivation) -> bool: "taskworker.invalid_taskname", extra={"namespace": activation.namespace, "taskname": activation.taskname}, ) - return False - return True + return None + return namespace.get(activation.taskname) def process_task(self, activation: TaskActivation) -> TaskActivation | None: assert self._pool - if not self._known_task(activation): + task = self._get_known_task(activation) + if not task: + metrics.incr( + "taskworker.worker.unknown_task", + tags={"namespace": activation.namespace, "taskname": activation.taskname}, + ) self._execution_count += 1 return self.client.update_task( task_id=activation.id, status=TASK_ACTIVATION_STATUS_FAILURE, ) - # TODO(taskworker): Add at_most_once checks + if task.at_most_once: + key = get_at_most_once_key(activation.namespace, activation.taskname, activation.id) + if cache.add(key, "1", timeout=AT_MOST_ONCE_TIMEOUT): # The key didn't exist + metrics.incr( + "taskworker.task.at_most_once.executed", tags={"task": activation.taskname} + ) + else: + metrics.incr( + "taskworker.worker.at_most_once.skipped", tags={"task": activation.taskname} + ) + return None + processing_timeout = activation.processing_deadline_duration namespace = taskregistry.get(activation.namespace) next_state = TASK_ACTIVATION_STATUS_FAILURE @@ -193,15 +228,35 @@ def process_task(self, activation: TaskActivation) -> TaskActivation | None: self._execution_count += 1 task_added_time = activation.received_at.ToDatetime().timestamp() + execution_duration = execution_complete_time - execution_start_time + execution_latency = execution_complete_time - task_added_time logger.info( "taskworker.task_execution", extra={ "taskname": activation.taskname, - "execution_duration": execution_complete_time - execution_start_time, - "execution_latency": execution_complete_time - task_added_time, + "execution_duration": execution_duration, + "execution_latency": execution_latency, + "status": next_state, + }, + ) + metrics.incr( + "taskworker.worker.execute_task", + tags={ + "namespace": activation.namespace, "status": next_state, }, ) + metrics.distribution( + "taskworker.worker.execution_duration", + execution_duration, + tags={"namespace": activation.namespace}, + ) + metrics.distribution( + "taskworker.worker.execution_latency", + execution_latency, + tags={"namespace": activation.namespace}, + ) + return self.client.update_task( task_id=activation.id, status=next_state, diff --git a/src/sentry/templates/sentry/layout.html b/src/sentry/templates/sentry/layout.html index 17a2783c12755e..ea15abda02dfa6 100644 --- a/src/sentry/templates/sentry/layout.html +++ b/src/sentry/templates/sentry/layout.html @@ -3,7 +3,6 @@ {% load sentry_assets %} {% load sentry_features %} {% load sentry_helpers %} -{% load sentry_react %} {% load sentry_status %} {% get_sentry_version %} @@ -41,7 
+40,7 @@ {% block initial_data %} {% script %} {% endscript %} {% endblock %} diff --git a/src/sentry/templates/sentry/toolbar/iframe.html b/src/sentry/templates/sentry/toolbar/iframe.html index 83ecbedba7f457..f35185ab3311a1 100644 --- a/src/sentry/templates/sentry/toolbar/iframe.html +++ b/src/sentry/templates/sentry/toolbar/iframe.html @@ -20,7 +20,7 @@ {% endscript %} diff --git a/src/sentry/templates/sentry/toolbar/login-success.html b/src/sentry/templates/sentry/toolbar/login-success.html index f78c1ee5a05660..83af6518d00f51 100644 --- a/src/sentry/templates/sentry/toolbar/login-success.html +++ b/src/sentry/templates/sentry/toolbar/login-success.html @@ -1,45 +1,57 @@ -{# Auth redirect template for Dev Toolbar. Returned after successfully logging in to a requested organization. #} +{% extends "sentry/bases/auth.html" %} + {% load sentry_assets %} - - - - Sentry - Login Success - - - -
-      You are logged in!
-      This window will automatically close after {{ delay_sec }} seconds. If not then check the console for errors.
- - {% script %} - - {% endscript %} - - + } + })(); + + {% endscript %} +{% endblock %} diff --git a/src/sentry/testutils/asserts.py b/src/sentry/testutils/asserts.py index baf295c83fb78c..062fd52e90a127 100644 --- a/src/sentry/testutils/asserts.py +++ b/src/sentry/testutils/asserts.py @@ -1,5 +1,6 @@ from django.http import StreamingHttpResponse +from sentry.integrations.types import EventLifecycleOutcome from sentry.models.auditlogentry import AuditLogEntry from sentry.models.commitfilechange import CommitFileChange from sentry.silo.base import SiloMode @@ -60,3 +61,35 @@ def assert_org_audit_log_does_not_exist(**kwargs): def delete_all_org_audit_logs(): return AuditLogEntry.objects.all().delete() + + +""" +Helper functions to assert integration SLO metrics +""" + + +def assert_halt_metric(mock_record, error_msg): + (event_halts,) = ( + call for call in mock_record.mock_calls if call.args[0] == EventLifecycleOutcome.HALTED + ) + if isinstance(error_msg, Exception): + assert isinstance(event_halts.args[1], type(error_msg)) + else: + assert event_halts.args[1] == error_msg + + +def assert_failure_metric(mock_record, error_msg): + (event_failures,) = ( + call for call in mock_record.mock_calls if call.args[0] == EventLifecycleOutcome.FAILURE + ) + if isinstance(error_msg, Exception): + assert isinstance(event_failures.args[1], type(error_msg)) + else: + assert event_failures.args[1] == error_msg + + +def assert_success_metric(mock_record): + event_success = ( + call for call in mock_record.mock_calls if call.args[0] == EventLifecycleOutcome.SUCCESS + ) + assert event_success diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index 9275f344ad0f09..5d33c5ccdefab6 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -99,8 +99,6 @@ from sentry.models.environment import Environment from sentry.models.files.file import File from sentry.models.groupmeta import GroupMeta -from sentry.models.notificationsettingoption import NotificationSettingOption -from sentry.models.notificationsettingprovider import NotificationSettingProvider from sentry.models.options.project_option import ProjectOption from sentry.models.organization import Organization from sentry.models.organizationmember import OrganizationMember @@ -110,6 +108,8 @@ from sentry.models.repository import Repository from sentry.models.rule import RuleSource from sentry.monitors.models import Monitor, MonitorEnvironment, MonitorType, ScheduleType +from sentry.notifications.models.notificationsettingoption import NotificationSettingOption +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider from sentry.notifications.notifications.base import alert_page_needs_org_id from sentry.notifications.types import FineTuningAPIKey from sentry.organizations.services.organization.serial import serialize_rpc_organization @@ -124,7 +124,6 @@ METRIC_SATISFIED_TAG_VALUE, METRIC_TOLERATED_TAG_VALUE, METRICS_MAP, - PROFILE_METRICS_MAP, SPAN_METRICS_MAP, ) from sentry.sentry_metrics import indexer @@ -1407,43 +1406,6 @@ def store_issues(self, issues): == 200 ) - def store_metrics_summary(self, span): - common_fields = { - "duration_ms": span["duration_ms"], - "end_timestamp": (span["start_timestamp_ms"] + span["duration_ms"]) / 1000, - "group": span["sentry_tags"].get("group", "0"), - "is_segment": span["is_segment"], - "project_id": span["project_id"], - "received": span["received"], - "retention_days": span["retention_days"], - "segment_id": span.get("segment_id", 
"0"), - "span_id": span["span_id"], - "trace_id": span["trace_id"], - } - rows = [] - for mri, summaries in span.get("_metrics_summary", {}).items(): - for summary in summaries: - rows.append( - { - **common_fields, - **{ - "count": summary.get("count", 0), - "max": summary.get("max", 0.0), - "mri": mri, - "min": summary.get("min", 0.0), - "sum": summary.get("sum", 0.0), - "tags": summary.get("tags", {}), - }, - } - ) - assert ( - requests.post( - settings.SENTRY_SNUBA + "/tests/entities/metrics_summaries/insert", - data=json.dumps(rows), - ).status_code - == 200 - ) - def to_snuba_time_format(self, datetime_value): date_format = "%Y-%m-%d %H:%M:%S%z" return datetime_value.strftime(date_format) @@ -1531,7 +1493,6 @@ def store_segment( tags: Mapping[str, Any] | None = None, measurements: Mapping[str, int | float] | None = None, timestamp: datetime | None = None, - store_metrics_summary: Mapping[str, Sequence[Mapping[str, Any]]] | None = None, sdk_name: str | None = None, op: str | None = None, status: str | None = None, @@ -1570,8 +1531,6 @@ def store_segment( payload["measurements"] = { measurement: {"value": value} for measurement, value in measurements.items() } - if store_metrics_summary: - payload["_metrics_summary"] = store_metrics_summary if parent_span_id: payload["parent_span_id"] = parent_span_id if sdk_name is not None: @@ -1583,9 +1542,6 @@ def store_segment( self.store_span(payload, is_eap=is_eap) - if "_metrics_summary" in payload: - self.store_metrics_summary(payload) - def store_indexed_span( self, project_id: int, @@ -1602,7 +1558,6 @@ def store_indexed_span( measurements: Mapping[str, int | float] | None = None, timestamp: datetime | None = None, store_only_summary: bool = False, - store_metrics_summary: Mapping[str, Sequence[Mapping[str, Any]]] | None = None, group: str = "00", category: str | None = None, organization_id: int = 1, @@ -1644,8 +1599,6 @@ def store_indexed_span( payload["segment_id"] = transaction_id[:16] if profile_id: payload["profile_id"] = profile_id - if store_metrics_summary: - payload["_metrics_summary"] = store_metrics_summary if parent_span_id: payload["parent_span_id"] = parent_span_id if category is not None: @@ -1656,9 +1609,6 @@ def store_indexed_span( if not store_only_summary: self.store_span(payload, is_eap=is_eap) - if "_metrics_summary" in payload: - self.store_metrics_summary(payload) - class BaseMetricsTestCase(SnubaTestCase): ENTITY_SHORTHANDS = { @@ -2273,49 +2223,6 @@ def store_span_metric( subvalue, ) - def store_profile_functions_metric( - self, - value: dict[str, int] | list[int] | int, - metric: str = "function.duration", - internal_metric: str | None = None, - entity: str | None = None, - tags: dict[str, str] | None = None, - timestamp: datetime | None = None, - project: int | None = None, - use_case_id: UseCaseID = UseCaseID.SPANS, - ): - internal_metric = ( - PROFILE_METRICS_MAP[metric] if internal_metric is None else internal_metric - ) - entity = self.ENTITY_MAP[metric] if entity is None else entity - org_id = self.organization.id - - if tags is None: - tags = {} - - if timestamp is None: - metric_timestamp = self.DEFAULT_METRIC_TIMESTAMP.timestamp() - else: - metric_timestamp = timestamp.timestamp() - - if project is None: - project = self.project.id - - val_list: list[int | dict[str, int]] = [] - if not isinstance(value, list): - val_list.append(value) - else: - val_list = value - for subvalue in val_list: - self.store_metric( - org_id, - project, - internal_metric, - tags, - int(metric_timestamp), - subvalue, - ) - def 
wait_for_metric_count( self, project, diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py index 7d19332d0c351a..79c18d3678968c 100644 --- a/src/sentry/testutils/factories.py +++ b/src/sentry/testutils/factories.py @@ -54,7 +54,6 @@ Incident, IncidentActivity, IncidentProject, - IncidentSeen, IncidentTrigger, IncidentType, TriggerStatus, @@ -99,13 +98,6 @@ from sentry.models.grouphistory import GroupHistory from sentry.models.grouplink import GroupLink from sentry.models.grouprelease import GroupRelease -from sentry.models.notificationaction import ( - ActionService, - ActionTarget, - ActionTrigger, - NotificationAction, -) -from sentry.models.notificationsettingprovider import NotificationSettingProvider from sentry.models.organization import Organization from sentry.models.organizationmapping import OrganizationMapping from sentry.models.organizationmember import OrganizationMember @@ -127,6 +119,13 @@ from sentry.models.savedsearch import SavedSearch from sentry.models.team import Team from sentry.models.userreport import UserReport +from sentry.notifications.models.notificationaction import ( + ActionService, + ActionTarget, + ActionTrigger, + NotificationAction, +) +from sentry.notifications.models.notificationsettingprovider import NotificationSettingProvider from sentry.organizations.services.organization import RpcOrganization, RpcUserOrganizationContext from sentry.sentry_apps.installations import ( SentryAppInstallationCreator, @@ -155,6 +154,7 @@ from sentry.types.region import Region, get_local_region, get_region_by_name from sentry.types.token import AuthTokenType from sentry.uptime.models import ( + IntervalSecondsLiteral, ProjectUptimeSubscription, ProjectUptimeSubscriptionMode, UptimeStatus, @@ -861,7 +861,9 @@ def create_commit_file_change(commit, filename): @staticmethod @assume_test_silo_mode(SiloMode.CONTROL) - def create_user(email=None, is_superuser=False, is_staff=False, is_active=True, **kwargs): + def create_user( + email=None, is_superuser=False, is_staff=False, is_active=True, **kwargs + ) -> User: if email is None: email = uuid4().hex + "@example.com" @@ -1506,7 +1508,6 @@ def create_incident( date_started=None, date_detected=None, date_closed=None, - seen_by=None, alert_rule=None, subscription=None, activation=None, @@ -1533,11 +1534,7 @@ def create_incident( ) for project in projects: IncidentProject.objects.create(incident=incident, project=project) - if seen_by: - for user in seen_by: - IncidentSeen.objects.create( - incident=incident, user_id=user.id, last_seen=timezone.now() - ) + return incident @staticmethod @@ -1799,7 +1796,7 @@ def create_identity_provider( @staticmethod @assume_test_silo_mode(SiloMode.CONTROL) def create_identity( - user: Any, identity_provider: IdentityProvider, external_id: str, **kwargs: Any + user: User | RpcUser, identity_provider: IdentityProvider, external_id: str, **kwargs: Any ) -> Identity: return Identity.objects.create( external_id=external_id, @@ -1953,12 +1950,13 @@ def create_uptime_subscription( url_domain: str, url_domain_suffix: str, host_provider_id: str, - interval_seconds: int, + interval_seconds: IntervalSecondsLiteral, timeout_ms: int, method, headers, body, date_updated: datetime, + trace_sampling: bool = False, ): return UptimeSubscription.objects.create( type=type, @@ -1974,6 +1972,7 @@ def create_uptime_subscription( method=method, headers=headers, body=body, + trace_sampling=trace_sampling, ) @staticmethod @@ -2069,7 +2068,7 @@ def create_workflow( organization = 
Factories.create_organization() if name is None: name = petname.generate(2, " ", letters=10).title() - return Workflow.objects.create(organization=organization, name=name) + return Workflow.objects.create(organization=organization, name=name, **kwargs) @staticmethod @assume_test_silo_mode(SiloMode.REGION) @@ -2097,9 +2096,7 @@ def create_workflow_data_condition_group( @staticmethod @assume_test_silo_mode(SiloMode.REGION) - def create_data_condition( - **kwargs, - ) -> DataCondition: + def create_data_condition(**kwargs) -> DataCondition: return DataCondition.objects.create(**kwargs) @staticmethod @@ -2121,21 +2118,14 @@ def create_data_source( @staticmethod @assume_test_silo_mode(SiloMode.REGION) def create_detector( - organization: Organization | None = None, name: str | None = None, - owner_user_id: int | None = None, - owner_team: Team | None = None, **kwargs, ) -> Detector: - if organization is None: - organization = Factories.create_organization() if name is None: name = petname.generate(2, " ", letters=10).title() + return Detector.objects.create( - organization=organization, name=name, - owner_user_id=owner_user_id, - owner_team=owner_team, **kwargs, ) diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py index e34e2ad32393a5..bff1a03aa1ec3b 100644 --- a/src/sentry/testutils/fixtures.py +++ b/src/sentry/testutils/fixtures.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Mapping +from collections.abc import Iterable, Mapping from datetime import datetime, timedelta from typing import Any @@ -9,8 +9,7 @@ from django.utils.functional import cached_property from sentry.eventstore.models import Event -from sentry.incidents.models.alert_rule import AlertRuleMonitorTypeInt -from sentry.incidents.models.incident import IncidentActivityType +from sentry.incidents.models.alert_rule import AlertRule, AlertRuleMonitorTypeInt from sentry.integrations.models.integration import Integration from sentry.integrations.models.organization_integration import OrganizationIntegration from sentry.models.activity import Activity @@ -26,6 +25,7 @@ from sentry.monitors.models import Monitor, MonitorType, ScheduleType from sentry.organizations.services.organization import RpcOrganization from sentry.silo.base import SiloMode +from sentry.snuba.models import QuerySubscription from sentry.testutils.factories import Factories from sentry.testutils.helpers.datetime import before_now, iso_format from sentry.testutils.silo import assume_test_silo_mode @@ -44,7 +44,7 @@ from sentry.users.models.identity import Identity, IdentityProvider from sentry.users.models.user import User from sentry.users.services.user import RpcUser -from sentry.workflow_engine.models import DataSource, Detector, Workflow +from sentry.workflow_engine.models import DataSource, Detector, DetectorState, Workflow from sentry.workflow_engine.types import DetectorPriorityLevel @@ -58,7 +58,7 @@ def projectkey(self): return self.create_project_key(project=self.project) @cached_property - def user(self): + def user(self) -> User: return self.create_user("admin@localhost", is_superuser=True, is_staff=True) @cached_property @@ -183,50 +183,27 @@ def create_project_template(self, **kwargs) -> ProjectTemplate: def create_project_bookmark(self, project=None, *args, **kwargs): if project is None: project = self.project - return Factories.create_project_bookmark(project=project, *args, **kwargs) + return Factories.create_project_bookmark(project, *args, **kwargs) def create_project_key(self, 
project=None, *args, **kwargs): if project is None: project = self.project - return Factories.create_project_key(project=project, *args, **kwargs) + return Factories.create_project_key(project, *args, **kwargs) - def create_project_rule( - self, - project=None, - action_match=None, - condition_match=None, - comparison_interval=None, - *args, - **kwargs, - ) -> Rule: + def create_project_rule(self, project=None, *args, **kwargs) -> Rule: if project is None: project = self.project - return Factories.create_project_rule( - project=project, - action_data=action_match, - condition_data=condition_match, - *args, - **kwargs, - ) + return Factories.create_project_rule(project, *args, **kwargs) - def create_slack_project_rule( - self, project=None, integration_id=None, channel_id=None, channel_name=None, *args, **kwargs - ): + def create_slack_project_rule(self, project=None, *args, **kwargs): if project is None: project = self.project - return Factories.create_slack_project_rule( - project, - integration_id=integration_id, - channel_id=channel_id, - channel_name=channel_name, - *args, - **kwargs, - ) + return Factories.create_slack_project_rule(project, *args, **kwargs) - def create_release(self, project=None, user=None, *args, **kwargs): + def create_release(self, project=None, *args, **kwargs): if project is None: project = self.project - return Factories.create_release(project=project, user=user, *args, **kwargs) + return Factories.create_release(project, *args, **kwargs) def create_group_release(self, project: Project | None = None, *args, **kwargs) -> GroupRelease: if project is None: @@ -263,7 +240,7 @@ def create_code_mapping(self, project=None, repo=None, organization_integration= def create_repo(self, project=None, *args, **kwargs): if project is None: project = self.project - return Factories.create_repo(project=project, *args, **kwargs) + return Factories.create_repo(project, *args, **kwargs) def create_commit(self, *args, **kwargs): return Factories.create_commit(*args, **kwargs) @@ -274,7 +251,7 @@ def create_commit_author(self, *args, **kwargs): def create_commit_file_change(self, *args, **kwargs): return Factories.create_commit_file_change(*args, **kwargs) - def create_user(self, *args, **kwargs): + def create_user(self, *args, **kwargs) -> User: return Factories.create_user(*args, **kwargs) def create_useremail(self, *args, **kwargs): @@ -291,7 +268,7 @@ def create_usersocialauth( user: User | None = None, provider: str | None = None, uid: str | None = None, - extra_data: Mapping[str, Any] | None = None, + extra_data: dict[str, Any] | None = None, ): if not user: user = self.user @@ -305,7 +282,7 @@ def store_event(self, *args, **kwargs) -> Event: def create_group(self, project=None, *args, **kwargs): if project is None: project = self.project - return Factories.create_group(project=project, *args, **kwargs) + return Factories.create_group(project, *args, **kwargs) def create_file(self, **kwargs): return Factories.create_file(**kwargs) @@ -316,12 +293,12 @@ def create_file_from_path(self, *args, **kwargs): def create_event_attachment(self, event=None, *args, **kwargs): if event is None: event = self.event - return Factories.create_event_attachment(event=event, *args, **kwargs) + return Factories.create_event_attachment(event, *args, **kwargs) - def create_dif_file(self, project=None, *args, **kwargs): + def create_dif_file(self, project: Project | None = None, *args, **kwargs): if project is None: project = self.project - return Factories.create_dif_file(project=project, *args, 
**kwargs) + return Factories.create_dif_file(project, *args, **kwargs) def create_dif_from_path(self, project=None, *args, **kwargs): if project is None: @@ -385,28 +362,21 @@ def create_integration_external_issue(self, *args, **kwargs): def create_integration_external_project(self, *args, **kwargs): return Factories.create_integration_external_project(*args, **kwargs) - def create_incident(self, organization=None, projects=None, subscription=None, *args, **kwargs): + def create_incident(self, organization=None, projects=None, *args, **kwargs): if not organization: organization = self.organization if projects is None: projects = [self.project] - return Factories.create_incident( - organization=organization, projects=projects, subscription=subscription, *args, **kwargs - ) - - def create_incident_activity(self, incident, *args, **kwargs): - return Factories.create_incident_activity(incident=incident, *args, **kwargs) + return Factories.create_incident(organization, projects, *args, **kwargs) - def create_incident_comment(self, incident, *args, **kwargs): - return self.create_incident_activity( - incident, type=IncidentActivityType.COMMENT.value, *args, **kwargs - ) + def create_incident_activity(self, *args, **kwargs): + return Factories.create_incident_activity(*args, **kwargs) def create_incident_trigger(self, incident, alert_rule_trigger, status): return Factories.create_incident_trigger(incident, alert_rule_trigger, status=status) - def create_alert_rule(self, organization=None, projects=None, *args, **kwargs): + def create_alert_rule(self, organization=None, projects=None, *args, **kwargs) -> AlertRule: if not organization: organization = self.organization if projects is None: @@ -415,8 +385,8 @@ def create_alert_rule(self, organization=None, projects=None, *args, **kwargs): def create_alert_rule_activation( self, - alert_rule=None, - query_subscriptions=None, + alert_rule: AlertRule | None = None, + query_subscriptions: Iterable[QuerySubscription] | None = None, project=None, monitor_type=AlertRuleMonitorTypeInt.ACTIVATED, activator=None, @@ -425,9 +395,7 @@ def create_alert_rule_activation( **kwargs, ): if not alert_rule: - alert_rule = self.create_alert_rule( - monitor_type=monitor_type, - ) + alert_rule = self.create_alert_rule(monitor_type=monitor_type) if not query_subscriptions: projects = [project] if project else [self.project] # subscribing an activated alert rule will create an activation @@ -441,9 +409,7 @@ def create_alert_rule_activation( created_activations = [] for sub in query_subscriptions: created_activations.append( - Factories.create_alert_rule_activation( - alert_rule=alert_rule, query_subscription=sub, *args, **kwargs - ) + Factories.create_alert_rule_activation(alert_rule, sub, *args, **kwargs) ) return created_activations @@ -536,7 +502,7 @@ def create_slack_integration( self, organization: Organization, external_id: str = "TXXXXXXX1", - user: RpcUser | None = None, + user: RpcUser | User | None = None, identity_external_id: str = "UXXXXXXX1", **kwargs: Any, ): @@ -636,9 +602,6 @@ def create_dashboard_widget(self, *args, **kwargs): def create_dashboard_widget_query(self, *args, **kwargs): return Factories.create_dashboard_widget_query(*args, **kwargs) - def create_workflow_action(self, *args, **kwargs) -> Workflow: - return Factories.create_workflow_action(*args, **kwargs) - def create_workflow(self, *args, **kwargs) -> Workflow: return Factories.create_workflow(*args, **kwargs) @@ -652,23 +615,34 @@ def create_data_condition( type="", condition_result=None, 
condition_group=None, + **kwargs, ): if condition_result is None: condition_result = str(DetectorPriorityLevel.HIGH.value) if condition_group is None: condition_group = self.create_data_condition_group() + return Factories.create_data_condition( condition=condition, comparison=comparison, type=type, condition_result=condition_result, condition_group=condition_group, + **kwargs, ) - def create_detector(self, *args, **kwargs) -> Detector: - return Factories.create_detector(*args, **kwargs) + def create_detector( + self, + *args, + project=None, + **kwargs, + ) -> Detector: + if project is None: + project = self.create_project(organization=self.organization) + + return Factories.create_detector(*args, project=project, **kwargs) - def create_detector_state(self, *args, **kwargs) -> Detector: + def create_detector_state(self, *args, **kwargs) -> DetectorState: return Factories.create_detector_state(*args, **kwargs) def create_data_source_detector(self, *args, **kwargs): @@ -689,7 +663,7 @@ def create_detector_workflow(self, *args, **kwargs): def create_workflow_data_condition_group(self, *args, **kwargs): return Factories.create_workflow_data_condition_group(*args, **kwargs) - # workflow_engine action + # workflow_engine.models.action def create_action(self, *args, **kwargs): return Factories.create_action(*args, **kwargs) @@ -708,6 +682,7 @@ def create_uptime_subscription( headers=None, body=None, date_updated: None | datetime = None, + trace_sampling: bool = False, ) -> UptimeSubscription: if date_updated is None: date_updated = timezone.now() @@ -728,6 +703,7 @@ def create_uptime_subscription( method=method, headers=headers, body=body, + trace_sampling=trace_sampling, ) def create_project_uptime_subscription( diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py index 928f9beea8a769..d96fc85e62cb0d 100644 --- a/src/sentry/testutils/helpers/backups.py +++ b/src/sentry/testutils/helpers/backups.py @@ -31,6 +31,7 @@ sorted_dependencies, ) from sentry.backup.exports import ( + ExportCheckpointer, export_in_config_scope, export_in_global_scope, export_in_organization_scope, @@ -43,15 +44,10 @@ from sentry.backup.validate import validate from sentry.data_secrecy.models import DataSecrecyWaiver from sentry.db.models.paranoia import ParanoidModel -from sentry.incidents.models.alert_rule import ( - AlertRuleExcludedProjects, - AlertRuleMonitorTypeInt, - AlertRuleTriggerExclusion, -) +from sentry.incidents.models.alert_rule import AlertRuleMonitorTypeInt from sentry.incidents.models.incident import ( IncidentActivity, IncidentSnapshot, - IncidentSubscription, IncidentTrigger, PendingIncidentSnapshot, TimeSeriesSnapshot, @@ -68,7 +64,7 @@ from sentry.models.authidentity import AuthIdentity from sentry.models.authprovider import AuthProvider from sentry.models.counter import Counter -from sentry.models.dashboard import Dashboard, DashboardTombstone +from sentry.models.dashboard import Dashboard, DashboardFavoriteUser, DashboardTombstone from sentry.models.dashboard_permissions import DashboardPermissions from sentry.models.dashboard_widget import ( DashboardWidget, @@ -147,7 +143,12 @@ def __init__(self, info: ComparatorFindings): self.info = info -def export_to_file(path: Path, scope: ExportScope, filter_by: set[str] | None = None) -> Any: +def export_to_file( + path: Path, + scope: ExportScope, + filter_by: set[str] | None = None, + checkpointer: ExportCheckpointer | None = None, +) -> Any: """ Helper function that exports the current state of the database to 
the specified file. """ @@ -157,13 +158,31 @@ def export_to_file(path: Path, scope: ExportScope, filter_by: set[str] | None = # These functions are just thin wrappers, but its best to exercise them directly anyway in # case that ever changes. if scope == ExportScope.Global: - export_in_global_scope(tmp_file, printer=NOOP_PRINTER) + export_in_global_scope( + tmp_file, + printer=NOOP_PRINTER, + checkpointer=checkpointer, + ) elif scope == ExportScope.Config: - export_in_config_scope(tmp_file, printer=NOOP_PRINTER) + export_in_config_scope( + tmp_file, + printer=NOOP_PRINTER, + checkpointer=checkpointer, + ) elif scope == ExportScope.Organization: - export_in_organization_scope(tmp_file, org_filter=filter_by, printer=NOOP_PRINTER) + export_in_organization_scope( + tmp_file, + org_filter=filter_by, + printer=NOOP_PRINTER, + checkpointer=checkpointer, + ) elif scope == ExportScope.User: - export_in_user_scope(tmp_file, user_filter=filter_by, printer=NOOP_PRINTER) + export_in_user_scope( + tmp_file, + user_filter=filter_by, + printer=NOOP_PRINTER, + checkpointer=checkpointer, + ) else: raise AssertionError(f"Unknown `ExportScope`: `{scope.name}`") @@ -193,7 +212,9 @@ def export_to_encrypted_tarball( path: Path, scope: ExportScope, *, + rsa_key_pair: tuple[bytes, bytes], filter_by: set[str] | None = None, + checkpointer: ExportCheckpointer | None = None, ) -> Any: """ Helper function that exports the current state of the database to the specified encrypted @@ -201,7 +222,7 @@ def export_to_encrypted_tarball( """ # Generate a public-private key pair. - (private_key_pem, public_key_pem) = generate_rsa_key_pair() + (private_key_pem, public_key_pem) = rsa_key_pair public_key_fp = io.BytesIO(public_key_pem) # Run the appropriate `export_in_...` command with encryption enabled. @@ -211,11 +232,17 @@ def export_to_encrypted_tarball( # case that ever changes. 
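# Illustrative sketch (not part of this change): with rsa_key_pair now a required
# keyword argument, callers generate the pair themselves (typically once per test)
# and can also thread an ExportCheckpointer through, e.g.
#
#     key_pair = generate_rsa_key_pair()
#     exported = export_to_encrypted_tarball(
#         tmp_path / "export.tar",       # hypothetical pytest tmp_path
#         ExportScope.Organization,
#         rsa_key_pair=key_pair,
#         filter_by={org.slug},          # hypothetical organization fixture
#         checkpointer=checkpointer,     # optional ExportCheckpointer, may be None
#     )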
if scope == ExportScope.Global: export_in_global_scope( - tmp_file, encryptor=LocalFileEncryptor(public_key_fp), printer=NOOP_PRINTER + tmp_file, + encryptor=LocalFileEncryptor(public_key_fp), + printer=NOOP_PRINTER, + checkpointer=checkpointer, ) elif scope == ExportScope.Config: export_in_config_scope( - tmp_file, encryptor=LocalFileEncryptor(public_key_fp), printer=NOOP_PRINTER + tmp_file, + encryptor=LocalFileEncryptor(public_key_fp), + printer=NOOP_PRINTER, + checkpointer=checkpointer, ) elif scope == ExportScope.Organization: export_in_organization_scope( @@ -223,6 +250,7 @@ def export_to_encrypted_tarball( encryptor=LocalFileEncryptor(public_key_fp), org_filter=filter_by, printer=NOOP_PRINTER, + checkpointer=checkpointer, ) elif scope == ExportScope.User: export_in_user_scope( @@ -230,6 +258,7 @@ def export_to_encrypted_tarball( encryptor=LocalFileEncryptor(public_key_fp), user_filter=filter_by, printer=NOOP_PRINTER, + checkpointer=checkpointer, ) else: raise AssertionError(f"Unknown `ExportScope`: `{scope.name}`") @@ -477,19 +506,15 @@ def create_exhaustive_organization( ) # AlertRule* - other_project = self.create_project(name=f"other-project-{slug}", teams=[team]) alert = self.create_alert_rule( organization=org, projects=[project], user=owner, ) - AlertRuleExcludedProjects.objects.create(alert_rule=alert, project=other_project) alert.user_id = owner_id alert.save() trigger = self.create_alert_rule_trigger(alert_rule=alert) - AlertRuleTriggerExclusion.objects.create( - alert_rule_trigger=trigger, query_subscription=alert.snuba_query.subscriptions.first() - ) + assert alert.snuba_query is not None self.create_alert_rule_trigger_action(alert_rule_trigger=trigger) activated_alert = self.create_alert_rule( organization=org, @@ -526,7 +551,6 @@ def create_exhaustive_organization( unique_users=1, total_events=1, ) - IncidentSubscription.objects.create(incident=incident, user_id=owner_id) IncidentTrigger.objects.create( incident=incident, alert_rule_trigger=trigger, @@ -544,6 +568,10 @@ def create_exhaustive_organization( created_by_id=owner_id, organization=org, ) + DashboardFavoriteUser.objects.create( + dashboard=dashboard, + user_id=owner.id, + ) permissions = DashboardPermissions.objects.create( is_editable_by_everyone=True, dashboard=dashboard ) @@ -624,7 +652,7 @@ def create_exhaustive_organization( # Setup a test 'Issue Rule' and 'Automation' workflow = self.create_workflow(organization=org) - detector = self.create_detector(organization=org) + detector = self.create_detector(project=project) self.create_detector_workflow(detector=detector, workflow=workflow) self.create_detector_state(detector=detector) @@ -633,7 +661,7 @@ def create_exhaustive_organization( organization=org, ) - send_notification_action = self.create_action(type=Action.Type.Notification, data="") + send_notification_action = self.create_action(type=Action.Type.NOTIFICATION, data="") self.create_data_condition_group_action( action=send_notification_action, condition_group=notification_condition_group, @@ -661,7 +689,7 @@ def create_exhaustive_organization( ) # TODO @saponifi3d: Create or define trigger workflow action type - trigger_workflows_action = self.create_action(type=Action.Type.TriggerWorkflow, data="") + trigger_workflows_action = self.create_action(type=Action.Type.WEBHOOK, data="") self.create_data_condition_group_action( action=trigger_workflows_action, condition_group=detector_conditions ) diff --git a/src/sentry/testutils/requests.py b/src/sentry/testutils/requests.py index 
f2da822ad0d1bb..a7adf1f9ce4b0e 100644 --- a/src/sentry/testutils/requests.py +++ b/src/sentry/testutils/requests.py @@ -4,7 +4,7 @@ from collections.abc import Callable from typing import Optional -from django.contrib.auth.models import AnonymousUser, User +from django.contrib.auth.models import AnonymousUser from django.core.cache import cache from django.http import HttpRequest @@ -12,6 +12,7 @@ from sentry.middleware.auth import AuthenticationMiddleware from sentry.middleware.placeholder import placeholder_get_response from sentry.testutils.factories import Factories +from sentry.users.models.user import User from sentry.utils.auth import login RequestFactory = Callable[[], Optional[tuple[HttpRequest, User]]] diff --git a/src/sentry/toolbar/views/iframe_view.py b/src/sentry/toolbar/views/iframe_view.py index ba0ff1c5115fd9..58abcb5439cff1 100644 --- a/src/sentry/toolbar/views/iframe_view.py +++ b/src/sentry/toolbar/views/iframe_view.py @@ -47,7 +47,7 @@ def get( allowed_origins: list[str] = project.get_option("sentry:toolbar_allowed_origins") if referrer and is_origin_allowed(referrer, allowed_origins): - return self._respond_with_state("success") + return self._respond_with_state("logged-in") return self._respond_with_state("invalid-domain") diff --git a/src/sentry/toolbar/views/login_success_view.py b/src/sentry/toolbar/views/login_success_view.py index 55f37a385f7490..c4d4c256017796 100644 --- a/src/sentry/toolbar/views/login_success_view.py +++ b/src/sentry/toolbar/views/login_success_view.py @@ -18,6 +18,7 @@ def get(self, request: HttpRequest, organization, project_id_or_slug): TEMPLATE, status=200, context={ + "organization_slug": organization.slug, "delay_sec": int(delay_ms / 1000), "delay_ms": delay_ms, "cookie": f"{session_cookie_name}={request.COOKIES.get(session_cookie_name)}", diff --git a/src/sentry/types/grouphash_metadata.py b/src/sentry/types/grouphash_metadata.py new file mode 100644 index 00000000000000..2e93e2ffd15372 --- /dev/null +++ b/src/sentry/types/grouphash_metadata.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +from typing import NotRequired, TypedDict + +# NOTE: The structure in these metadata types is intentionally flat, to make it easier to query in +# Redash or BigQuery, and they are all merged into a single flat JSON blob (which is then stored in +# `GroupHashMetadata.hashing_metadata`). Therefore, if entries are added, they should be namespaced +# according to their corresponding hash basis (so, for example, `fingerprint_source` and +# `message_source`, rather than just `source`), both for clarity and to avoid collisions. + + +class FingerprintHashingMetadata(TypedDict): + """ + Fingerprint data, gathered both during stand-alone custom/built-in fingerprinting and hybrid + fingerprinting involving message, stacktrace, security, or template hashing + """ + + # The fingerprint value + fingerprint: str + # Either "client", "server_builtin_rule", or "server_custom_rule". (We don't have a "none of the + # above" option here because we only record fingerprint metadata in cases where there's some + # sort of custom fingerprint.) + fingerprint_source: str + # The fingerprint value set in the SDK, if anything other than ["{{ default }}"]. Note that just + # because this is set doesn't mean we necessarily used it for grouping, since server-side rules + # take precedence over client fingerprints. See `fingerprint_source` above.
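# Illustrative example (not part of this change): a hashing_metadata blob recorded for
# an event grouped by a server-side custom rule while the SDK also sent its own
# fingerprint. All values are hypothetical and only meant to show the flat shape:
#
#     {
#         "fingerprint": "['database-unavailable']",
#         "fingerprint_source": "server_custom_rule",
#         "matched_fingerprinting_rule": "message:'*database*' -> ['database-unavailable']",
#         "client_fingerprint": "['checkout-errors']",
#         "is_hybrid_fingerprint": False,
#     }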
+ client_fingerprint: NotRequired[str] + # The server-side rule applied, if any + matched_fingerprinting_rule: NotRequired[str] + # Whether or not a hybrid fingerprint (one involving both the signal value `{{ default }}` and a + # custom value) was used. In that case, we group as we normally would, but then split the events + # into more granular groups based on the custom value. + is_hybrid_fingerprint: bool + + +class MessageHashingMetadata(TypedDict): + """ + Data gathered when an event is grouped by log message or error type and value + """ + + # Either "message" (from "message" or "logentry") or "exception" (error type and value, in cases + # where there's no stacktrace) + message_source: str + # Whether we've done any parameterization of the message, such as replacing a number with a placeholder + message_parameterized: bool + + +class SaltedMessageHashingMetadata(MessageHashingMetadata, FingerprintHashingMetadata): + """ + Data from message-based hybrid fingerprinting + """ + + pass + + +class StacktraceHashingMetadata(TypedDict): + """ + Data gathered when an event is grouped based on a stacktrace found in an exception, a thread, or + directly in the event + """ + + # Either "in-app" or "system" + stacktrace_type: str + # Where in the event data the stacktrace was found - either "exception", "thread", or + # "top-level" + stacktrace_location: str + # The number of stacktraces used for grouping (will be more than 1 in cases of chained + # exceptions) + num_stacktraces: int + + +class SaltedStacktraceHashingMetadata(StacktraceHashingMetadata, FingerprintHashingMetadata): + """ + Data from stacktrace-based hybrid fingerprinting + """ + + pass + + +class SecurityHashingMetadata(TypedDict): + """ + Data gathered when grouping browser-based security (Content Security Policy, Certificate + Transparency, Online Certificate Status Protocol Stapling, or HTTP Public Key Pinning) reports + """ + + # Either "csp", "expect-ct", "expect-staple", or "hpkp" + security_report_type: str + # Domain name of the blocked address + blocked_host: str + # The CSP directive which was violated + csp_directive: NotRequired[str] + # In the case of a local `script-src` violation, whether it's an `unsafe-inline` or an + # `unsafe-eval` violation + csp_script_violation: NotRequired[str] + + +class SaltedSecurityHashingMetadata(SecurityHashingMetadata, FingerprintHashingMetadata): + """ + Data from security-report-based hybrid fingerprinting + """ + + pass + + +class TemplateHashingMetadata(TypedDict): + """ + Data gathered when grouping errors generated by Django templates + """ + + # The name of the template with the invalid template variable + template_name: NotRequired[str] + # The text of the line in the template containing the invalid variable + template_context_line: NotRequired[str] + + +class SaltedTemplateHashingMetadata(TemplateHashingMetadata, FingerprintHashingMetadata): + """ + Data from template-based hybrid fingerprinting + """ + + pass + + +class ChecksumHashingMetadata(TypedDict): + """ + Data gathered when legacy checksum grouping (wherein a hash is provided directly in the event) + is used + """ + + # The checksum used for grouping + checksum: str + # The incoming checksum value, if it was something other than a 32-digit hex value and we + # therefore had to hash it before using it + raw_checksum: NotRequired[str] + + +class FallbackHashingMetadata(TypedDict): + """ + Data gathered when no other grouping method produces results + """ + + # Whether we landed in the fallback because of a lack of data, because
we had a stacktrace but + # all frames were ignored, or some other reason + fallback_reason: str + + +HashingMetadata = ( + FingerprintHashingMetadata + | MessageHashingMetadata + | SaltedMessageHashingMetadata + | StacktraceHashingMetadata + | SaltedStacktraceHashingMetadata + | SecurityHashingMetadata + | SaltedSecurityHashingMetadata + | TemplateHashingMetadata + | SaltedTemplateHashingMetadata + | ChecksumHashingMetadata + | FallbackHashingMetadata +) diff --git a/src/sentry/types/region.py b/src/sentry/types/region.py index 2fb0c7944804b1..7c16d6fbea23d7 100644 --- a/src/sentry/types/region.py +++ b/src/sentry/types/region.py @@ -2,7 +2,7 @@ from collections.abc import Collection, Iterable from enum import Enum -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urljoin import sentry_sdk @@ -16,6 +16,9 @@ from sentry.utils import json from sentry.utils.env import in_test_environment +if TYPE_CHECKING: + from sentry.sentry_apps.models.sentry_app import SentryApp + class RegionCategory(Enum): MULTI_TENANT = "MULTI_TENANT" @@ -343,6 +346,25 @@ def find_regions_for_user(user_id: int) -> set[str]: return find_regions_for_orgs(org_ids) +@control_silo_function +def find_regions_for_sentry_app(sentry_app: SentryApp) -> set[str]: + from sentry.models.organizationmapping import OrganizationMapping + from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation + + if SiloMode.get_current_mode() == SiloMode.MONOLITH: + return {settings.SENTRY_MONOLITH_REGION} + + organizations_with_installations = SentryAppInstallation.objects.filter( + sentry_app=sentry_app + ).values_list("organization_id") + regions = ( + OrganizationMapping.objects.filter(organization_id__in=organizations_with_installations) + .distinct("region_name") + .values_list("region_name") + ) + return {r[0] for r in regions} + + def find_all_region_names() -> Iterable[str]: return get_global_directory().get_region_names() diff --git a/src/sentry/uptime/endpoints/serializers.py b/src/sentry/uptime/endpoints/serializers.py index 96bfc573f411b8..a652f31b70df13 100644 --- a/src/sentry/uptime/endpoints/serializers.py +++ b/src/sentry/uptime/endpoints/serializers.py @@ -23,6 +23,7 @@ class ProjectUptimeSubscriptionSerializerResponse(TypedDict): intervalSeconds: int timeoutMs: int owner: ActorSerializerResponse + traceSampling: bool @register(ProjectUptimeSubscription) @@ -70,4 +71,5 @@ def serialize( "intervalSeconds": obj.uptime_subscription.interval_seconds, "timeoutMs": obj.uptime_subscription.timeout_ms, "owner": attrs["owner"], + "traceSampling": obj.uptime_subscription.trace_sampling, } diff --git a/src/sentry/uptime/endpoints/validators.py b/src/sentry/uptime/endpoints/validators.py index 9a3634baa3e50e..9d29d964776f97 100644 --- a/src/sentry/uptime/endpoints/validators.py +++ b/src/sentry/uptime/endpoints/validators.py @@ -1,5 +1,4 @@ from collections.abc import Sequence -from datetime import timedelta import jsonschema from drf_spectacular.utils import extend_schema_serializer @@ -12,7 +11,11 @@ from sentry.auth.superuser import is_active_superuser from sentry.models.environment import Environment from sentry.uptime.detectors.url_extraction import extract_domain_parts -from sentry.uptime.models import ProjectUptimeSubscription, ProjectUptimeSubscriptionMode +from sentry.uptime.models import ( + ProjectUptimeSubscription, + ProjectUptimeSubscriptionMode, + UptimeSubscription, +) from sentry.uptime.subscriptions.subscriptions import ( MAX_MANUAL_SUBSCRIPTIONS_PER_ORG, 
MaxManualUptimeSubscriptionsReached, @@ -33,18 +36,8 @@ Importantly domains like `vercel.dev` are considered TLDs as defined by the public suffix list (PSL). See `extract_domain_parts` fo more details """ -SUPPORTED_HTTP_METHODS = ["GET", "POST", "HEAD", "PUT", "DELETE", "PATCH", "OPTIONS"] MAX_REQUEST_SIZE_BYTES = 1000 -# This matches the jsonschema for the check config -VALID_INTERVALS = [ - timedelta(minutes=1), - timedelta(minutes=5), - timedelta(minutes=10), - timedelta(minutes=20), - timedelta(minutes=30), - timedelta(minutes=60), -] HEADERS_LIST_SCHEMA = { "type": "array", @@ -76,7 +69,7 @@ class UptimeMonitorValidator(CamelSnakeSerializer): name = serializers.CharField( required=True, max_length=128, - help_text="Name of the uptime monitor", + help_text="Name of the uptime monitor.", ) owner = ActorField( required=False, @@ -87,23 +80,40 @@ class UptimeMonitorValidator(CamelSnakeSerializer): max_length=64, required=False, allow_null=True, - help_text="Name of the environment", + help_text="Name of the environment to create uptime issues in.", ) url = URLField(required=True, max_length=255) interval_seconds = serializers.ChoiceField( - required=True, choices=[int(i.total_seconds()) for i in VALID_INTERVALS] + required=True, + choices=UptimeSubscription.IntervalSeconds.choices, + help_text="Time in seconds between uptime checks.", ) timeout_ms = serializers.IntegerField( required=True, min_value=1000, max_value=30_000, + help_text="The number of milliseconds the request will wait for a response before timing out.", ) mode = serializers.IntegerField(required=False) method = serializers.ChoiceField( - required=False, choices=list(zip(SUPPORTED_HTTP_METHODS, SUPPORTED_HTTP_METHODS)) + required=False, + choices=UptimeSubscription.SupportedHTTPMethods.choices, + help_text="The HTTP method used to make the check request.", + ) + headers = serializers.JSONField( + required=False, + help_text="Additional headers to send with the check request.", + ) + trace_sampling = serializers.BooleanField( + required=False, + default=False, + help_text="When enabled, allows check requests to be considered for downstream performance tracing.", + ) + body = serializers.CharField( + required=False, + allow_null=True, + help_text="The body to send with the check request.", ) - headers = serializers.JSONField(required=False) - body = serializers.CharField(required=False, allow_null=True) def validate(self, attrs): headers = [] @@ -181,6 +191,7 @@ def create(self, validated_data): name=validated_data["name"], mode=validated_data.get("mode", ProjectUptimeSubscriptionMode.MANUAL), owner=validated_data.get("owner"), + trace_sampling=validated_data.get("trace_sampling", False), **method_headers_body, ) except MaxManualUptimeSubscriptionsReached: @@ -215,6 +226,11 @@ def update(self, instance: ProjectUptimeSubscription, data): body = data["body"] if "body" in data else instance.uptime_subscription.body name = data["name"] if "name" in data else instance.name owner = data["owner"] if "owner" in data else instance.owner + trace_sampling = ( + data["trace_sampling"] + if "trace_sampling" in data + else instance.uptime_subscription.trace_sampling + ) if "environment" in data: environment = Environment.get_or_create( @@ -238,6 +254,7 @@ def update(self, instance: ProjectUptimeSubscription, data): body=body, name=name, owner=owner, + trace_sampling=trace_sampling, ) create_audit_entry( request=self.context["request"], diff --git a/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py
b/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py index bd79fb509bbbc4..cee3dbc032f816 100644 --- a/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py +++ b/src/sentry/uptime/migrations/0002_remove_separate_remote_subscription.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0001_uptime_subscriptions"), ("remote_subscriptions", "0001_remote_subscription"), diff --git a/src/sentry/uptime/migrations/0003_drop_remote_subscription.py b/src/sentry/uptime/migrations/0003_drop_remote_subscription.py index 77cf78803a6b1b..baaa9f57d0b69b 100644 --- a/src/sentry/uptime/migrations/0003_drop_remote_subscription.py +++ b/src/sentry/uptime/migrations/0003_drop_remote_subscription.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0002_remove_separate_remote_subscription"), ] diff --git a/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py b/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py index 0dec267cd7a3bc..7c45752c8c0ea7 100644 --- a/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py +++ b/src/sentry/uptime/migrations/0004_projectuptimesubscription_mode.py @@ -22,6 +22,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0731_add_insight_project_flags"), ("uptime", "0003_drop_remote_subscription"), diff --git a/src/sentry/uptime/migrations/0005_uptime_status.py b/src/sentry/uptime/migrations/0005_uptime_status.py index b2e5c67d86a5d1..5402acf8352eb2 100644 --- a/src/sentry/uptime/migrations/0005_uptime_status.py +++ b/src/sentry/uptime/migrations/0005_uptime_status.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0004_projectuptimesubscription_mode"), ] diff --git a/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py b/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py index 700265fef0eb70..61a26021ead98c 100644 --- a/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py +++ b/src/sentry/uptime/migrations/0006_projectuptimesubscription_name_owner.py @@ -23,6 +23,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("sentry", "0742_backfill_alertrule_detection_type"), ("uptime", "0005_uptime_status"), diff --git a/src/sentry/uptime/migrations/0008_uptime_url_suffix.py b/src/sentry/uptime/migrations/0008_uptime_url_suffix.py index f1ea789154cca2..57ffe779e502ed 100644 --- a/src/sentry/uptime/migrations/0008_uptime_url_suffix.py +++ b/src/sentry/uptime/migrations/0008_uptime_url_suffix.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0007_update_detected_subscription_interval"), ] diff --git a/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py b/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py index fe4996ed21c1dc..529f7f7731c993 100644 --- a/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py +++ b/src/sentry/uptime/migrations/0011_remove_uptime_whois_columns_db.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", 
"0010_remove_uptime_whois_columns_state"), ] diff --git a/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py b/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py index 87b20132abb29c..882fdca1fe9154 100644 --- a/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py +++ b/src/sentry/uptime/migrations/0012_uptime_subscription_request_fields.py @@ -20,6 +20,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("uptime", "0011_remove_uptime_whois_columns_db"), ] diff --git a/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py b/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py new file mode 100644 index 00000000000000..d16374639f1e48 --- /dev/null +++ b/src/sentry/uptime/migrations/0018_add_trace_sampling_field_to_uptime.py @@ -0,0 +1,50 @@ +# Generated by Django 5.1.1 on 2024-11-14 16:44 + +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + allow_run_sql = True + + dependencies = [ + ("uptime", "0017_unique_on_timeout"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + """ + ALTER TABLE "uptime_uptimesubscription" ADD COLUMN IF NOT EXISTS "trace_sampling" boolean NOT NULL DEFAULT false; + """, + reverse_sql=""" + ALTER TABLE "uptime_uptimesubscription" DROP COLUMN "trace_sampling"; + """, + hints={"tables": ["uptime_uptimesubscription"]}, + ), + ], + state_operations=[ + migrations.AddField( + model_name="uptimesubscription", + name="trace_sampling", + field=models.BooleanField(default=False), + ), + ], + ), + ] diff --git a/src/sentry/uptime/models.py b/src/sentry/uptime/models.py index 1ed3dc2aa3867f..9f53ee68766cd1 100644 --- a/src/sentry/uptime/models.py +++ b/src/sentry/uptime/models.py @@ -1,6 +1,6 @@ import enum from datetime import timedelta -from typing import ClassVar, Self +from typing import ClassVar, Literal, Self from django.conf import settings from django.db import models @@ -29,6 +29,9 @@ sort_keys=True, ).encode +SupportedHTTPMethodsLiteral = Literal["GET", "POST", "HEAD", "PUT", "DELETE", "PATCH", "OPTIONS"] +IntervalSecondsLiteral = Literal[60, 300, 600, 1200, 1800, 3600] + @region_silo_model class UptimeSubscription(BaseRemoteSubscription, DefaultFieldsModelExisting): @@ -36,6 +39,23 @@ class UptimeSubscription(BaseRemoteSubscription, DefaultFieldsModelExisting): # any projects/orgs. 
Will fix this in a later pr __relocation_scope__ = RelocationScope.Excluded + class SupportedHTTPMethods(models.TextChoices): + GET = "GET", "GET" + POST = "POST", "POST" + HEAD = "HEAD", "HEAD" + PUT = "PUT", "PUT" + DELETE = "DELETE", "DELETE" + PATCH = "PATCH", "PATCH" + OPTIONS = "OPTIONS", "OPTIONS" + + class IntervalSeconds(models.IntegerChoices): + ONE_MINUTE = 60, "1 minute" + FIVE_MINUTES = 300, "5 minutes" + TEN_MINUTES = 600, "10 minutes" + TWENTY_MINUTES = 1200, "20 minutes" + THIRTY_MINUTES = 1800, "30 minutes" + ONE_HOUR = 3600, "1 hour" + # The url to check url = models.CharField(max_length=255) # The domain of the url, extracted via TLDExtract @@ -48,15 +68,22 @@ class UptimeSubscription(BaseRemoteSubscription, DefaultFieldsModelExisting): # The name of the provider hosting this domain host_provider_name = models.CharField(max_length=255, db_index=True, null=True) # How frequently to run the check in seconds - interval_seconds = models.IntegerField() + interval_seconds: models.IntegerField[IntervalSecondsLiteral, IntervalSecondsLiteral] = ( + models.IntegerField(choices=IntervalSeconds) + ) # How long to wait for a response from the url before we assume a timeout timeout_ms = models.IntegerField() # HTTP method to perform the check with - method = models.CharField(max_length=20, db_default="GET") + method: models.CharField[SupportedHTTPMethodsLiteral, SupportedHTTPMethodsLiteral] = ( + models.CharField(max_length=20, choices=SupportedHTTPMethods, db_default="GET") + ) # HTTP headers to send when performing the check headers = JSONField(json_dumps=headers_json_encoder, db_default=[]) # HTTP body to send when performing the check body = models.TextField(null=True) + # How to sample traces for this monitor. Note that we always send a trace_id, so any errors will + # be associated; this just controls the span sampling. + trace_sampling = models.BooleanField(default=False) objects: ClassVar[BaseManager[Self]] = BaseManager( cache_fields=["pk", "subscription_id"], @@ -98,6 +125,7 @@ class UptimeStatus(enum.IntEnum): class ProjectUptimeSubscription(DefaultFieldsModelExisting): # TODO: This should be included in export/import, but right now it has no relation to # any projects/orgs. Will fix this in a later pr + __relocation_scope__ = RelocationScope.Excluded project = FlexibleForeignKey("sentry.Project") diff --git a/src/sentry/uptime/rdap/__init__.py b/src/sentry/uptime/rdap/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry/uptime/subscriptions/subscriptions.py b/src/sentry/uptime/subscriptions/subscriptions.py index a8d580d1312e2b..2a0e78b7794819 100644 --- a/src/sentry/uptime/subscriptions/subscriptions.py +++ b/src/sentry/uptime/subscriptions/subscriptions.py @@ -72,6 +72,7 @@ def get_or_create_uptime_subscription( method: str = "GET", headers: Sequence[tuple[str, str]] | None = None, body: str | None = None, + trace_sampling: bool = False, ) -> UptimeSubscription: """ Creates a new uptime subscription. 
This creates the row in postgres, and fires a task that will send the config @@ -101,6 +102,7 @@ def get_or_create_uptime_subscription( method=method, headers=headers, # type: ignore[misc] body=body, + trace_sampling=trace_sampling, ) created = True except IntegrityError: @@ -157,6 +159,7 @@ def get_or_create_project_uptime_subscription( mode: ProjectUptimeSubscriptionMode = ProjectUptimeSubscriptionMode.MANUAL, name: str = "", owner: Actor | None = None, + trace_sampling: bool = False, ) -> tuple[ProjectUptimeSubscription, bool]: """ Links a project to an uptime subscription so that it can process results. @@ -169,7 +172,13 @@ def get_or_create_project_uptime_subscription( raise MaxManualUptimeSubscriptionsReached uptime_subscription = get_or_create_uptime_subscription( - url, interval_seconds, timeout_ms, method, headers, body + url=url, + interval_seconds=interval_seconds, + timeout_ms=timeout_ms, + method=method, + headers=headers, + body=body, + trace_sampling=trace_sampling, ) owner_user_id = None owner_team_id = None @@ -200,13 +209,20 @@ def update_project_uptime_subscription( body: str | None, name: str, owner: Actor | None, + trace_sampling: bool, ): """ Links a project to an uptime subscription so that it can process results. """ cur_uptime_subscription = uptime_monitor.uptime_subscription new_uptime_subscription = get_or_create_uptime_subscription( - url, interval_seconds, timeout_ms, method, headers, body + url=url, + interval_seconds=interval_seconds, + timeout_ms=timeout_ms, + method=method, + headers=headers, + body=body, + trace_sampling=trace_sampling, ) updated_subscription = cur_uptime_subscription.id != new_uptime_subscription.id diff --git a/src/sentry/uptime/subscriptions/tasks.py b/src/sentry/uptime/subscriptions/tasks.py index a31206a5b092c0..c0936b840ddce4 100644 --- a/src/sentry/uptime/subscriptions/tasks.py +++ b/src/sentry/uptime/subscriptions/tasks.py @@ -91,10 +91,11 @@ def uptime_subscription_to_check_config( config: CheckConfig = { "subscription_id": subscription_id, "url": subscription.url, - "interval_seconds": subscription.interval_seconds, # type: ignore[typeddict-item] + "interval_seconds": subscription.interval_seconds, "timeout_ms": subscription.timeout_ms, - "request_method": subscription.method, # type: ignore[typeddict-item] + "request_method": subscription.method, "request_headers": headers, + "trace_sampling": subscription.trace_sampling, } if subscription.body is not None: config["request_body"] = subscription.body diff --git a/src/sentry/users/api/bases/user.py b/src/sentry/users/api/bases/user.py index 2e7284d461dc54..3664d649fb3f2d 100644 --- a/src/sentry/users/api/bases/user.py +++ b/src/sentry/users/api/bases/user.py @@ -139,7 +139,7 @@ class RegionSiloUserEndpoint(Endpoint): permission_classes = (UserPermission,) def convert_args( - self, request: Request, user_id: str | None = None, *args: Any, **kwargs: Any + self, request: Request, user_id: int | str | None = None, *args: Any, **kwargs: Any ) -> Any: user: RpcUser | User | None = None diff --git a/src/sentry/utils/concurrent.py b/src/sentry/utils/concurrent.py index 88e13294c17ef9..82e1b706d2fa47 100644 --- a/src/sentry/utils/concurrent.py +++ b/src/sentry/utils/concurrent.py @@ -9,17 +9,15 @@ from contextlib import contextmanager from queue import Full, PriorityQueue from time import time -from typing import Generic, NamedTuple, TypeVar +from typing import Any, NamedTuple import sentry_sdk import sentry_sdk.scope logger = logging.getLogger(__name__) -T = TypeVar("T") - -def 
execute(function: Callable[..., T], daemon=True): +def execute[T](function: Callable[..., T], daemon=True) -> Future[T]: future: Future[T] = Future() def run(): @@ -41,7 +39,7 @@ def run(): @functools.total_ordering -class PriorityTask(NamedTuple, Generic[T]): +class PriorityTask[T](NamedTuple): priority: int item: tuple[sentry_sdk.Scope, sentry_sdk.Scope, Callable[[], T], Future[T]] @@ -52,7 +50,7 @@ def __lt__(self, b): return self.priority < b.priority -class TimedFuture(Future[T]): +class TimedFuture[T](Future[T]): _condition: threading.Condition _state: str @@ -124,7 +122,7 @@ def set_exception(self, *args, **kwargs): return super().set_exception(*args, **kwargs) -class Executor(Generic[T]): +class Executor: """ This class provides an API for executing tasks in different contexts (immediately, or asynchronously.) @@ -136,9 +134,15 @@ class Executor(Generic[T]): to allow controlling whether or not queue insertion should be blocking. """ - Future = TimedFuture - - def submit(self, callable, priority=0, block=True, timeout=None) -> TimedFuture[T]: + def submit[ + T + ]( + self, + callable: Callable[[], T], + priority: int = 0, + block: bool = True, + timeout=None, + ) -> TimedFuture[T]: """ Enqueue a task to be executed, returning a ``TimedFuture``. @@ -149,7 +153,7 @@ def submit(self, callable, priority=0, block=True, timeout=None) -> TimedFuture[ raise NotImplementedError -class SynchronousExecutor(Executor[T]): +class SynchronousExecutor(Executor): """ This executor synchronously executes callables in the current thread. @@ -160,11 +164,11 @@ class SynchronousExecutor(Executor[T]): # TODO: The ``Future`` implementation here could be replaced with a # lock-free future for efficiency. - def submit(self, callable, *args, **kwargs): + def submit[T](self, callable: Callable[[], T], *args, **kwargs) -> TimedFuture[T]: """ Immediately execute a callable, returning a ``TimedFuture``. """ - future: Future[T] = self.Future() + future: TimedFuture[T] = TimedFuture() assert future.set_running_or_notify_cancel() try: result = callable() @@ -175,7 +179,7 @@ def submit(self, callable, *args, **kwargs): return future -class ThreadedExecutor(Executor[T]): +class ThreadedExecutor(Executor): """\ This executor provides a method of executing callables in a threaded worker pool. The number of outstanding requests can be limited by the ``maxsize`` @@ -190,7 +194,7 @@ def __init__(self, worker_count=1, maxsize=0): self.__worker_count = worker_count self.__workers = set() self.__started = False - self.__queue: PriorityQueue[PriorityTask[T]] = PriorityQueue(maxsize) + self.__queue: PriorityQueue[PriorityTask[Any]] = PriorityQueue(maxsize) self.__lock = threading.Lock() def __worker(self): @@ -223,7 +227,9 @@ def start(self): self.__started = True - def submit(self, callable, priority=0, block=True, timeout=None): + def submit[ + T + ](self, callable: Callable[[], T], priority=0, block=True, timeout=None) -> TimedFuture[T]: """\ Enqueue a task to be executed, returning a ``TimedFuture``. 
@@ -237,7 +243,7 @@ def submit(self, callable, priority=0, block=True, timeout=None): if not self.__started: self.start() - future: Future[T] = self.Future() + future: TimedFuture[T] = TimedFuture() task = PriorityTask( priority, ( diff --git a/src/sentry/utils/event_tracker.py b/src/sentry/utils/event_tracker.py new file mode 100644 index 00000000000000..2d7c6a97d2f1ef --- /dev/null +++ b/src/sentry/utils/event_tracker.py @@ -0,0 +1,56 @@ +import logging +from enum import StrEnum + +from sentry import options +from sentry.ingest.types import ConsumerType + + +class TransactionStageStatus(StrEnum): + # the transaction is stored to rc-transactions + REDIS_PUT = "redis_put" + + # a save_transactions task is kicked off + SAVE_TXN_STARTED = "save_txn_started" + + # a save_transactions task is finished + SAVE_TXN_FINISHED = "save_txn_finished" + + # the transaction is published to the `events` topic for snuba/sbc consumers to consume + SNUBA_TOPIC_PUT = "snuba_topic_put" + + # the transaction is deleted from rc-transactions + REDIS_DELETED = "redis_deleted" + + # a post_process task is finished + POST_PROCESS_FINISHED = "post_process_finished" + + +logger = logging.getLogger(__name__) + + +def track_sampled_event( + event_id: str, consumer_type: ConsumerType, status: TransactionStageStatus +) -> None: + """ + Records how far an event has made it through the ingestion pipeline. + Each event type will pick up its sampling rate from its registered option. + """ + + sample_rate = options.get(f"performance.event-tracker.sample-rate.{consumer_type}") + if sample_rate == 0: + return + + event_float = (int(event_id, 16) % 10000) / 10000 + if event_float < sample_rate: + extra = { + "event_id": event_id, + "consumer_type": consumer_type, + "status": status, + } + _do_record(extra) + + +def _do_record(extra): + # All Python logs will be picked up by Google Cloud Logging. + # TODO: make a google Cloud Sink to filter for these EventTracker logs and put them into BigQuery and do data analysis downstream + logger.info("EventTracker.recorded", extra=extra) diff --git a/src/sentry/utils/flag.py b/src/sentry/utils/flag.py index 335f4147c92987..132e426ed2d8e7 100644 --- a/src/sentry/utils/flag.py +++ b/src/sentry/utils/flag.py @@ -1,80 +1,26 @@ -# Thread safe flag tracking wrapper. -from contextvars import ContextVar +import logging -flag_manager = ContextVar("flag_manager") # type: ignore[var-annotated] +import sentry_sdk +from sentry_sdk.integrations import Integration +logger = logging.getLogger() -def initialize_flag_manager(capacity: int = 100) -> None: - flag_manager.set(FlagManager(capacity=capacity)) +class FlagPoleIntegration(Integration): + identifier = "flag_pole" -# NOTE: If not properly initialized this function is a no-op. -def process_flag_result(flag: str, result: bool) -> None: - try: - _flag_manager = flag_manager.get() - _flag_manager.insert(f"feature.{flag}", result) - except LookupError: - return None + @staticmethod + def setup_once(): + scope = sentry_sdk.get_current_scope() + scope.add_error_processor(flag_error_processor) -# NOTE: If not properly initialized this function is a no-op. -def get_flags_serialized(): - try: - _flag_manager = flag_manager.get() - return _flag_manager.serialize() - except LookupError: - return [] +def flag_error_processor(event, exc_info): + scope = sentry_sdk.get_current_scope() + event["contexts"]["flags"] = {"values": scope.flags.get()} + return event -# Flag tracking implementation. 
-import itertools -from typing import TypedDict - - -class SerializedFlag(TypedDict): - flag: str - result: bool - - -class Flag: - __slots__ = ("flag", "result") - - def __init__(self, flag: str, result: bool) -> None: - self.flag = flag - self.result = result - - @property - def asdict(self) -> SerializedFlag: - return {"flag": self.flag, "result": self.result} - - -class FlagManager: - # NOTE: Implemented using a ciruclar buffer instead an LRU for ease - # of implementation. - - def __init__(self, capacity: int) -> None: - assert capacity > 0 - self.buffer: list[Flag] = [] - self.capacity = capacity - self.ip = 0 - - @property - def index(self): - return self.ip % self.capacity - - def insert(self, flag: str, result: bool) -> None: - flag_ = Flag(flag, result) - - if self.ip >= self.capacity: - self.buffer[self.index] = flag_ - else: - self.buffer.append(flag_) - - self.ip += 1 - - def serialize(self) -> list[SerializedFlag]: - if self.ip >= self.capacity: - iterator = itertools.chain(range(self.index, self.capacity), range(0, self.index)) - return [self.buffer[i].asdict for i in iterator] - else: - return [flag.asdict for flag in self.buffer] +def flag_pole_hook(flag: str, result: bool): + flags = sentry_sdk.get_current_scope().flags + flags.set(flag, result) diff --git a/src/sentry/utils/locking/backends/migration.py b/src/sentry/utils/locking/backends/migration.py index c7b80c92c01f2c..10fa6329c38460 100644 --- a/src/sentry/utils/locking/backends/migration.py +++ b/src/sentry/utils/locking/backends/migration.py @@ -1,6 +1,7 @@ -from collections.abc import Callable, Mapping -from typing import Any, Optional, Union +from collections.abc import Callable +from typing import Optional, Union +from sentry.conf.types.service_options import ServiceOptions from sentry.utils.locking.backends import LockBackend from sentry.utils.services import build_instance_from_options_of_type, resolve_callable @@ -53,8 +54,8 @@ def selector_func(key, routing_key, backend_new, backend_old): def __init__( self, - backend_new_config: Mapping[str, Any], - backend_old_config: Mapping[str, Any], + backend_new_config: ServiceOptions, + backend_old_config: ServiceOptions, selector_func_path: str | SelectorFncType | None = None, ): self.backend_new = build_instance_from_options_of_type(LockBackend, backend_new_config) diff --git a/src/sentry/utils/prompts.py b/src/sentry/utils/prompts.py index a5c7814b2ac592..d89886fd24f525 100644 --- a/src/sentry/utils/prompts.py +++ b/src/sentry/utils/prompts.py @@ -22,6 +22,8 @@ "issue_feature_flags_inline_onboarding": {"required_fields": ["organization_id", "project_id"]}, "issue_feedback_hidden": {"required_fields": ["organization_id", "project_id"]}, "issue_views_add_view_banner": {"required_fields": ["organization_id"]}, + "rollback_2024_sidebar": {"required_fields": ["organization_id"]}, + "rollback_2024_dropdown": {"required_fields": ["organization_id"]}, } diff --git a/src/sentry/utils/relocation.py b/src/sentry/utils/relocation.py index 0c9d4d2e5ec45e..219a7369a662f0 100644 --- a/src/sentry/utils/relocation.py +++ b/src/sentry/utils/relocation.py @@ -5,16 +5,32 @@ from contextlib import contextmanager from enum import Enum, unique from functools import lru_cache +from io import BytesIO from string import Template from typing import Any from uuid import UUID +from django.core.files.storage import Storage from django.utils import timezone +from orjson import JSONDecodeError from sentry import options -from sentry.backup.dependencies import dependencies, get_model_name, 
sorted_dependencies +from sentry.backup.crypto import ( + DecryptionError, + EncryptorDecryptorPair, + create_encrypted_export_tarball, + decrypt_encrypted_tarball, +) +from sentry.backup.dependencies import ( + NormalizedModelName, + dependencies, + get_model_name, + sorted_dependencies, +) +from sentry.backup.exports import ExportCheckpointer, ExportCheckpointerError from sentry.backup.helpers import Printer from sentry.backup.scopes import RelocationScope +from sentry.backup.services.import_export.model import RpcExportOk from sentry.http import get_server_hostname from sentry.models.files.utils import get_relocation_storage from sentry.models.relocation import Relocation, RelocationFile @@ -308,9 +324,87 @@ class OrderedTask(Enum): ) -# A custom logger that roughly matches the parts of the `click.echo` interface that the -# `import_*` methods rely on. +class StorageBackedCheckpointExporter(ExportCheckpointer): + """ + An export checkpointer that uses GCP cloud storage to store encrypted checkpoints for every + model we export for a SAAS_TO_SAAS relocation. + """ + + def __init__( + self, + *, + crypto: EncryptorDecryptorPair, + uuid: UUID, + storage: Storage, + ): + self.__crypto = crypto + self.__uuid = uuid + self.__storage = storage + + def _get_path_name(self, model_name: NormalizedModelName) -> str: + return f"runs/{self.__uuid}/saas_to_saas_export/_checkpoints/{str(model_name)}.enc.tar" + + def get(self, model_name: NormalizedModelName) -> RpcExportOk | None: + logger_data: dict[str, Any] = {"uuid": str(self.__uuid), "model": str(model_name)} + path_name = self._get_path_name(model_name) + if not self.__storage.exists(path_name): + logger.info( + "Export checkpointer: miss", + extra=logger_data, + ) + return None + + try: + with self.__storage.open(path_name, "rb") as fp: + logger_data["encrypted_contents_size"] = fp.tell() + json_data = decrypt_encrypted_tarball(fp, self.__crypto.decryptor) + parsed_json = self._parse_cached_json(json_data) + if parsed_json is None: + logger.info( + "Export checkpointer: miss", + extra=logger_data, + ) + else: + logger_data["max_pk"] = parsed_json.max_pk + logger.info( + "Export checkpointer: read", + extra=logger_data, + ) + + return parsed_json + except (FileNotFoundError, DecryptionError, JSONDecodeError, ExportCheckpointerError): + logger.info( + "Export checkpointer: miss", + extra=logger_data, + ) + return None + + def add(self, model_name: NormalizedModelName, json_export: Any) -> None: + logger_data: dict[str, Any] = {"uuid": str(self.__uuid), "model": str(model_name)} + path_name = self._get_path_name(model_name) + if not isinstance(json_export, list): + return None + + out_bytes = create_encrypted_export_tarball(json_export, self.__crypto.encryptor).getvalue() + fp = BytesIO() + fp.write(out_bytes) + fp.seek(0) + self.__storage.save(path_name, fp) + + logger_data["encrypted_contents_size"] = fp.tell() + logger_data["model_count"] = len(json_export) + logger.info( + "Export checkpointer: write", + extra=logger_data, + ) + + class LoggingPrinter(Printer): + """ + A custom logger that roughly matches the parts of the `click.echo` interface that the `import_*` + and `export_*` backup methods rely on. 
+ """ + def __init__(self, uuid: UUID): self.uuid = uuid super().__init__() diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 693e64e1772db8..321ed77205607a 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -26,7 +26,7 @@ from sentry.features.rollout import in_random_rollout from sentry.utils import metrics from sentry.utils.db import DjangoAtomicIntegration -from sentry.utils.flag import get_flags_serialized +from sentry.utils.flag import FlagPoleIntegration from sentry.utils.rust import RustInfoIntegration # Can't import models in utils because utils should be the bottom of the food chain @@ -242,11 +242,6 @@ def before_send(event: Event, _: Hint) -> Event | None: event["tags"]["silo_mode"] = str(settings.SILO_MODE) if settings.SENTRY_REGION: event["tags"]["sentry_region"] = settings.SENTRY_REGION - - if "contexts" not in event: - event["contexts"] = {} - event["contexts"]["flags"] = {"values": get_flags_serialized()} - return event @@ -470,6 +465,7 @@ def flush( RustInfoIntegration(), RedisIntegration(), ThreadingIntegration(propagate_hub=True), + FlagPoleIntegration(), ], **sdk_options, ) diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py b/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py index 3c7eff019753ea..0053f191a524c8 100644 --- a/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py +++ b/src/sentry/utils/sdk_crashes/sdk_crash_detection_config.py @@ -1,5 +1,5 @@ from collections.abc import Sequence -from dataclasses import dataclass +from dataclasses import dataclass, field from enum import Enum, unique from typing import TypedDict @@ -12,6 +12,14 @@ ) +@dataclass +class FunctionAndPathPattern: + """Both the function and path pattern must match for a frame to be considered a SDK frame.""" + + function_pattern: str + path_pattern: str + + @dataclass class SDKFrameConfig: function_patterns: set[str] @@ -20,6 +28,8 @@ class SDKFrameConfig: path_replacer: PathReplacer + function_and_path_patterns: list[FunctionAndPathPattern] = field(default_factory=list) + @unique class SdkName(Enum): @@ -42,13 +52,11 @@ class SDKCrashDetectionConfig: sample_rate: float """The organization allowlist to detect crashes for. If empty, all organizations are allowed. Use the sample_rate to disable the SDK crash detection for all organizations.""" organization_allowlist: list[int] - """The SDK names to detect crashes for. For example, ["sentry.cocoa", "sentry.cocoa.react-native"].""" - sdk_names: Sequence[str] + """The SDK names including their min versions to detect crashes for. For example, {"sentry.cocoa": "8.2.0", "sentry.cocoa.react-native": "8.2.0"}.""" + sdk_names: dict[str, str] """Whether to report fatal errors. If true, both unhandled and fatal errors are reported. If false, only unhandled errors are reported.""" report_fatal_errors: bool - """The minimum SDK version to detect crashes for. For example, "8.2.0".""" - min_sdk_version: str """The system library path patterns to detect system frames. 
For example, `System/Library/*` """ system_library_path_patterns: set[str] """The configuration for detecting SDK frames.""" @@ -69,26 +77,28 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]: cocoa_options = _get_options(sdk_name=SdkName.Cocoa, has_organization_allowlist=False) if cocoa_options: + # Since changing the debug image type to macho (https://github.com/getsentry/sentry-cocoa/pull/2701) + # released in sentry-cocoa 8.2.0 (https://github.com/getsentry/sentry-cocoa/blob/main/CHANGELOG.md#820), + # the frames contain the full paths required for detecting system frames in is_system_library_frame. + # Therefore, we require at least sentry-cocoa 8.2.0. + + cocoa_min_sdk_version = "8.2.0" + cocoa_config = SDKCrashDetectionConfig( sdk_name=SdkName.Cocoa, project_id=cocoa_options["project_id"], sample_rate=cocoa_options["sample_rate"], organization_allowlist=cocoa_options["organization_allowlist"], - sdk_names=[ - "sentry.cocoa", - "sentry.cocoa.capacitor", - "sentry.cocoa.react-native", - "sentry.cocoa.dotnet", - "sentry.cocoa.flutter", - "sentry.cocoa.kmp", - "sentry.cocoa.unity", - "sentry.cocoa.unreal", - ], - # Since changing the debug image type to macho (https://github.com/getsentry/sentry-cocoa/pull/2701) - # released in sentry-cocoa 8.2.0 (https://github.com/getsentry/sentry-cocoa/blob/main/CHANGELOG.md#820), - # the frames contain the full paths required for detecting system frames in is_system_library_frame. - # Therefore, we require at least sentry-cocoa 8.2.0. - min_sdk_version="8.2.0", + sdk_names={ + "sentry.cocoa": cocoa_min_sdk_version, + "sentry.cocoa.capacitor": cocoa_min_sdk_version, + "sentry.cocoa.react-native": cocoa_min_sdk_version, + "sentry.cocoa.dotnet": cocoa_min_sdk_version, + "sentry.cocoa.flutter": cocoa_min_sdk_version, + "sentry.cocoa.kmp": cocoa_min_sdk_version, + "sentry.cocoa.unity": cocoa_min_sdk_version, + "sentry.cocoa.unreal": cocoa_min_sdk_version, + }, report_fatal_errors=False, system_library_path_patterns={r"/System/Library/**", r"/usr/lib/**"}, sdk_frame_config=SDKFrameConfig( @@ -116,12 +126,11 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]: project_id=react_native_options["project_id"], sample_rate=react_native_options["sample_rate"], organization_allowlist=react_native_options["organization_allowlist"], - sdk_names=[ - "sentry.javascript.react-native", - ], # 4.0.0 was released in June 2022, see https://github.com/getsentry/sentry-react-native/releases/tag/4.0.0. # We require at least sentry-react-native 4.0.0 to only detect SDK crashes for not too old versions. - min_sdk_version="4.0.0", + sdk_names={ + "sentry.javascript.react-native": "4.0.0", + }, report_fatal_errors=False, system_library_path_patterns={ r"**/react-native/Libraries/**", @@ -157,36 +166,43 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]: ) configs.append(react_native_config) + # 0.6.0 was released in Feb 2023, see https://github.com/getsentry/sentry-native/releases/tag/0.6.0. + native_min_sdk_version = "0.6.0" + java_options = _get_options(sdk_name=SdkName.Java, has_organization_allowlist=True) if java_options: + # The sentry-java SDK sends SDK frames for uncaught exceptions since 7.0.0, which is required for detecting SDK crashes. 
+ # 7.0.0 was released in Nov 2023, see https://github.com/getsentry/sentry-java/releases/tag/7.0.0 + java_min_sdk_version = "7.0.0" + java_config = SDKCrashDetectionConfig( sdk_name=SdkName.Java, project_id=java_options["project_id"], sample_rate=java_options["sample_rate"], organization_allowlist=java_options["organization_allowlist"], - sdk_names=[ - "sentry.java.android", - "sentry.java.android.capacitor", - "sentry.java.android.dotnet", - "sentry.java.android.flutter", - "sentry.java.android.kmp", - "sentry.java.android.react-native", - "sentry.java.android.timber", - "sentry.java.android.unity", - "sentry.java.android.unreal", - "sentry.java.jul", - "sentry.java.kmp", - "sentry.java.log4j2", - "sentry.java.logback", - "sentry.java.opentelemetry.agent", - "sentry.java.spring", - "sentry.java.spring-boot", - "sentry.java.spring-boot.jakarta", - "sentry.java.spring.jakarta", - ], - # The sentry-java SDK sends SDK frames for uncaught exceptions since 7.0.0, which is required for detecting SDK crashes. - # 7.0.0 was released in Nov 2023, see https://github.com/getsentry/sentry-java/releases/tag/7.0.0 - min_sdk_version="7.0.0", + sdk_names={ + "sentry.java.android": java_min_sdk_version, + "sentry.java.android.capacitor": java_min_sdk_version, + "sentry.java.android.dotnet": java_min_sdk_version, + "sentry.java.android.flutter": java_min_sdk_version, + "sentry.java.android.kmp": java_min_sdk_version, + "sentry.java.android.react-native": java_min_sdk_version, + "sentry.java.android.timber": java_min_sdk_version, + "sentry.java.android.unity": java_min_sdk_version, + "sentry.java.android.unreal": java_min_sdk_version, + "sentry.java.jul": java_min_sdk_version, + "sentry.java.kmp": java_min_sdk_version, + "sentry.java.log4j2": java_min_sdk_version, + "sentry.java.logback": java_min_sdk_version, + "sentry.java.opentelemetry.agent": java_min_sdk_version, + "sentry.java.spring": java_min_sdk_version, + "sentry.java.spring-boot": java_min_sdk_version, + "sentry.java.spring-boot.jakarta": java_min_sdk_version, + "sentry.java.spring.jakarta": java_min_sdk_version, + # Required for getting Android Runtime Tracer crashes. + # This is the same as for the native SDK Crash Detection Config + "sentry.native.android": native_min_sdk_version, + }, report_fatal_errors=False, system_library_path_patterns={ r"java.**", @@ -196,13 +212,33 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]: r"com.android.internal.**", r"kotlin.**", r"dalvik.**", + r"/apex/com.android.*/lib*/**", }, sdk_frame_config=SDKFrameConfig( function_patterns=set(), path_patterns={ r"io.sentry.**", }, - path_replacer=KeepFieldPathReplacer(fields={"module", "filename"}), + # The Android Runtime Tracer can crash when users enable profiling in the + # Sentry Android SDK. While the Sentry Android SDK doesn't directly cause + # these crashes, we must know when they occur. As Sentry doesn't appear in + # the stacktrace, we filter for the following specific methods in the + # specified Android apex packages. 
+ function_and_path_patterns=[ + FunctionAndPathPattern( + function_pattern=r"*pthread_getcpuclockid*", + path_pattern=r"/apex/com.android.art/lib64/bionic/libc.so", + ), + FunctionAndPathPattern( + function_pattern=r"*art::Trace::StopTracing*", + path_pattern=r"/apex/com.android.art/lib64/libart.so", + ), + FunctionAndPathPattern( + function_pattern=r"*art::Thread::DumpState*", + path_pattern=r"/apex/com.android.art/lib64/libart.so", + ), + ], + path_replacer=KeepFieldPathReplacer(fields={"module", "filename", "package"}), ), sdk_crash_ignore_functions_matchers=set(), ) @@ -216,20 +252,18 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]: project_id=native_options["project_id"], sample_rate=native_options["sample_rate"], organization_allowlist=native_options["organization_allowlist"], - sdk_names=[ - "sentry.native", - "sentry.native.android", - "sentry.native.android.capacitor", - "sentry.native.android.flutter", - "sentry.native.android.react-native", - "sentry.native.android.unity", - "sentry.native.android.unreal", - "sentry.native.dotnet", - "sentry.native.unity", - "sentry.native.unreal", - ], - # 0.6.0 was released in Feb 2023, see https://github.com/getsentry/sentry-native/releases/tag/0.6.0. - min_sdk_version="0.6.0", + sdk_names={ + "sentry.native": native_min_sdk_version, + "sentry.native.android": native_min_sdk_version, + "sentry.native.android.capacitor": native_min_sdk_version, + "sentry.native.android.flutter": native_min_sdk_version, + "sentry.native.android.react-native": native_min_sdk_version, + "sentry.native.android.unity": native_min_sdk_version, + "sentry.native.android.unreal": native_min_sdk_version, + "sentry.native.dotnet": native_min_sdk_version, + "sentry.native.unity": native_min_sdk_version, + "sentry.native.unreal": native_min_sdk_version, + }, report_fatal_errors=False, system_library_path_patterns={ # well known locations for unix paths @@ -267,15 +301,19 @@ def build_sdk_crash_detection_configs() -> Sequence[SDKCrashDetectionConfig]: dart_options = _get_options(sdk_name=SdkName.Dart, has_organization_allowlist=True) if dart_options: + # Since 8.2.0 the Dart SDK sends SDK frames, which is required; + # see https://github.com/getsentry/sentry-dart/releases/tag/8.2.0 + dart_min_sdk_version = "8.2.1" + dart_config = SDKCrashDetectionConfig( sdk_name=SdkName.Dart, project_id=dart_options["project_id"], sample_rate=dart_options["sample_rate"], organization_allowlist=dart_options["organization_allowlist"], - sdk_names=["sentry.dart", "sentry.dart.flutter"], - # Since 8.2.0 the Dart SDK sends SDK frames, which is required; - # see https://github.com/getsentry/sentry-dart/releases/tag/8.2.0 - min_sdk_version="8.2.1", + sdk_names={ + "sentry.dart": dart_min_sdk_version, + "sentry.dart.flutter": dart_min_sdk_version, + }, report_fatal_errors=True, system_library_path_patterns={ # Dart diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detector.py b/src/sentry/utils/sdk_crashes/sdk_crash_detector.py index 700b2c2eaac5af..83f9d9834c7636 100644 --- a/src/sentry/utils/sdk_crashes/sdk_crash_detector.py +++ b/src/sentry/utils/sdk_crashes/sdk_crash_detector.py @@ -28,11 +28,12 @@ def is_sdk_supported( sdk_name: str, sdk_version: str, ) -> bool: - if sdk_name not in self.config.sdk_names: + minimum_sdk_version_string = self.config.sdk_names.get(sdk_name) + if not minimum_sdk_version_string: return False try: - minimum_sdk_version = Version(self.config.min_sdk_version) + minimum_sdk_version = Version(minimum_sdk_version_string) 
actual_sdk_version = Version(sdk_version) if actual_sdk_version < minimum_sdk_version: @@ -103,6 +104,18 @@ def is_sdk_frame(self, frame: Mapping[str, Any]) -> bool: function = frame.get("function") if function: + for ( + function_and_path_pattern + ) in self.config.sdk_frame_config.function_and_path_patterns: + function_pattern = function_and_path_pattern.function_pattern + path_pattern = function_and_path_pattern.path_pattern + + function_matches = glob_match(function, function_pattern, ignorecase=True) + path_matches = self._path_patters_match_frame({path_pattern}, frame) + + if function_matches and path_matches: + return True + for patterns in self.config.sdk_frame_config.function_patterns: if glob_match(function, patterns, ignorecase=True): return True diff --git a/src/sentry/utils/services.py b/src/sentry/utils/services.py index 5a21d7c6318d70..b959b0f7984a82 100644 --- a/src/sentry/utils/services.py +++ b/src/sentry/utils/services.py @@ -6,11 +6,12 @@ import logging import threading from collections.abc import Callable, Mapping, Sequence -from typing import Any, TypeVar +from typing import Any -from rest_framework.request import Request +from django.http.request import HttpRequest from sentry import options +from sentry.conf.types.service_options import ServiceOptions from sentry.utils.concurrent import Executor, FutureSet, ThreadedExecutor, TimedFuture # TODO: adjust modules to import from new location -- the weird `as` syntax is for mypy @@ -18,26 +19,21 @@ from sentry.utils.lazy_service_wrapper import Service as Service from .imports import import_string -from .types import AnyCallable logger = logging.getLogger(__name__) -T = TypeVar("T") -CallableT = TypeVar("CallableT", bound=Callable[..., object]) - - -def resolve_callable(value: str | CallableT) -> CallableT: - if callable(value): - return value - elif isinstance(value, str): +def resolve_callable[CallableT: Callable[..., object]](value: str | CallableT) -> CallableT: + if isinstance(value, str): return import_string(value) + elif callable(value): + return value else: raise TypeError("Expected callable or string") class Context: - def __init__(self, request: Request, backends: dict[type[Service | None], Service]): + def __init__(self, request: HttpRequest | None, backends: dict[type[Service | None], Service]): self.request = request self.backends = backends @@ -51,7 +47,7 @@ def copy(self) -> Context: ] Callback = Callable[ - [Context, str, Mapping[str, Any], Sequence[str], Sequence[TimedFuture]], + [Context, str, Mapping[str, Any], Sequence[str], Sequence[TimedFuture[Any] | None]], None, ] @@ -253,7 +249,7 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> # executed before the primary request is queued. This is such a # strange usage pattern that I don't think it's worth optimizing # for.) - results = [None] * len(selected_backend_names) + results: list[TimedFuture[Any] | None] = [None] * len(selected_backend_names) for i, backend_name in enumerate(selected_backend_names[1:], 1): try: backend, executor = self.backends[backend_name] @@ -276,7 +272,7 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> # calling thread. (We don't have to protect this from ``KeyError`` # since we already ensured that the primary backend exists.) 
backend, executor = self.backends[selected_backend_names[0]] - results[0] = executor.submit( + result = results[0] = executor.submit( functools.partial(call_backend_method, context.copy(), backend, is_primary=True), priority=0, block=True, @@ -289,14 +285,13 @@ def call_backend_method(context: Context, backend: Service, is_primary: bool) -> ) ) - result: TimedFuture = results[0] return result.result() return execute def build_instance_from_options( - options: Mapping[str, object], + options: ServiceOptions, *, default_constructor: Callable[..., object] | None = None, ) -> object: @@ -313,9 +308,11 @@ def build_instance_from_options( return constructor(**options.get("options", {})) -def build_instance_from_options_of_type( +def build_instance_from_options_of_type[ + T +]( tp: type[T], - options: Mapping[str, object], + options: ServiceOptions, *, default_constructor: Callable[..., T] | None = None, ) -> T: @@ -364,17 +361,17 @@ class ServiceDelegator(Delegator, Service): def __init__( self, backend_base: str, - backends: Mapping[str, Mapping[str, Any]], - selector_func: str | AnyCallable, - callback_func: str | AnyCallable | None = None, + backends: Mapping[str, ServiceOptions], + selector_func: str | Selector, + callback_func: str | Callback | None = None, ): super().__init__( import_string(backend_base), { name: ( - build_instance_from_options(options), - build_instance_from_options( - options.get("executor", {}), default_constructor=ThreadedExecutor + build_instance_from_options_of_type(Service, options), + build_instance_from_options_of_type( + Executor, options.get("executor", {}), default_constructor=ThreadedExecutor ), ) for name, options in backends.items() @@ -435,9 +432,7 @@ def selector(context: Context, method: str, callargs: Mapping[str, Any]) -> list else: intkey = key - if not isinstance(intkey, int): - logger.error("make_writebehind_selector.invalid", extra={"received_type": type(intkey)}) - return [move_from] + assert isinstance(intkey, int), intkey if rollout_rate < 0: if (intkey % 10000) / 10000 < rollout_rate * -1.0: diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 0b7bfb116ec057..8e61baf6addd9b 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -3,6 +3,7 @@ import dataclasses import functools import logging +import math import os import re import time @@ -165,6 +166,7 @@ def log_snuba_info(content): "origin.transaction": "sentry_tags[transaction]", "is_transaction": "is_segment", "sdk.name": "sentry_tags[sdk.name]", + "sdk.version": "sentry_tags[sdk.version]", "trace.status": "sentry_tags[trace.status]", "messaging.destination.name": "sentry_tags[messaging.destination.name]", "messaging.message.id": "sentry_tags[messaging.message.id]", @@ -201,6 +203,7 @@ def log_snuba_info(content): "timestamp": "timestamp", "trace": "trace_id", "transaction": "segment_name", + "transaction.op": "attr_str[sentry.transaction.op]", # `transaction.id` and `segment.id` is going to be replaced by `transaction.span_id` please do not use # transaction.id is "wrong", its pointing to segment_id to return something for the transistion, but represents the # txn event id(32 char uuid). EAP will no longer be storing this. 
@@ -215,6 +218,7 @@ def log_snuba_info(content): "messaging.destination.name": "attr_str[sentry.messaging.destination.name]", "messaging.message.id": "attr_str[sentry.messaging.message.id]", "span.status_code": "attr_str[sentry.status_code]", + "profile.id": "attr_str[sentry.profile_id]", "replay.id": "attr_str[sentry.replay_id]", "span.ai.pipeline.group": "attr_str[sentry.ai_pipeline_group]", "trace.status": "attr_str[sentry.trace.status]", @@ -222,7 +226,9 @@ def log_snuba_info(content): "ai.total_tokens.used": "attr_num[ai_total_tokens_used]", "ai.total_cost": "attr_num[ai_total_cost]", "sdk.name": "attr_str[sentry.sdk.name]", + "sdk.version": "attr_str[sentry.sdk.version]", "release": "attr_str[sentry.release]", + "environment": "attr_str[sentry.environment]", "user": "attr_str[sentry.user]", "user.id": "attr_str[sentry.user.id]", "user.email": "attr_str[sentry.user.email]", @@ -230,22 +236,9 @@ def log_snuba_info(content): "user.ip": "attr_str[sentry.user.ip]", "user.geo.subregion": "attr_str[sentry.user.geo.subregion]", "user.geo.country_code": "attr_str[sentry.user.geo.country_code]", -} - -METRICS_SUMMARIES_COLUMN_MAP = { - "project": "project_id", - "project.id": "project_id", - "id": "span_id", - "trace": "trace_id", - "metric": "metric_mri", - "timestamp": "end_timestamp", - "segment.id": "segment_id", - "span.duration": "duration_ms", - "span.group": "group", - "min_metric": "min", - "max_metric": "max", - "sum_metric": "sum", - "count_metric": "count", + "http.decoded_response_content_length": "attr_num[http.decoded_response_content_length]", + "http.response_content_length": "attr_num[http.response_content_length]", + "http.response_transfer_size": "attr_num[http.response_transfer_size]", } SPAN_COLUMN_MAP.update( @@ -298,7 +291,6 @@ def log_snuba_info(content): Dataset.Discover: DISCOVER_COLUMN_MAP, Dataset.Sessions: SESSIONS_SNUBA_MAP, Dataset.Metrics: METRICS_COLUMN_MAP, - Dataset.MetricsSummaries: METRICS_SUMMARIES_COLUMN_MAP, Dataset.PerformanceMetrics: METRICS_COLUMN_MAP, Dataset.SpansIndexed: SPAN_COLUMN_MAP, Dataset.EventsAnalyticsPlatform: SPAN_EAP_COLUMN_MAP, @@ -317,7 +309,6 @@ def log_snuba_info(content): Dataset.IssuePlatform: list(ISSUE_PLATFORM_MAP.values()), Dataset.SpansIndexed: list(SPAN_COLUMN_MAP.values()), Dataset.EventsAnalyticsPlatform: list(SPAN_EAP_COLUMN_MAP.values()), - Dataset.MetricsSummaries: list(METRICS_SUMMARIES_COLUMN_MAP.values()), } SNUBA_OR = "or" @@ -497,7 +488,13 @@ class RetrySkipTimeout(urllib3.Retry): """ def increment( - self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, ): """ Just rely on the parent class unless we have a read timeout. In that case @@ -636,7 +633,9 @@ def get_organization_id_from_project_ids(project_ids: Sequence[int]) -> int: return organization_id -def infer_project_ids_from_related_models(filter_keys: Mapping[str, Sequence[int]]) -> list[int]: +def infer_project_ids_from_related_models( + filter_keys: Mapping[str, Sequence[int]], +) -> list[int]: ids = [set(get_related_project_ids(k, filter_keys[k])) for k in filter_keys] return list(set.union(*ids)) @@ -956,7 +955,10 @@ def raw_snql_query( # other functions do here. It does not add any automatic conditions, format # results, nothing. Use at your own risk. 
return bulk_snuba_queries( - requests=[request], referrer=referrer, use_cache=use_cache, query_source=query_source + requests=[request], + referrer=referrer, + use_cache=use_cache, + query_source=query_source, )[0] @@ -1095,7 +1097,9 @@ def _apply_cache_and_build_results( for result, (query_pos, _, opt_cache_key) in zip(query_results, to_query): if opt_cache_key: cache.set( - opt_cache_key, json.dumps(result), settings.SENTRY_SNUBA_CACHE_TTL_SECONDS + opt_cache_key, + json.dumps(result), + settings.SENTRY_SNUBA_CACHE_TTL_SECONDS, ) results.append((query_pos, result)) @@ -1164,7 +1168,8 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: except ValueError: if response.status != 200: logger.exception( - "snuba.query.invalid-json", extra={"response.data": response.data} + "snuba.query.invalid-json", + extra={"response.data": response.data}, ) raise SnubaError("Failed to parse snuba error response") raise UnexpectedResponseError(f"Could not decode JSON response: {response.data!r}") @@ -1441,7 +1446,6 @@ def _resolve_column(col): # Some dataset specific logic: if dataset == Dataset.Discover: - if isinstance(col, (list, tuple)) or col in ("project_id", "group_id"): return col elif dataset == Dataset.EventsAnalyticsPlatform: @@ -1674,6 +1678,9 @@ def resolve_complex_column(col, resolve_func, ignored): "UInt16": "integer", "UInt32": "integer", "UInt64": "integer", + "Int16": "integer", + "Int32": "integer", + "Int64": "integer", "Float32": "number", "Float64": "number", "DateTime": "date", @@ -1821,7 +1828,11 @@ def replace(d, key, val): reverse = compose( reverse, lambda row: ( - replace(row, "bucketed_end", int(parse_datetime(row["bucketed_end"]).timestamp())) + replace( + row, + "bucketed_end", + int(parse_datetime(row["bucketed_end"]).timestamp()), + ) if "bucketed_end" in row else row ), @@ -1922,8 +1933,6 @@ def is_duration_measurement(key): "measurements.fid", "measurements.ttfb", "measurements.ttfb.requesttime", - "measurements.time_to_initial_display", - "measurements.time_to_full_display", "measurements.app_start_cold", "measurements.app_start_warm", "measurements.time_to_full_display", @@ -1985,3 +1994,21 @@ def get_array_column_field(array_column, internal_key): if array_column == "span_op_breakdowns": return get_span_op_breakdown_key_name(internal_key) return internal_key + + +def process_value(value: None | str | int | float | list[str] | list[int] | list[float]): + if isinstance(value, float): + # 0 for nan, and none for inf were chosen arbitrarily, nan and inf are + # invalid json so needed to pick something valid to use instead + if math.isnan(value): + value = 0 + elif math.isinf(value): + value = None + + if isinstance(value, list): + for i, v in enumerate(value): + if isinstance(v, float): + value[i] = process_value(v) + return value + + return value diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py index 073e765e953c69..afdb03b9b2e031 100644 --- a/src/sentry/utils/snuba_rpc.py +++ b/src/sentry/utils/snuba_rpc.py @@ -11,6 +11,13 @@ CreateSubscriptionRequest, CreateSubscriptionResponse, ) +from sentry_protos.snuba.v1.endpoint_time_series_pb2 import TimeSeriesRequest, TimeSeriesResponse +from sentry_protos.snuba.v1.endpoint_trace_item_attributes_pb2 import ( + TraceItemAttributeNamesRequest, + TraceItemAttributeNamesResponse, + TraceItemAttributeValuesRequest, + TraceItemAttributeValuesResponse, +) from sentry_protos.snuba.v1.endpoint_trace_item_table_pb2 import ( TraceItemTableRequest, TraceItemTableResponse, @@ -55,12 +62,33 
@@ def meta( def table_rpc(req: TraceItemTableRequest) -> TraceItemTableResponse: - resp = _make_rpc_request("EndpointTraceItemTable", "v1", req) + resp = _make_rpc_request("EndpointTraceItemTable", "v1", req.meta.referrer, req) response = TraceItemTableResponse() response.ParseFromString(resp.data) return response +def timeseries_rpc(req: TimeSeriesRequest) -> TimeSeriesResponse: + resp = _make_rpc_request("EndpointTimeSeries", "v1", req.meta.referrer, req) + response = TimeSeriesResponse() + response.ParseFromString(resp.data) + return response + + +def attribute_names_rpc(req: TraceItemAttributeNamesRequest) -> TraceItemAttributeNamesResponse: + resp = _make_rpc_request("EndpointTraceItemAttributeNames", "v1", req.meta.referrer, req) + response = TraceItemAttributeNamesResponse() + response.ParseFromString(resp.data) + return response + + +def attribute_values_rpc(req: TraceItemAttributeValuesRequest) -> TraceItemAttributeValuesResponse: + resp = _make_rpc_request("AttributeValuesRequest", "v1", req.meta.referrer, req) + response = TraceItemAttributeValuesResponse() + response.ParseFromString(resp.data) + return response + + def rpc( req: SnubaRPCRequest, resp_type: type[RPCResponseType], @@ -99,7 +127,7 @@ def rpc( cls = req.__class__ endpoint_name = cls.__name__ class_version = cls.__module__.split(".", 3)[2] - http_resp = _make_rpc_request(endpoint_name, class_version, req) + http_resp = _make_rpc_request(endpoint_name, class_version, req.meta.referrer, req) resp = resp_type() resp.ParseFromString(http_resp.data) return resp @@ -108,9 +136,9 @@ def rpc( def _make_rpc_request( endpoint_name: str, class_version: str, + referrer: str | None, req: SnubaRPCRequest | CreateSubscriptionRequest, ) -> BaseHTTPResponse: - referrer = req.meta.referrer if hasattr(req, "meta") else None if SNUBA_INFO: from google.protobuf.json_format import MessageToJson @@ -143,7 +171,7 @@ def create_subscription(req: CreateSubscriptionRequest) -> CreateSubscriptionRes cls = req.__class__ endpoint_name = cls.__name__ class_version = cls.__module__.split(".", 3)[2] - http_resp = _make_rpc_request(endpoint_name, class_version, req) + http_resp = _make_rpc_request(endpoint_name, class_version, None, req) resp = CreateSubscriptionResponse() resp.ParseFromString(http_resp.data) return resp diff --git a/src/sentry/web/debug_urls.py b/src/sentry/web/debug_urls.py index 200e13f9954eac..5afc8bcdb669a3 100644 --- a/src/sentry/web/debug_urls.py +++ b/src/sentry/web/debug_urls.py @@ -21,7 +21,6 @@ from sentry.web.frontend.debug.debug_error_embed import DebugErrorPageEmbedView from sentry.web.frontend.debug.debug_feedback_issue import DebugFeedbackIssueEmailView from sentry.web.frontend.debug.debug_generic_issue import DebugGenericIssueEmailView -from sentry.web.frontend.debug.debug_incident_activity_email import DebugIncidentActivityEmailView from sentry.web.frontend.debug.debug_incident_trigger_email import DebugIncidentTriggerEmailView from sentry.web.frontend.debug.debug_incident_trigger_email_activated_alert import ( DebugIncidentActivatedAlertTriggerEmailView, @@ -148,7 +147,6 @@ re_path( r"^debug/mail/sso-unlinked/no-password/$", DebugSsoUnlinkedNoPasswordEmailView.as_view() ), - re_path(r"^debug/mail/incident-activity/$", DebugIncidentActivityEmailView.as_view()), re_path(r"^debug/mail/incident-trigger/$", DebugIncidentTriggerEmailView.as_view()), re_path( r"^debug/mail/activated-incident-trigger/$", diff --git a/src/sentry/web/frontend/auth_login.py b/src/sentry/web/frontend/auth_login.py index 
592b1dba1a7bfc..4fb8f4eb2fdbde 100644 --- a/src/sentry/web/frontend/auth_login.py +++ b/src/sentry/web/frontend/auth_login.py @@ -95,7 +95,7 @@ def handle(self, request: HttpRequest, *args, **kwargs) -> HttpResponseBase: Hooks in to the django view dispatch which delegates request to GET/POST/PUT/DELETE. Base view overwrites dispatch to include functionality for csrf, superuser, customer domains, etc. """ - return super().handle(request=request, *args, **kwargs) + return super().handle(request, *args, **kwargs) def get(self, request: Request, **kwargs) -> HttpResponseBase: next_uri = self.get_next_uri(request=request) diff --git a/src/sentry/web/frontend/base.py b/src/sentry/web/frontend/base.py index 7eb53b1c374ac9..d8474a83ca9d8d 100644 --- a/src/sentry/web/frontend/base.py +++ b/src/sentry/web/frontend/base.py @@ -467,8 +467,7 @@ def get_not_2fa_compliant_url(self, request: HttpRequest, *args: Any, **kwargs: return reverse("sentry-account-settings-security") def get_context_data(self, request: HttpRequest, **kwargs: Any) -> dict[str, Any]: - context = csrf(request) - return context + return csrf(request) def respond( self, template: str, context: dict[str, Any] | None = None, status: int = 200 diff --git a/src/sentry/web/frontend/debug/debug_incident_activity_email.py b/src/sentry/web/frontend/debug/debug_incident_activity_email.py deleted file mode 100644 index 926376b9378c75..00000000000000 --- a/src/sentry/web/frontend/debug/debug_incident_activity_email.py +++ /dev/null @@ -1,28 +0,0 @@ -from django.http import HttpRequest, HttpResponse -from django.views.generic import View - -from sentry.incidents.models.incident import Incident, IncidentActivity, IncidentActivityType -from sentry.incidents.tasks import generate_incident_activity_email -from sentry.models.organization import Organization -from sentry.users.models.user import User - -from .mail import MailPreview - - -class DebugIncidentActivityEmailView(View): - def get(self, request: HttpRequest) -> HttpResponse: - organization = Organization(slug="myorg") - user = User(id=1235, name="Hello There") - incident = Incident( - id=2, identifier=123, organization=organization, title="Something broke" - ) - activity = IncidentActivity( - incident=incident, - user_id=user.id, - type=IncidentActivityType.COMMENT.value, - comment="hi", - ) - email = generate_incident_activity_email(activity, user) - return MailPreview( - html_template=email.html_template, text_template=email.template, context=email.context - ).render(request) diff --git a/src/sentry/web/frontend/js_sdk_loader.py b/src/sentry/web/frontend/js_sdk_loader.py index acc1ae6f66c25b..ad99e0ebc2333c 100644 --- a/src/sentry/web/frontend/js_sdk_loader.py +++ b/src/sentry/web/frontend/js_sdk_loader.py @@ -1,7 +1,7 @@ from __future__ import annotations import time -from typing import NotRequired, TypedDict +from typing import Any, NotRequired, TypedDict from django.conf import settings from django.http import HttpRequest, HttpResponse @@ -55,6 +55,10 @@ class JavaScriptSdkLoader(BaseView): def determine_active_organization(self, request: HttpRequest, organization_slug=None) -> None: pass + # Same as above + def get_context_data(self, request: HttpRequest, **kwargs) -> dict[str, Any]: + return {} + def _get_loader_config( self, key: ProjectKey | None, sdk_version: Version | None ) -> LoaderInternalConfig: diff --git a/src/sentry/web/frontend/oauth_authorize.py b/src/sentry/web/frontend/oauth_authorize.py index 8617bdbd5a465f..e9ed3e3543fc99 100644 --- 
a/src/sentry/web/frontend/oauth_authorize.py +++ b/src/sentry/web/frontend/oauth_authorize.py @@ -127,17 +127,25 @@ def get(self, request: HttpRequest, **kwargs) -> HttpResponseBase: err_response="client_id", ) - # TODO (athena): Clean up this so scopes are always coming from the model - # This change is temporarily needed before we migrate existing applications - # to have the correct scopes - if application.requires_org_level_access: - scopes = application.scopes + scopes = request.GET.get("scope") + if scopes: + scopes = scopes.split(" ") else: - scopes = request.GET.get("scope") - if scopes: - scopes = scopes.split(" ") - else: - scopes = [] + scopes = [] + if application.requires_org_level_access: + # Applications that require org level access have a maximum set of scopes + # configured in admin; the requested scopes must not exceed it + max_scopes = application.scopes + for scope in scopes: + if scope not in max_scopes: + return self.error( + request=request, + client_id=client_id, + response_type=response_type, + redirect_uri=redirect_uri, + name="invalid_scope", + state=state, + ) for scope in scopes: if scope not in settings.SENTRY_SCOPES: @@ -163,7 +171,9 @@ def get(self, request: HttpRequest, **kwargs) -> HttpResponseBase: if not request.user.is_authenticated: return super().get(request, application=application) - if not force_prompt: + # If the application expects org level access, we need to prompt the user to choose which + # organization they want to give access to every time. We should not presume the user's intention + if not (force_prompt or application.requires_org_level_access): try: existing_auth = ApiAuthorization.objects.get( user_id=request.user.id, application=application diff --git a/src/sentry/web/frontend/react_page.py b/src/sentry/web/frontend/react_page.py index a849caa248dffd..81e5c4d26b8051 100644 --- a/src/sentry/web/frontend/react_page.py +++ b/src/sentry/web/frontend/react_page.py @@ -22,6 +22,7 @@ ) from sentry.users.services.user.model import RpcUser from sentry.utils.http import is_using_customer_domain, query_string +from sentry.web.client_config import get_client_config from sentry.web.frontend.base import BaseView, ControlSiloOrganizationView from sentry.web.helpers import render_to_response @@ -55,13 +56,13 @@ def resolve_redirect_url(request: HttpRequest | Request, org_slug: str, user_id= def resolve_activeorg_redirect_url(request: HttpRequest | Request) -> str | None: user: AnonymousUser | RpcUser | None = getattr(request, "user", None) if not user or isinstance(user, AnonymousUser): - return + return None session = request.session if not session: - return + return None last_active_org = session.get("activeorg", None) if not last_active_org: - return + return None return resolve_redirect_url(request=request, org_slug=last_active_org, user_id=user.id) @@ -86,6 +87,7 @@ def dns_prefetch(self) -> list[str]: return domains def handle_react(self, request: Request, **kwargs) -> HttpResponse: + org_context = getattr(self, "active_organization", None) context = { "CSRF_COOKIE_NAME": settings.CSRF_COOKIE_NAME, "meta_tags": [ @@ -97,7 +99,8 @@ def handle_react(self, request: Request, **kwargs) -> HttpResponse: # Rendering the layout requires serializing the active organization. # Since we already have it here from the OrganizationMixin, we can # save some work and render it faster.
- "org_context": getattr(self, "active_organization", None), + "org_context": org_context, + "react_config": get_client_config(request, org_context), } # Force a new CSRF token to be generated and set in user's diff --git a/src/sentry/workflow_engine/apps.py b/src/sentry/workflow_engine/apps.py index 44d4741e9cd977..abdb88a6e562d9 100644 --- a/src/sentry/workflow_engine/apps.py +++ b/src/sentry/workflow_engine/apps.py @@ -5,4 +5,6 @@ class Config(AppConfig): name = "sentry.workflow_engine" def ready(self): + # Import our base DataConditionHandlers for the workflow engine platform + import sentry.workflow_engine.handlers # NOQA from sentry.workflow_engine.endpoints import serializers # NOQA diff --git a/src/sentry/workflow_engine/endpoints/project_detector_index.py b/src/sentry/workflow_engine/endpoints/project_detector_index.py new file mode 100644 index 00000000000000..5e4ca73e560438 --- /dev/null +++ b/src/sentry/workflow_engine/endpoints/project_detector_index.py @@ -0,0 +1,126 @@ +from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema +from rest_framework import status +from rest_framework.exceptions import ValidationError +from rest_framework.response import Response + +from sentry.api.api_owners import ApiOwner +from sentry.api.api_publish_status import ApiPublishStatus +from sentry.api.base import region_silo_endpoint +from sentry.api.bases import ProjectAlertRulePermission, ProjectEndpoint +from sentry.api.serializers import serialize +from sentry.apidocs.constants import ( + RESPONSE_BAD_REQUEST, + RESPONSE_FORBIDDEN, + RESPONSE_NOT_FOUND, + RESPONSE_UNAUTHORIZED, +) +from sentry.apidocs.parameters import GlobalParams +from sentry.issues import grouptype +from sentry.workflow_engine.endpoints.serializers import DetectorSerializer +from sentry.workflow_engine.models import Detector + + +@region_silo_endpoint +@extend_schema(tags=["Workflows"]) +class ProjectDetectorIndexEndpoint(ProjectEndpoint): + publish_status = { + "POST": ApiPublishStatus.EXPERIMENTAL, + "GET": ApiPublishStatus.EXPERIMENTAL, + } + owner = ApiOwner.ISSUES + + # TODO: We probably need a specific permission for detectors. Possibly specific detectors have different perms + # too? + permission_classes = (ProjectAlertRulePermission,) + + def _get_validator(self, request, project, group_type_slug): + detector_type = grouptype.registry.get_by_slug(group_type_slug) + if detector_type is None: + raise ValidationError({"groupType": ["Unknown group type"]}) + + if detector_type.detector_validator is None: + raise ValidationError({"groupType": ["Group type not compatible with detectors"]}) + + return detector_type.detector_validator( + context={ + "project": project, + "organization": project.organization, + "request": request, + "access": request.access, + }, + data=request.data, + ) + + @extend_schema( + operation_id="Fetch a Detector", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + ], + responses={ + 201: DetectorSerializer, + 400: RESPONSE_BAD_REQUEST, + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def get(self, request, project): + """ + List a Project's Detectors + ````````````````````````` + Return a list of detectors for a given project. 
+ """ + queryset = Detector.objects.filter( + organization_id=project.organization_id, + ).order_by("id") + + return self.paginate( + request=request, + queryset=queryset, + order_by="id", + on_results=lambda x: serialize(x, request.user), + ) + + @extend_schema( + operation_id="Create a Detector", + parameters=[ + GlobalParams.ORG_ID_OR_SLUG, + GlobalParams.PROJECT_ID_OR_SLUG, + ], + request=PolymorphicProxySerializer( + "GenericDetectorSerializer", + serializers=[ + gt.detector_validator for gt in grouptype.registry.all() if gt.detector_validator + ], + resource_type_field_name=None, + ), + responses={ + 201: DetectorSerializer, + 400: RESPONSE_BAD_REQUEST, + 401: RESPONSE_UNAUTHORIZED, + 403: RESPONSE_FORBIDDEN, + 404: RESPONSE_NOT_FOUND, + }, + ) + def post(self, request, project): + """ + Create a Detector + ```````````````` + Create a new detector for a project. + + :param string name: The name of the detector + :param string group_type: The type of detector to create + :param object data_source: Configuration for the data source + :param array data_conditions: List of conditions to trigger the detector + """ + group_type = request.data.get("group_type") + if not group_type: + raise ValidationError({"groupType": ["This field is required."]}) + + validator = self._get_validator(request, project, group_type) + if not validator.is_valid(): + return Response(validator.errors, status=status.HTTP_400_BAD_REQUEST) + + detector = validator.save() + return Response(serialize(detector, request.user), status=status.HTTP_201_CREATED) diff --git a/src/sentry/workflow_engine/endpoints/urls.py b/src/sentry/workflow_engine/endpoints/urls.py new file mode 100644 index 00000000000000..be4d46ce9083b1 --- /dev/null +++ b/src/sentry/workflow_engine/endpoints/urls.py @@ -0,0 +1,11 @@ +from django.urls import re_path + +from .project_detector_index import ProjectDetectorIndexEndpoint + +urlpatterns = [ + re_path( + r"^(?P[^\/]+)/(?P[^\/]+)/detectors/$", + ProjectDetectorIndexEndpoint.as_view(), + name="sentry-api-0-project-detector-index", + ), +] diff --git a/src/sentry/workflow_engine/handlers/__init__.py b/src/sentry/workflow_engine/handlers/__init__.py new file mode 100644 index 00000000000000..4fc3428a0ce1e4 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/__init__.py @@ -0,0 +1,5 @@ +# Export any handlers we want to include into the registry +__all__ = ["NotificationActionHandler", "GroupEventConditionHandler"] + +from .action import NotificationActionHandler +from .condition import GroupEventConditionHandler diff --git a/src/sentry/workflow_engine/handlers/action/__init__.py b/src/sentry/workflow_engine/handlers/action/__init__.py new file mode 100644 index 00000000000000..1251be1906e9b6 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/action/__init__.py @@ -0,0 +1,5 @@ +__all__ = [ + "NotificationActionHandler", +] + +from .notification import NotificationActionHandler diff --git a/src/sentry/workflow_engine/handlers/action/notification.py b/src/sentry/workflow_engine/handlers/action/notification.py new file mode 100644 index 00000000000000..97e8cf84f39818 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/action/notification.py @@ -0,0 +1,16 @@ +from sentry.eventstore.models import GroupEvent +from sentry.workflow_engine.models import Action, Detector +from sentry.workflow_engine.registry import action_handler_registry +from sentry.workflow_engine.types import ActionHandler + + +@action_handler_registry.register(Action.Type.NOTIFICATION) +class 
diff --git a/src/sentry/workflow_engine/handlers/condition/__init__.py b/src/sentry/workflow_engine/handlers/condition/__init__.py new file mode 100644 index 00000000000000..85a4596d38b75e --- /dev/null +++ b/src/sentry/workflow_engine/handlers/condition/__init__.py @@ -0,0 +1,5 @@ +__all__ = [ + "GroupEventConditionHandler", +] + +from .group_event import GroupEventConditionHandler diff --git a/src/sentry/workflow_engine/handlers/condition/group_event.py b/src/sentry/workflow_engine/handlers/condition/group_event.py new file mode 100644 index 00000000000000..e392db084cfdd9 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/condition/group_event.py @@ -0,0 +1,29 @@ +from typing import Any + +from sentry.eventstore.models import GroupEvent +from sentry.workflow_engine.models.data_condition import Condition +from sentry.workflow_engine.registry import condition_handler_registry +from sentry.workflow_engine.types import DataConditionHandler + + +def get_nested_value(data: Any, path: str, default: Any = None) -> Any | None: + try: + value = data + for part in path.split("."): + if hasattr(value, part): + value = getattr(value, part) + elif hasattr(value, "get"): + value = value.get(part) + else: + return default + return value + except Exception: + return default + + +@condition_handler_registry.register(Condition.GROUP_EVENT_ATTR_COMPARISON) +class GroupEventConditionHandler(DataConditionHandler[GroupEvent]): + @staticmethod + def evaluate_value(data: GroupEvent, comparison: Any, data_filter: str) -> bool: + event_value = get_nested_value(data, data_filter) + return event_value == comparison diff --git a/src/sentry/workflow_engine/handlers/detector/__init__.py b/src/sentry/workflow_engine/handlers/detector/__init__.py new file mode 100644 index 00000000000000..dbe4b9ce3d1441 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/detector/__init__.py @@ -0,0 +1,9 @@ +__all__ = [ + "DetectorHandler", + "DetectorEvaluationResult", + "DetectorStateData", + "StatefulDetectorHandler", +] + +from .base import DetectorEvaluationResult, DetectorHandler, DetectorStateData +from .stateful import StatefulDetectorHandler diff --git a/src/sentry/workflow_engine/handlers/detector/base.py b/src/sentry/workflow_engine/handlers/detector/base.py new file mode 100644 index 00000000000000..d0bbc183da0439 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/detector/base.py @@ -0,0 +1,69 @@ +import abc +import dataclasses +import logging +from typing import Any, Generic, TypeVar + +from sentry.issues.issue_occurrence import IssueOccurrence +from sentry.issues.status_change_message import StatusChangeMessage +from sentry.workflow_engine.models import DataConditionGroup, DataPacket, Detector +from sentry.workflow_engine.types import DetectorGroupKey, DetectorPriorityLevel + +logger = logging.getLogger(__name__) +T = TypeVar("T") + + +@dataclasses.dataclass(frozen=True) +class DetectorEvaluationResult: + group_key: DetectorGroupKey + # TODO: Are these actually necessary? We're going to produce the occurrence in the detector, so we probably don't + # need to know the other results externally + is_active: bool + priority: DetectorPriorityLevel + # TODO: This is only temporarily optional.
We should always have a value here if returning a result + result: IssueOccurrence | StatusChangeMessage | None = None + # Event data to supplement the `IssueOccurrence`, if passed. + event_data: dict[str, Any] | None = None + + +@dataclasses.dataclass(frozen=True) +class DetectorStateData: + group_key: DetectorGroupKey + active: bool + status: DetectorPriorityLevel + # Stateful detectors always process data packets in order. Once we confirm that a data packet has been fully + # processed and all workflows have been done, this value will be used by the stateful detector to prevent + # reprocessing + dedupe_value: int + # Stateful detectors allow various counts to be tracked. We need to update these after we process workflows, so + # include the updates in the state. + # This dictionary is in the format {counter_name: counter_value, ...} + # If a counter value is `None` it means to unset the value + counter_updates: dict[str, int | None] + + +class DetectorHandler(abc.ABC, Generic[T]): + def __init__(self, detector: Detector): + self.detector = detector + if detector.workflow_condition_group_id is not None: + try: + group = DataConditionGroup.objects.get_from_cache( + id=detector.workflow_condition_group_id + ) + self.condition_group: DataConditionGroup | None = group + except DataConditionGroup.DoesNotExist: + logger.exception( + "Failed to find the data condition group for detector", + extra={"detector_id": detector.id}, + ) + self.condition_group = None + else: + self.condition_group = None + + @abc.abstractmethod + def evaluate( + self, data_packet: DataPacket[T] + ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: + pass + + def commit_state_updates(self): + pass diff --git a/src/sentry/workflow_engine/handlers/detector/stateful.py b/src/sentry/workflow_engine/handlers/detector/stateful.py new file mode 100644 index 00000000000000..8171787ad38d46 --- /dev/null +++ b/src/sentry/workflow_engine/handlers/detector/stateful.py @@ -0,0 +1,312 @@ +import abc +from datetime import timedelta +from typing import Any, TypeVar + +from django.conf import settings +from django.db.models import Q +from sentry_redis_tools.retrying_cluster import RetryingRedisCluster + +from sentry.issues.issue_occurrence import IssueOccurrence +from sentry.issues.status_change_message import StatusChangeMessage +from sentry.models.group import GroupStatus +from sentry.types.group import PriorityLevel +from sentry.utils import metrics, redis +from sentry.utils.iterators import chunked +from sentry.workflow_engine.handlers.detector.base import ( + DetectorEvaluationResult, + DetectorHandler, + DetectorStateData, +) +from sentry.workflow_engine.models import DataPacket, Detector, DetectorState +from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group +from sentry.workflow_engine.types import DetectorGroupKey, DetectorPriorityLevel + +T = TypeVar("T") +REDIS_TTL = int(timedelta(days=7).total_seconds()) + + +def get_redis_client() -> RetryingRedisCluster: + cluster_key = settings.SENTRY_WORKFLOW_ENGINE_REDIS_CLUSTER + return redis.redis_clusters.get(cluster_key) # type: ignore[return-value] + + +class StatefulDetectorHandler(DetectorHandler[T], abc.ABC): + def __init__(self, detector: Detector): + super().__init__(detector) + self.dedupe_updates: dict[DetectorGroupKey, int] = {} + self.counter_updates: dict[DetectorGroupKey, dict[str, int | None]] = {} + self.state_updates: dict[DetectorGroupKey, tuple[bool, DetectorPriorityLevel]] = {} + + @property + @abc.abstractmethod + def 
counter_names(self) -> list[str]: + """ + The names of counters that this detector is going to keep track of. + """ + pass + + @abc.abstractmethod + def get_dedupe_value(self, data_packet: DataPacket[T]) -> int: + """ + Extracts the deduplication value from a passed data packet. + TODO: This might belong on the `DataPacket` instead. + """ + pass + + @abc.abstractmethod + def get_group_key_values(self, data_packet: DataPacket[T]) -> dict[str, int]: + """ + Extracts the values for all the group keys that exist in the given data packet, + and returns them as a dict keyed by group_key. + """ + pass + + @abc.abstractmethod + def build_occurrence_and_event_data( + self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel + ) -> tuple[IssueOccurrence, dict[str, Any]]: + pass + + def build_fingerprint(self, group_key) -> list[str]: + """ + Builds a fingerprint to uniquely identify a detected issue + """ + return [f"{self.detector.id}{':' + group_key if group_key is not None else ''}"] + + def get_state_data( + self, group_keys: list[DetectorGroupKey] + ) -> dict[DetectorGroupKey, DetectorStateData]: + """ + Fetches state data associated with this detector for the given `group_keys`. + Returns a dict keyed by each group_key with the fetched `DetectorStateData`. + If data isn't currently stored, falls back to default values. + """ + group_key_detectors = self.bulk_get_detector_state(group_keys) + dedupe_keys = [self.build_dedupe_value_key(gk) for gk in group_keys] + pipeline = get_redis_client().pipeline() + for dk in dedupe_keys: + pipeline.get(dk) + group_key_dedupe_values = { + gk: int(dv) if dv else 0 for gk, dv in zip(group_keys, pipeline.execute()) + } + pipeline.reset() + counter_updates = {} + if self.counter_names: + counter_keys = [ + self.build_counter_value_key(gk, name) + for gk in group_keys + for name in self.counter_names + ] + for ck in counter_keys: + pipeline.get(ck) + vals = [int(val) if val is not None else val for val in pipeline.execute()] + counter_updates = { + gk: dict(zip(self.counter_names, values)) + for gk, values in zip(group_keys, chunked(vals, len(self.counter_names))) + } + + results = {} + for gk in group_keys: + detector_state = group_key_detectors.get(gk) + results[gk] = DetectorStateData( + group_key=gk, + active=detector_state.active if detector_state else False, + status=( + DetectorPriorityLevel(int(detector_state.state)) + if detector_state + else DetectorPriorityLevel.OK + ), + dedupe_value=group_key_dedupe_values[gk], + counter_updates=counter_updates[gk], + ) + return results + + def evaluate( + self, data_packet: DataPacket[T] + ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: + """ + Evaluates a given data packet and returns a dict of `DetectorEvaluationResult`, keyed by group key. + There will be one result for each group key in the packet, unless the + evaluation is skipped due to various rules.
+ """ + dedupe_value = self.get_dedupe_value(data_packet) + group_values = self.get_group_key_values(data_packet) + all_state_data = self.get_state_data(list(group_values.keys())) + results = {} + for group_key, group_value in group_values.items(): + result = self.evaluate_group_key_value( + group_key, group_value, all_state_data[group_key], dedupe_value + ) + if result: + results[result.group_key] = result + return results + + def evaluate_group_key_value( + self, + group_key: DetectorGroupKey, + value: int, + state_data: DetectorStateData, + dedupe_value: int, + ) -> DetectorEvaluationResult | None: + """ + Evaluates a value associated with a given `group_key` and returns a `DetectorEvaluationResult` with the results + and any state changes that need to be made. + + Checks that we haven't already processed this datapacket for this group_key, and skips evaluation if we have. + """ + if dedupe_value <= state_data.dedupe_value: + # TODO: Does it actually make more sense to just do this at the data packet level rather than the group + # key level? + metrics.incr("workflow_engine.detector.skipping_already_processed_update") + return None + + self.enqueue_dedupe_update(group_key, dedupe_value) + + if not self.condition_group: + metrics.incr("workflow_engine.detector.skipping_invalid_condition_group") + return None + + # TODO: We need to handle tracking consecutive evaluations before emitting a result here. We're able to + # store these in `DetectorStateData.counter_updates`, but we don't have anywhere to set the required + # thresholds at the moment. Probably should be a field on the Detector? Could also be on the condition + # level, but usually we want to set this at a higher level. + new_status = DetectorPriorityLevel.OK + is_group_condition_met, condition_results = evaluate_condition_group( + self.condition_group, value + ) + + if is_group_condition_met: + validated_condition_results: list[DetectorPriorityLevel] = [ + result + for result in condition_results + if result is not None and isinstance(result, DetectorPriorityLevel) + ] + + new_status = max(new_status, *validated_condition_results) + + # TODO: We'll increment and change these later, but for now they don't change so just pass an empty dict + self.enqueue_counter_update(group_key, {}) + + if state_data.status != new_status: + is_active = new_status != DetectorPriorityLevel.OK + self.enqueue_state_update(group_key, is_active, new_status) + event_data = None + result: StatusChangeMessage | IssueOccurrence + if new_status == DetectorPriorityLevel.OK: + # If we've determined that we're now ok, we just want to resolve the issue + result = StatusChangeMessage( + fingerprint=self.build_fingerprint(group_key), + project_id=self.detector.project_id, + new_status=GroupStatus.RESOLVED, + new_substatus=None, + ) + else: + result, event_data = self.build_occurrence_and_event_data( + group_key, value, PriorityLevel(new_status) + ) + return DetectorEvaluationResult( + group_key=group_key, + is_active=is_active, + priority=new_status, + result=result, + event_data=event_data, + ) + return None + + def enqueue_dedupe_update(self, group_key: DetectorGroupKey, dedupe_value: int): + self.dedupe_updates[group_key] = dedupe_value + + def enqueue_counter_update( + self, group_key: DetectorGroupKey, counter_updates: dict[str, int | None] + ): + self.counter_updates[group_key] = counter_updates + + def enqueue_state_update( + self, group_key: DetectorGroupKey, is_active: bool, priority: DetectorPriorityLevel + ): + self.state_updates[group_key] = 
(is_active, priority) + + def build_dedupe_value_key(self, group_key: DetectorGroupKey) -> str: + if group_key is None: + group_key = "" + return f"{self.detector.id}:{group_key}:dedupe_value" + + def build_counter_value_key(self, group_key: DetectorGroupKey, counter_name: str) -> str: + if group_key is None: + group_key = "" + return f"{self.detector.id}:{group_key}:{counter_name}" + + def bulk_get_detector_state( + self, group_keys: list[DetectorGroupKey] + ) -> dict[DetectorGroupKey, DetectorState]: + """ + Bulk fetches detector state for the passed `group_keys`. Returns a dict keyed by each + `group_key` with the fetched `DetectorStateData`. + + If there's no `DetectorState` row for a `detector`/`group_key` pair then we'll exclude + the group_key from the returned dict. + """ + # TODO: Cache this query (or individual fetches, then bulk fetch anything missing) + query_filter = Q( + detector_group_key__in=[group_key for group_key in group_keys if group_key is not None] + ) + if None in group_keys: + query_filter |= Q(detector_group_key__isnull=True) + + return { + detector_state.detector_group_key: detector_state + for detector_state in self.detector.detectorstate_set.filter(query_filter) + } + + def commit_state_updates(self): + self._bulk_commit_detector_state() + self._bulk_commit_redis_state() + + def _bulk_commit_redis_state(self): + pipeline = get_redis_client().pipeline() + if self.dedupe_updates: + for group_key, dedupe_value in self.dedupe_updates.items(): + pipeline.set(self.build_dedupe_value_key(group_key), dedupe_value, ex=REDIS_TTL) + + if self.counter_updates: + for group_key, counter_updates in self.counter_updates.items(): + for counter_name, counter_value in counter_updates.items(): + key_name = self.build_counter_value_key(group_key, counter_name) + if counter_value is None: + pipeline.delete(key_name) + else: + pipeline.set(key_name, counter_value, ex=REDIS_TTL) + + pipeline.execute() + self.dedupe_updates.clear() + self.counter_updates.clear() + + def _bulk_commit_detector_state(self): + # TODO: We should already have these loaded from earlier, figure out how to cache and reuse + detector_state_lookup = self.bulk_get_detector_state( + [update for update in self.state_updates.keys()] + ) + created_detector_states = [] + updated_detector_states = [] + for group_key, (active, priority) in self.state_updates.items(): + detector_state = detector_state_lookup.get(group_key) + if not detector_state: + created_detector_states.append( + DetectorState( + detector_group_key=group_key, + detector=self.detector, + active=active, + state=priority, + ) + ) + elif active != detector_state.active or priority != detector_state.state: + detector_state.active = active + detector_state.state = priority + updated_detector_states.append(detector_state) + + if created_detector_states: + DetectorState.objects.bulk_create(created_detector_states) + + if updated_detector_states: + DetectorState.objects.bulk_update(updated_detector_states, ["active", "state"]) + self.state_updates.clear() diff --git a/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py b/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py index 51288a9ed8f74e..8d5a41873bbe77 100644 --- a/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py +++ b/src/sentry/workflow_engine/migrations/0010_detector_state_unique_group.py @@ -21,6 +21,8 @@ class Migration(CheckedMigration): is_post_deployment = False + allow_run_sql = True + dependencies = [ ("workflow_engine", 
"0009_detector_type"), ] diff --git a/src/sentry/workflow_engine/migrations/0013_related_name_conditions_on_dcg.py b/src/sentry/workflow_engine/migrations/0013_related_name_conditions_on_dcg.py new file mode 100644 index 00000000000000..90c205e701e213 --- /dev/null +++ b/src/sentry/workflow_engine/migrations/0013_related_name_conditions_on_dcg.py @@ -0,0 +1,38 @@ +# Generated by Django 5.1.1 on 2024-11-15 03:05 + +import django.db.models.deletion +from django.db import migrations, models + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. + # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("workflow_engine", "0012_data_source_type_change"), + ] + + operations = [ + migrations.AlterField( + model_name="datacondition", + name="condition_group", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="conditions", + to="workflow_engine.dataconditiongroup", + ), + ), + ] diff --git a/src/sentry/workflow_engine/migrations/0014_model_additions_for_milestones.py b/src/sentry/workflow_engine/migrations/0014_model_additions_for_milestones.py new file mode 100644 index 00000000000000..00ab4ba74e3098 --- /dev/null +++ b/src/sentry/workflow_engine/migrations/0014_model_additions_for_milestones.py @@ -0,0 +1,120 @@ +# Generated by Django 5.1.1 on 2024-11-21 21:05 + +import django.db.models.deletion +from django.db import migrations, models + +import sentry.db.models.fields.foreignkey +import sentry.db.models.fields.hybrid_cloud_foreign_key +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. + # This should only be used for operations where it's safe to run the migration after your + # code has deployed. So this should not be used for most operations that alter the schema + # of a table. + # Here are some things that make sense to mark as post deployment: + # - Large data migrations. Typically we want these to be run manually so that they can be + # monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # run this outside deployments so that we don't block them. Note that while adding an index + # is a schema change, it's completely safe to run the operation after the code has deployed. 
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment + + is_post_deployment = False + + dependencies = [ + ("sentry", "0792_add_unique_index_apiauthorization"), + ("workflow_engine", "0013_related_name_conditions_on_dcg"), + ] + + operations = [ + migrations.AddField( + model_name="detector", + name="config", + field=models.JSONField(db_default={}), + ), + migrations.AddField( + model_name="detector", + name="created_by_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + migrations.AddField( + model_name="detector", + name="description", + field=models.TextField(null=True), + ), + migrations.AddField( + model_name="detector", + name="enabled", + field=models.BooleanField(db_default=True), + ), + migrations.AddField( + model_name="detector", + name="project", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.project" + ), + ), + migrations.AddField( + model_name="workflow", + name="config", + field=models.JSONField(db_default={}), + ), + migrations.AddField( + model_name="workflow", + name="created_by_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + migrations.AddField( + model_name="workflow", + name="enabled", + field=models.BooleanField(db_default=True), + ), + migrations.AddField( + model_name="workflow", + name="environment", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.environment" + ), + ), + migrations.AddField( + model_name="workflow", + name="owner_team", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.SET_NULL, to="sentry.team" + ), + ), + migrations.AddField( + model_name="workflow", + name="owner_user_id", + field=sentry.db.models.fields.hybrid_cloud_foreign_key.HybridCloudForeignKey( + "sentry.User", db_index=True, null=True, on_delete="SET_NULL" + ), + ), + migrations.AlterField( + model_name="action", + name="required", + field=models.BooleanField(default=False, null=True), + ), + migrations.AlterField( + model_name="detector", + name="organization", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.organization" + ), + ), + migrations.AlterField( + model_name="workflow", + name="when_condition_group", + field=sentry.db.models.fields.foreignkey.FlexibleForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="workflow_engine.dataconditiongroup", + ), + ), + ] diff --git a/src/sentry/workflow_engine/models/action.py b/src/sentry/workflow_engine/models/action.py index bd2e5e3694daf3..027ef5cf11c933 100644 --- a/src/sentry/workflow_engine/models/action.py +++ b/src/sentry/workflow_engine/models/action.py @@ -1,9 +1,19 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + from django.db import models from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey -from sentry.models.notificationaction import ActionTarget +from sentry.eventstore.models import GroupEvent +from 
sentry.notifications.models.notificationaction import ActionTarget +from sentry.workflow_engine.registry import action_handler_registry +from sentry.workflow_engine.types import ActionHandler + +if TYPE_CHECKING: + from sentry.workflow_engine.models import Detector @region_silo_model @@ -17,24 +27,19 @@ class Action(DefaultFieldsModel): """ __relocation_scope__ = RelocationScope.Excluded - __repr__ = sane_repr("workflow_id", "type") + __repr__ = sane_repr("id", "type") - # TODO (@saponifi3d): Don't hardcode these values, and these are incomplete values class Type(models.TextChoices): - Notification = "SendNotificationAction" - TriggerWorkflow = "TriggerWorkflowAction" - - """ - Required actions cannot be disabled by the user, and will not be displayed in the UI. - These actions will be used internally, to trigger other aspects of the system. - For example, creating a new issue in the Issue Platform or a detector emitting an event. - """ - required = models.BooleanField(default=False) + NOTIFICATION = "notification" + WEBHOOK = "webhook" # The type field is used to denote the type of action we want to trigger type = models.TextField(choices=Type.choices) data = models.JSONField(default=dict) + # TODO - finish removing this field + required = models.BooleanField(default=False, null=True) + # LEGACY: The integration_id is used to map the integration_id found in the AlertRuleTriggerAction # This allows us to map the way we're saving the notification channels to the action. integration_id = HybridCloudForeignKey( @@ -49,3 +54,12 @@ class Type(models.TextChoices): # LEGACY: This is used to denote if the Notification is going to a user, team, sentry app, etc target_type = models.SmallIntegerField(choices=ActionTarget.as_choices(), null=True) + + def get_handler(self) -> ActionHandler: + action_type = Action.Type(self.type) + return action_handler_registry.get(action_type) + + def trigger(self, evt: GroupEvent, detector: Detector) -> None: + # get the handler for the action type + handler = self.get_handler() + handler.execute(evt, self, detector) diff --git a/src/sentry/workflow_engine/models/data_condition.py b/src/sentry/workflow_engine/models/data_condition.py index a39ddc5d41646a..9ed3efc66c5c55 100644 --- a/src/sentry/workflow_engine/models/data_condition.py +++ b/src/sentry/workflow_engine/models/data_condition.py @@ -1,13 +1,20 @@ import logging import operator +from collections.abc import Callable from enum import StrEnum +from typing import Any, TypeVar, cast from django.db import models from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr -from sentry.workflow_engine.models.data_condition_group import DataConditionGroup -from sentry.workflow_engine.types import DataConditionResult, DetectorPriorityLevel +from sentry.utils.registry import NoRegistrationExistsError +from sentry.workflow_engine.registry import condition_handler_registry +from sentry.workflow_engine.types import ( + DataConditionHandler, + DataConditionResult, + DetectorPriorityLevel, +) logger = logging.getLogger(__name__) @@ -19,6 +26,7 @@ class Condition(StrEnum): LESS_OR_EQUAL = "lte" LESS = "lt" NOT_EQUAL = "ne" + GROUP_EVENT_ATTR_COMPARISON = "group_event_attr_comparison" condition_ops = { @@ -30,6 +38,8 @@ class Condition(StrEnum): Condition.NOT_EQUAL: operator.ne, } +T = TypeVar("T") + @region_silo_model class DataCondition(DefaultFieldsModel): @@ -38,7 +48,7 @@ class DataCondition(DefaultFieldsModel): """ __relocation_scope__ = 
RelocationScope.Organization - __repr__ = sane_repr("type", "condition") + __repr__ = sane_repr("type", "condition", "condition_group") # The condition is the logic condition that needs to be met, gt, lt, eq, etc. condition = models.CharField(max_length=200) @@ -53,7 +63,8 @@ class DataCondition(DefaultFieldsModel): type = models.CharField(max_length=200) condition_group = models.ForeignKey( - DataConditionGroup, + "workflow_engine.DataConditionGroup", + related_name="conditions", on_delete=models.CASCADE, ) @@ -74,32 +85,44 @@ def get_condition_result(self) -> DataConditionResult: return None - def evaluate_value(self, value: float | int) -> DataConditionResult: - # TODO: This logic should be in a condition class that we get from `self.type` - # TODO: This evaluation logic should probably go into the condition class, and we just produce a condition - # class from this model + def get_condition_handler(self) -> DataConditionHandler[T] | None: try: - condition = Condition(self.condition) + condition_type = Condition(self.type) except ValueError: - logger.exception( - "Invalid condition", extra={"condition": self.condition, "id": self.id} - ) - return None + # If the type isn't a valid Condition, it won't be in the registry either. + raise NoRegistrationExistsError(f"No registration exists for {self.type}") - op = condition_ops.get(condition) - if op is None: - logger.error("Invalid condition", extra={"condition": self.condition, "id": self.id}) - return None + return condition_handler_registry.get(condition_type) + + def evaluate_value(self, value: T) -> DataConditionResult: + condition_handler: DataConditionHandler[T] | None = None + op: Callable | None = None try: - comparison = float(self.comparison) - except ValueError: - logger.exception( - "Invalid comparison value", extra={"comparison": self.comparison, "id": self.id} + # Use a custom handler + condition_handler = self.get_condition_handler() + except NoRegistrationExistsError: + # If it's not a custom handler, use the default operators + condition = Condition(self.condition) + op = condition_ops.get(condition, None) + + if condition_handler is not None: + result = condition_handler.evaluate_value(value, self.comparison, self.condition) + elif op is not None: + result = op(cast(Any, value), self.comparison) + else: + logger.error( + "Invalid Data Condition Evaluation", + extra={ + "id": self.id, + "type": self.type, + "condition": self.condition, + }, ) + return None - if op(value, comparison): + if result: return self.get_condition_result() return None diff --git a/src/sentry/workflow_engine/models/data_condition_group.py b/src/sentry/workflow_engine/models/data_condition_group.py index 3e29aca4b5e69e..78c3177cffd2ad 100644 --- a/src/sentry/workflow_engine/models/data_condition_group.py +++ b/src/sentry/workflow_engine/models/data_condition_group.py @@ -1,7 +1,10 @@ +from typing import ClassVar, Self + from django.db import models from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, region_silo_model, sane_repr +from sentry.db.models.manager.base import BaseManager @region_silo_model @@ -10,12 +13,22 @@ class DataConditionGroup(DefaultFieldsModel): A data group is a way to specify a group of conditions that must be met for a workflow action to execute """ + objects: ClassVar[BaseManager[Self]] = BaseManager(cache_fields=["id"]) + __relocation_scope__ = RelocationScope.Organization __repr__ = sane_repr("logic_type") class Type(models.TextChoices): + # ANY will evaluate all conditions,
and return true if any of those are met ANY = "any" + + # ANY_SHORT_CIRCUIT will stop evaluating conditions as soon as one is met + ANY_SHORT_CIRCUIT = "any-short" + + # ALL will evaluate all conditions, and return true if all of those are met ALL = "all" + + # NONE will return true if none of the conditions are met, will return false immediately if any are met NONE = "none" logic_type = models.CharField(max_length=200, choices=Type.choices, default=Type.ANY) diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py index 8a17ee38e8ea56..c11a3851b608ce 100644 --- a/src/sentry/workflow_engine/models/detector.py +++ b/src/sentry/workflow_engine/models/detector.py @@ -4,26 +4,33 @@ import logging from typing import TYPE_CHECKING, Any +from django.conf import settings from django.db import models from django.db.models import UniqueConstraint from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model +from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.issues import grouptype from sentry.issues.grouptype import GroupType from sentry.models.owner_base import OwnerModel +from .json_config import JSONConfigBase + if TYPE_CHECKING: - from sentry.workflow_engine.processors.detector import DetectorHandler + from sentry.workflow_engine.handlers.detector import DetectorHandler logger = logging.getLogger(__name__) @region_silo_model -class Detector(DefaultFieldsModel, OwnerModel): +class Detector(DefaultFieldsModel, OwnerModel, JSONConfigBase): __relocation_scope__ = RelocationScope.Organization - organization = FlexibleForeignKey("sentry.Organization") + # TODO - Finish removing this field + organization = FlexibleForeignKey("sentry.Organization", on_delete=models.CASCADE, null=True) + + project = FlexibleForeignKey("sentry.Project", on_delete=models.CASCADE, null=True) name = models.CharField(max_length=200) # The data sources that the detector is watching @@ -31,7 +38,12 @@ class Detector(DefaultFieldsModel, OwnerModel): "workflow_engine.DataSource", through="workflow_engine.DataSourceDetector" ) - # The conditions that must be met for the detector to be considered 'active' + # If the detector is not enabled, it will not be evaluated. This is how we "snooze" a detector + enabled = models.BooleanField(db_default=True) + + # Optionally set a description of the detector, this will be used in notifications + description = models.TextField(null=True) + # This will emit an event for the workflow to process workflow_condition_group = FlexibleForeignKey( "workflow_engine.DataConditionGroup", @@ -40,8 +52,18 @@ class Detector(DefaultFieldsModel, OwnerModel): unique=True, on_delete=models.SET_NULL, ) + + # The type of detector that is being used, this is used to determine the class + # to load for the detector type = models.CharField(max_length=200) + # The user that created the detector + created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") + + @property + def CONFIG_SCHEMA(self) -> dict[str, Any]: + raise NotImplementedError('Subclasses must define a "CONFIG_SCHEMA" attribute') + class Meta(OwnerModel.Meta): constraints = OwnerModel.Meta.constraints + [ UniqueConstraint( @@ -50,11 +72,6 @@ class Meta(OwnerModel.Meta): ) ] - @property - def project_id(self): - # XXX: Temporary property until we add `project_id` to the model. 
- return 1 - @property def group_type(self) -> builtins.type[GroupType] | None: return grouptype.registry.get_by_slug(self.type) diff --git a/src/sentry/workflow_engine/models/json_config.py b/src/sentry/workflow_engine/models/json_config.py new file mode 100644 index 00000000000000..1b353ccf18cbcb --- /dev/null +++ b/src/sentry/workflow_engine/models/json_config.py @@ -0,0 +1,22 @@ +from abc import abstractproperty +from typing import Any + +from django.db import models +from jsonschema import ValidationError, validate + + +class JSONConfigBase(models.Model): + config = models.JSONField(db_default={}) + + @abstractproperty + def CONFIG_SCHEMA(self) -> dict[str, Any]: + pass + + def validate_config(self) -> None: + try: + validate(self.config, self.CONFIG_SCHEMA) + except ValidationError as e: + raise ValidationError(f"Invalid config: {e.message}") + + class Meta: + abstract = True diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py index 4cb8fde6721cf4..13483e258f1532 100644 --- a/src/sentry/workflow_engine/models/workflow.py +++ b/src/sentry/workflow_engine/models/workflow.py @@ -1,13 +1,20 @@ +from typing import Any + +from django.conf import settings from django.db import models from sentry.backup.scopes import RelocationScope from sentry.db.models import DefaultFieldsModel, FlexibleForeignKey, region_silo_model, sane_repr +from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey +from sentry.eventstore.models import GroupEvent +from sentry.models.owner_base import OwnerModel +from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group -from .data_condition_group import DataConditionGroup +from .json_config import JSONConfigBase @region_silo_model -class Workflow(DefaultFieldsModel): +class Workflow(DefaultFieldsModel, OwnerModel, JSONConfigBase): """ A workflow is a way to execute actions in a specified order. Workflows are initiated after detectors have been processed, driven by changes to their state. @@ -17,8 +24,19 @@ class Workflow(DefaultFieldsModel): name = models.CharField(max_length=200) organization = FlexibleForeignKey("sentry.Organization") + # If the workflow is not enabled, it will not be evaluated / invoke actions. This is how we "snooze" a workflow + enabled = models.BooleanField(db_default=True) + # Required as the 'when' condition for the workflow, this evalutes states emitted from the detectors - when_condition_group = FlexibleForeignKey(DataConditionGroup, blank=True, null=True) + when_condition_group = FlexibleForeignKey("workflow_engine.DataConditionGroup", null=True) + + environment = FlexibleForeignKey("sentry.Environment", null=True) + + created_by_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, null=True, on_delete="SET_NULL") + + @property + def CONFIG_SCHEMA(self) -> dict[str, Any]: + raise NotImplementedError('Subclasses must define a "CONFIG_SCHEMA" attribute') __repr__ = sane_repr("name", "organization_id") @@ -31,3 +49,14 @@ class Meta: fields=["name", "organization"], name="unique_workflow_name_per_org" ) ] + + def evaluate_trigger_conditions(self, evt: GroupEvent) -> bool: + """ + Evaluate the conditions for the workflow trigger and return if the evaluation was successful. + If there aren't any workflow trigger conditions, the workflow is considered triggered. 
+ """ + if self.when_condition_group is None: + return True + + evaluation, _ = evaluate_condition_group(self.when_condition_group, evt) + return evaluation diff --git a/src/sentry/workflow_engine/processors/action.py b/src/sentry/workflow_engine/processors/action.py new file mode 100644 index 00000000000000..0e57ee44441aea --- /dev/null +++ b/src/sentry/workflow_engine/processors/action.py @@ -0,0 +1,28 @@ +from sentry.db.models.manager.base_query_set import BaseQuerySet +from sentry.eventstore.models import GroupEvent +from sentry.workflow_engine.models import Action, DataConditionGroup, Workflow +from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group + + +def evaluate_workflow_action_filters( + workflows: set[Workflow], evt: GroupEvent +) -> BaseQuerySet[Action]: + filtered_action_groups: set[DataConditionGroup] = set() + + # gets the list of the workflow ids, and then get the workflow_data_condition_groups for those workflows + workflow_ids = {workflow.id for workflow in workflows} + + action_conditions = DataConditionGroup.objects.filter( + workflowdataconditiongroup__workflow_id__in=workflow_ids + ).distinct() + + for action_condition in action_conditions: + evaluation, result = evaluate_condition_group(action_condition, evt) + + if evaluation: + filtered_action_groups.add(action_condition) + + # get the actions for any of the triggered data condition groups + return Action.objects.filter( + dataconditiongroupaction__condition_group__in=filtered_action_groups + ).distinct() diff --git a/src/sentry/workflow_engine/processors/data_condition_group.py b/src/sentry/workflow_engine/processors/data_condition_group.py new file mode 100644 index 00000000000000..637e91c5a6b34d --- /dev/null +++ b/src/sentry/workflow_engine/processors/data_condition_group.py @@ -0,0 +1,87 @@ +import logging +from typing import Any, TypeVar + +from sentry.utils.function_cache import cache_func_for_models +from sentry.workflow_engine.models import DataCondition, DataConditionGroup +from sentry.workflow_engine.types import ProcessedDataConditionResult + +logger = logging.getLogger(__name__) + +T = TypeVar("T") + + +@cache_func_for_models( + [(DataCondition, lambda condition: (condition.condition_group_id,))], + recalculate=False, +) +def get_data_conditions_for_group(data_condition_group_id: int) -> list[DataCondition]: + return list(DataCondition.objects.filter(condition_group_id=data_condition_group_id)) + + +def evaluate_condition_group( + data_condition_group: DataConditionGroup, + value: T, +) -> ProcessedDataConditionResult: + """ + Evaluate the conditions for a given group and value. 
+ """ + results = [] + conditions = get_data_conditions_for_group(data_condition_group.id) + + # TODO - @saponifi3d + # Split the conditions into fast and slow conditions + # Evaluate the fast conditions first, if any are met, return early + # Enqueue the slow conditions to be evaluated later + + if len(conditions) == 0: + # if we don't have any conditions, always return True + return True, [] + + for condition in conditions: + evaluation_result = condition.evaluate_value(value) + is_condition_triggered = evaluation_result is not None + + if is_condition_triggered: + # Check for short-circuiting evaluations + if data_condition_group.logic_type == data_condition_group.Type.ANY_SHORT_CIRCUIT: + return is_condition_triggered, [evaluation_result] + + if data_condition_group.logic_type == data_condition_group.Type.NONE: + return False, [] + + results.append((is_condition_triggered, evaluation_result)) + + if data_condition_group.logic_type == data_condition_group.Type.NONE: + # if we get to this point, no conditions were met + return True, [] + elif data_condition_group.logic_type == data_condition_group.Type.ANY: + is_any_condition_met = any([result[0] for result in results]) + + if is_any_condition_met: + condition_results = [result[1] for result in results if result[0]] + return is_any_condition_met, condition_results + elif data_condition_group.logic_type == data_condition_group.Type.ALL: + conditions_met = [result[0] for result in results] + is_all_conditions_met = all(conditions_met) + + if is_all_conditions_met: + condition_results = [result[1] for result in results if result[0]] + return is_all_conditions_met, condition_results + + return False, [] + + +def process_data_condition_group( + data_condition_group_id: int, + value: Any, +) -> ProcessedDataConditionResult: + try: + group = DataConditionGroup.objects.get_from_cache(id=data_condition_group_id) + except DataConditionGroup.DoesNotExist: + logger.exception( + "DataConditionGroup does not exist", + extra={"id": data_condition_group_id}, + ) + return False, [] + + return evaluate_condition_group(group, value) diff --git a/src/sentry/workflow_engine/processors/detector.py b/src/sentry/workflow_engine/processors/detector.py index 995d31a949c185..60bb85e0988190 100644 --- a/src/sentry/workflow_engine/processors/detector.py +++ b/src/sentry/workflow_engine/processors/detector.py @@ -1,73 +1,27 @@ from __future__ import annotations -import abc -import dataclasses import logging -from datetime import timedelta -from typing import Any, Generic, TypeVar - -from django.conf import settings -from django.db.models import Q -from sentry_redis_tools.retrying_cluster import RetryingRedisCluster +from sentry.eventstore.models import GroupEvent from sentry.issues.issue_occurrence import IssueOccurrence from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka -from sentry.issues.status_change_message import StatusChangeMessage -from sentry.models.group import GroupStatus -from sentry.types.group import PriorityLevel -from sentry.utils import metrics, redis -from sentry.utils.function_cache import cache_func_for_models -from sentry.utils.iterators import chunked -from sentry.workflow_engine.models import ( - DataCondition, - DataConditionGroup, - DataPacket, - Detector, - DetectorState, -) -from sentry.workflow_engine.types import DetectorGroupKey, DetectorPriorityLevel +from sentry.workflow_engine.handlers.detector import DetectorEvaluationResult +from sentry.workflow_engine.models import DataPacket, Detector +from 
sentry.workflow_engine.types import DetectorGroupKey, DetectorType logger = logging.getLogger(__name__) -REDIS_TTL = int(timedelta(days=7).total_seconds()) - - -@dataclasses.dataclass(frozen=True) -class DetectorEvaluationResult: - group_key: DetectorGroupKey - # TODO: Are these actually necessary? We're going to produce the occurrence in the detector, so we probably don't - # need to know the other results externally - is_active: bool - priority: DetectorPriorityLevel - # TODO: This is only temporarily optional. We should always have a value here if returning a result - result: IssueOccurrence | StatusChangeMessage | None = None - # Event data to supplement the `IssueOccurrence`, if passed. - event_data: dict[str, Any] | None = None - - -def process_detectors( - data_packet: DataPacket, detectors: list[Detector] -) -> list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]]: - results = [] - - for detector in detectors: - handler = detector.detector_handler - - if not handler: - continue - detector_results = handler.evaluate(data_packet) - - for result in detector_results.values(): - if result.result is not None: - create_issue_occurrence_from_result(result) - if detector_results: - results.append((detector, detector_results)) +# TODO - cache these by evt.group_id? :thinking: +def get_detector_by_event(evt: GroupEvent) -> Detector: + issue_occurrence = evt.occurrence - # Now that we've processed all results for this detector, commit any state changes - handler.commit_state_updates() + if issue_occurrence is None: + detector = Detector.objects.get(project_id=evt.project_id, type=DetectorType.ERROR) + else: + detector = Detector.objects.get(id=issue_occurrence.evidence_data.get("detector_id", None)) - return results + return detector def create_issue_occurrence_from_result(result: DetectorEvaluationResult): @@ -87,344 +41,30 @@ def create_issue_occurrence_from_result(result: DetectorEvaluationResult): ) -def get_redis_client() -> RetryingRedisCluster: - cluster_key = settings.SENTRY_WORKFLOW_ENGINE_REDIS_CLUSTER - return redis.redis_clusters.get(cluster_key) # type: ignore[return-value] - - -@dataclasses.dataclass(frozen=True) -class DetectorStateData: - group_key: DetectorGroupKey - active: bool - status: DetectorPriorityLevel - # Stateful detectors always process data packets in order. Once we confirm that a data packet has been fully - # processed and all workflows have been done, this value will be used by the stateful detector to prevent - # reprocessing - dedupe_value: int - # Stateful detectors allow various counts to be tracked. We need to update these after we process workflows, so - # include the updates in the state. 
- # This dictionary is in the format {counter_name: counter_value, ...} - # If a counter value is `None` it means to unset the value - counter_updates: dict[str, int | None] - - -T = TypeVar("T") - - -class DetectorHandler(abc.ABC, Generic[T]): - def __init__(self, detector: Detector): - self.detector = detector - if detector.workflow_condition_group_id is not None: - results = get_data_group_conditions_and_group(detector.workflow_condition_group_id) - self.condition_group: DataConditionGroup | None = results[0] - self.conditions: list[DataCondition] = results[1] - else: - self.condition_group = None - self.conditions = [] - - @abc.abstractmethod - def evaluate( - self, data_packet: DataPacket[T] - ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: - pass - - def commit_state_updates(self): - pass - - -class StatefulDetectorHandler(DetectorHandler[T], abc.ABC): - def __init__(self, detector: Detector): - super().__init__(detector) - self.dedupe_updates: dict[DetectorGroupKey, int] = {} - self.counter_updates: dict[DetectorGroupKey, dict[str, int | None]] = {} - self.state_updates: dict[DetectorGroupKey, tuple[bool, DetectorPriorityLevel]] = {} - - @property - @abc.abstractmethod - def counter_names(self) -> list[str]: - """ - The names of counters that this detector is going to keep track of. - """ - pass - - @abc.abstractmethod - def get_dedupe_value(self, data_packet: DataPacket[T]) -> int: - """ - Extracts the deduplication value from a passed data packet. - TODO: This might belong on the `DataPacket` instead. - """ - pass - - @abc.abstractmethod - def get_group_key_values(self, data_packet: DataPacket[T]) -> dict[str, int]: - """ - Extracts the values for all the group keys that exist in the given data packet, - and returns then as a dict keyed by group_key. - """ - pass - - @abc.abstractmethod - def build_occurrence_and_event_data( - self, group_key: DetectorGroupKey, value: int, new_status: PriorityLevel - ) -> tuple[IssueOccurrence, dict[str, Any]]: - pass - - def build_fingerprint(self, group_key) -> list[str]: - """ - Builds a fingerprint to uniquely identify a detected issue - """ - return [f"{self.detector.id}{':' + group_key if group_key is not None else ''}"] - - def get_state_data( - self, group_keys: list[DetectorGroupKey] - ) -> dict[DetectorGroupKey, DetectorStateData]: - """ - Fetches state data associated with this detector for the associated `group_keys`. - Returns a dict keyed by each group_key with the fetched `DetectorStateData`. - If data isn't currently stored, falls back to default values. 
- """ - group_key_detectors = self.bulk_get_detector_state(group_keys) - dedupe_keys = [self.build_dedupe_value_key(gk) for gk in group_keys] - pipeline = get_redis_client().pipeline() - for dk in dedupe_keys: - pipeline.get(dk) - group_key_dedupe_values = { - gk: int(dv) if dv else 0 for gk, dv in zip(group_keys, pipeline.execute()) - } - pipeline.reset() - counter_updates = {} - if self.counter_names: - counter_keys = [ - self.build_counter_value_key(gk, name) - for gk in group_keys - for name in self.counter_names - ] - for ck in counter_keys: - pipeline.get(ck) - vals = [int(val) if val is not None else val for val in pipeline.execute()] - counter_updates = { - gk: dict(zip(self.counter_names, values)) - for gk, values in zip(group_keys, chunked(vals, len(self.counter_names))) - } - - results = {} - for gk in group_keys: - detector_state = group_key_detectors.get(gk) - results[gk] = DetectorStateData( - group_key=gk, - active=detector_state.active if detector_state else False, - status=( - DetectorPriorityLevel(int(detector_state.state)) - if detector_state - else DetectorPriorityLevel.OK - ), - dedupe_value=group_key_dedupe_values[gk], - counter_updates=counter_updates[gk], - ) - return results - - def evaluate( - self, data_packet: DataPacket[T] - ) -> dict[DetectorGroupKey, DetectorEvaluationResult]: - """ - Evaluates a given data packet and returns a list of `DetectorEvaluationResult`. - There will be one result for each group key result in the packet, unless the - evaluation is skipped due to various rules. - """ - dedupe_value = self.get_dedupe_value(data_packet) - group_values = self.get_group_key_values(data_packet) - all_state_data = self.get_state_data(list(group_values.keys())) - results = {} - for group_key, group_value in group_values.items(): - result = self.evaluate_group_key_value( - group_key, group_value, all_state_data[group_key], dedupe_value - ) - if result: - results[result.group_key] = result - return results - - def evaluate_group_key_value( - self, - group_key: DetectorGroupKey, - value: int, - state_data: DetectorStateData, - dedupe_value: int, - ) -> DetectorEvaluationResult | None: - """ - Evaluates a value associated with a given `group_key` and returns a `DetectorEvaluationResult` with the results - and any state changes that need to be made. - - Checks that we haven't already processed this datapacket for this group_key, and skips evaluation if we have. - """ - if dedupe_value <= state_data.dedupe_value: - # TODO: Does it actually make more sense to just do this at the data packet level rather than the group - # key level? - metrics.incr("workflow_engine.detector.skipping_already_processed_update") - return None - - self.enqueue_dedupe_update(group_key, dedupe_value) - - if not self.condition_group: - metrics.incr("workflow_engine.detector.skipping_invalid_condition_group") - return None - - new_status = DetectorPriorityLevel.OK - - for condition in self.conditions: - # TODO: We need to handle tracking consecutive evaluations before emitting a result here. We're able to - # store these in `DetectorStateData.counter_updates`, but we don't have anywhere to set the required - # thresholds at the moment. Probably should be a field on the Detector? Could also be on the condition - # level, but usually we want to set this at a higher level. 
- evaluation = condition.evaluate_value(value) - - # ensures that the result is a DetectorPriorityLevel, and then uses the highest priority - if isinstance(evaluation, DetectorPriorityLevel): - new_status = max(new_status, evaluation) - - # TODO: We'll increment and change these later, but for now they don't change so just pass an empty dict - self.enqueue_counter_update(group_key, {}) - - if state_data.status != new_status: - is_active = new_status != DetectorPriorityLevel.OK - self.enqueue_state_update(group_key, is_active, new_status) - event_data = None - result: StatusChangeMessage | IssueOccurrence - if new_status == DetectorPriorityLevel.OK: - # If we've determined that we're now ok, we just want to resolve the issue - result = StatusChangeMessage( - fingerprint=self.build_fingerprint(group_key), - project_id=self.detector.project_id, - new_status=GroupStatus.RESOLVED, - new_substatus=None, - ) - else: - result, event_data = self.build_occurrence_and_event_data( - group_key, value, PriorityLevel(new_status) - ) - return DetectorEvaluationResult( - group_key=group_key, - is_active=is_active, - priority=new_status, - result=result, - event_data=event_data, - ) - return None - - def enqueue_dedupe_update(self, group_key: DetectorGroupKey, dedupe_value: int): - self.dedupe_updates[group_key] = dedupe_value - - def enqueue_counter_update( - self, group_key: DetectorGroupKey, counter_updates: dict[str, int | None] - ): - self.counter_updates[group_key] = counter_updates - - def enqueue_state_update( - self, group_key: DetectorGroupKey, is_active: bool, priority: DetectorPriorityLevel - ): - self.state_updates[group_key] = (is_active, priority) - - def build_dedupe_value_key(self, group_key: DetectorGroupKey) -> str: - if group_key is None: - group_key = "" - return f"{self.detector.id}:{group_key}:dedupe_value" - - def build_counter_value_key(self, group_key: DetectorGroupKey, counter_name: str) -> str: - if group_key is None: - group_key = "" - return f"{self.detector.id}:{group_key}:{counter_name}" - - def bulk_get_detector_state( - self, group_keys: list[DetectorGroupKey] - ) -> dict[DetectorGroupKey, DetectorState]: - """ - Bulk fetches detector state for the passed `group_keys`. Returns a dict keyed by each - `group_key` with the fetched `DetectorStateData`. - - If there's no `DetectorState` row for a `detector`/`group_key` pair then we'll exclude - the group_key from the returned dict. 
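
# Illustrative sketch of the Redis key scheme used by the dedupe and counter
# helpers above (a None group key collapses to an empty segment). The detector
# id and counter name below are made up for the example.
from datetime import timedelta

SEVEN_DAYS = int(timedelta(days=7).total_seconds())  # matches the removed REDIS_TTL constant


def dedupe_key(detector_id: int, group_key: str | None) -> str:
    return f"{detector_id}:{group_key if group_key is not None else ''}:dedupe_value"


def counter_key(detector_id: int, group_key: str | None, counter_name: str) -> str:
    return f"{detector_id}:{group_key if group_key is not None else ''}:{counter_name}"


# dedupe_key(42, None)                          -> "42::dedupe_value"
# dedupe_key(42, "db")                          -> "42:db:dedupe_value"
# counter_key(42, "db", "consecutive_breaches") -> "42:db:consecutive_breaches"
# When the handler commits (see the pipeline logic further below), each key is
# written with a 7-day expiry, and a counter value of None deletes the key.
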
- """ - # TODO: Cache this query (or individual fetches, then bulk fetch anything missing) - query_filter = Q( - detector_group_key__in=[group_key for group_key in group_keys if group_key is not None] - ) - if None in group_keys: - query_filter |= Q(detector_group_key__isnull=True) - - return { - detector_state.detector_group_key: detector_state - for detector_state in self.detector.detectorstate_set.filter(query_filter) - } - - def commit_state_updates(self): - self._bulk_commit_detector_state() - self._bulk_commit_redis_state() - - def _bulk_commit_redis_state(self): - pipeline = get_redis_client().pipeline() - if self.dedupe_updates: - for group_key, dedupe_value in self.dedupe_updates.items(): - pipeline.set(self.build_dedupe_value_key(group_key), dedupe_value, ex=REDIS_TTL) +# TODO - Add metrics / logging here +def process_detectors( + data_packet: DataPacket, detectors: list[Detector] +) -> list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]]: + results = [] - if self.counter_updates: - for group_key, counter_updates in self.counter_updates.items(): - for counter_name, counter_value in counter_updates.items(): - key_name = self.build_counter_value_key(group_key, counter_name) - if counter_value is None: - pipeline.delete(key_name) - else: - pipeline.set(key_name, counter_value, ex=REDIS_TTL) + for detector in detectors: + handler = detector.detector_handler - pipeline.execute() - self.dedupe_updates.clear() - self.counter_updates.clear() + if not handler: + continue - def _bulk_commit_detector_state(self): - # TODO: We should already have these loaded from earlier, figure out how to cache and reuse - detector_state_lookup = self.bulk_get_detector_state( - [update for update in self.state_updates.keys()] - ) - created_detector_states = [] - updated_detector_states = [] - for group_key, (active, priority) in self.state_updates.items(): - detector_state = detector_state_lookup.get(group_key) - if not detector_state: - created_detector_states.append( - DetectorState( - detector_group_key=group_key, - detector=self.detector, - active=active, - state=priority, - ) - ) - elif active != detector_state.active or priority != detector_state.state: - detector_state.active = active - detector_state.state = priority - updated_detector_states.append(detector_state) + # TODO add metric here for detector processing + detector_results = handler.evaluate(data_packet) - if created_detector_states: - DetectorState.objects.bulk_create(created_detector_states) + for result in detector_results.values(): + if result.result is not None: + create_issue_occurrence_from_result(result) - if updated_detector_states: - DetectorState.objects.bulk_update(updated_detector_states, ["active", "state"]) - self.state_updates.clear() + if detector_results: + # TODO - Add metrics / logging here for successful result + results.append((detector, detector_results)) + # Now that we've processed all results for this detector, commit any state changes + handler.commit_state_updates() -@cache_func_for_models( - [ - (DataConditionGroup, lambda group: (group.id,)), - (DataCondition, lambda condition: (condition.condition_group_id,)), - ], - # There shouldn't be stampedes to fetch this data, and we might update multiple `DataConditionGroup`s at the same - # time, so we'd prefer to avoid re-fetching this many times. Just bust the cache and re-fetch lazily. 
- recalculate=False, -) -def get_data_group_conditions_and_group( - data_condition_group_id: int, -) -> tuple[DataConditionGroup | None, list[DataCondition]]: - try: - group = DataConditionGroup.objects.get(id=data_condition_group_id) - conditions = list(group.datacondition_set.all()) - except DataConditionGroup.DoesNotExist: - group = None - conditions = [] - return group, conditions + return results diff --git a/src/sentry/workflow_engine/processors/workflow.py b/src/sentry/workflow_engine/processors/workflow.py new file mode 100644 index 00000000000000..effc18173780a6 --- /dev/null +++ b/src/sentry/workflow_engine/processors/workflow.py @@ -0,0 +1,49 @@ +import logging + +import sentry_sdk + +from sentry.eventstore.models import GroupEvent +from sentry.utils import metrics +from sentry.workflow_engine.models import Detector, Workflow +from sentry.workflow_engine.processors.action import evaluate_workflow_action_filters +from sentry.workflow_engine.processors.detector import get_detector_by_event + +logger = logging.getLogger(__name__) + + +def evaluate_workflow_triggers(workflows: set[Workflow], evt: GroupEvent) -> set[Workflow]: + triggered_workflows: set[Workflow] = set() + + for workflow in workflows: + if workflow.evaluate_trigger_conditions(evt): + triggered_workflows.add(workflow) + + return triggered_workflows + + +def process_workflows(evt: GroupEvent) -> set[Workflow]: + """ + This method will get the detector based on the event, and then gather the associated workflows. + Next, it will evaluate the "when" (or trigger) conditions for each workflow, if the conditions are met, + the workflow will be added to a unique list of triggered workflows. + + Finally, each of the triggered workflows will have their actions evaluated and executed. + """ + # Check to see if the GroupEvent has an issue occurrence + try: + detector = get_detector_by_event(evt) + except Detector.DoesNotExist: + metrics.incr("workflow_engine.process_workflows.error") + logger.exception("Detector not found for event", extra={"event_id": evt.event_id}) + return set() + + # Get the workflows, evaluate the when_condition_group, finally evaluate the actions for workflows that are triggered + workflows = set(Workflow.objects.filter(detectorworkflow__detector_id=detector.id).distinct()) + triggered_workflows = evaluate_workflow_triggers(workflows, evt) + actions = evaluate_workflow_action_filters(triggered_workflows, evt) + + with sentry_sdk.start_span(op="workflow_engine.process_workflows.trigger_actions"): + for action in actions: + action.trigger(evt, detector) + + return triggered_workflows diff --git a/src/sentry/workflow_engine/registry.py b/src/sentry/workflow_engine/registry.py index 6b9a490ccde13f..2af6b070a1bcf8 100644 --- a/src/sentry/workflow_engine/registry.py +++ b/src/sentry/workflow_engine/registry.py @@ -1,4 +1,8 @@ +from typing import Any + from sentry.utils.registry import Registry -from sentry.workflow_engine.types import DataSourceTypeHandler +from sentry.workflow_engine.types import ActionHandler, DataConditionHandler, DataSourceTypeHandler data_source_type_registry = Registry[type[DataSourceTypeHandler]]() +condition_handler_registry = Registry[DataConditionHandler[Any]]() +action_handler_registry = Registry[ActionHandler]() diff --git a/src/sentry/workflow_engine/types.py b/src/sentry/workflow_engine/types.py index bb41d772427e7e..50dc15a7010f22 100644 --- a/src/sentry/workflow_engine/types.py +++ b/src/sentry/workflow_engine/types.py @@ -1,10 +1,14 @@ from __future__ import annotations -from 
enum import IntEnum -from typing import Generic, TypeVar +from enum import IntEnum, StrEnum +from typing import TYPE_CHECKING, Any, Generic, TypeVar from sentry.types.group import PriorityLevel +if TYPE_CHECKING: + from sentry.eventstore.models import GroupEvent + from sentry.workflow_engine.models import Action, Detector + T = TypeVar("T") @@ -21,9 +25,26 @@ class DetectorPriorityLevel(IntEnum): DetectorGroupKey = str | None DataConditionResult = DetectorPriorityLevel | int | float | bool | None +ProcessedDataConditionResult = tuple[bool, list[DataConditionResult]] + + +class ActionHandler: + @staticmethod + def execute(group_event: GroupEvent, action: Action, detector: Detector) -> None: + raise NotImplementedError class DataSourceTypeHandler(Generic[T]): @staticmethod def bulk_get_query_object(data_sources) -> dict[int, T | None]: raise NotImplementedError + + +class DataConditionHandler(Generic[T]): + @staticmethod + def evaluate_value(value: T, comparison: Any, condition: str) -> DataConditionResult: + raise NotImplementedError + + +class DetectorType(StrEnum): + ERROR = "ErrorDetector" diff --git a/static/app/actionCreators/dashboards.tsx b/static/app/actionCreators/dashboards.tsx index 091381e7ac1b12..845fe1c8227db6 100644 --- a/static/app/actionCreators/dashboards.tsx +++ b/static/app/actionCreators/dashboards.tsx @@ -97,6 +97,36 @@ export function updateDashboardVisit( return promise; } +export async function updateDashboardFavorite( + api: Client, + orgId: string, + dashboardId: string | string[], + isFavorited: boolean +): Promise { + try { + await api.requestPromise( + `/organizations/${orgId}/dashboards/${dashboardId}/favorite/`, + { + method: 'PUT', + data: { + isFavorited, + }, + } + ); + } catch (response) { + const errorResponse = response?.responseJSON ?? 
null; + if (errorResponse) { + const errors = flattenErrors(errorResponse, {}); + addErrorMessage(errors[Object.keys(errors)[0]] as string); + } else if (isFavorited) { + addErrorMessage(t('Unable to favorite dashboard')); + } else { + addErrorMessage(t('Unable to unfavorite dashboard')); + } + throw response; + } +} + export function fetchDashboard( api: Client, orgId: string, @@ -220,7 +250,7 @@ export function validateWidgetRequest( export function updateDashboardPermissions( api: Client, orgId: string, - dashboard: DashboardDetails + dashboard: DashboardDetails | DashboardListItem ): Promise { const {permissions} = dashboard; const data = { diff --git a/static/app/actionCreators/events.spec.tsx b/static/app/actionCreators/events.spec.tsx index cc8123d8ae7e7e..54150c082eda5c 100644 --- a/static/app/actionCreators/events.spec.tsx +++ b/static/app/actionCreators/events.spec.tsx @@ -52,6 +52,27 @@ describe('Events ActionCreator', function () { ); }); + it('sets useRpc param', function () { + doEventsRequest(api, { + ...opts, + includePrevious: false, + period: '7d', + partial: true, + useRpc: true, + }); + + expect(mock).toHaveBeenLastCalledWith( + '/organizations/org-slug/events-stats/', + expect.objectContaining({ + query: expect.objectContaining({ + project: [parseInt(project.id, 10)], + environment: [], + statsPeriod: '7d', + }), + }) + ); + }); + it('requests events stats with relative period including previous period', function () { doEventsRequest(api, { ...opts, diff --git a/static/app/actionCreators/events.tsx b/static/app/actionCreators/events.tsx index 07c6961908909f..8f514dc09f7146 100644 --- a/static/app/actionCreators/events.tsx +++ b/static/app/actionCreators/events.tsx @@ -56,6 +56,7 @@ type Options = { start?: DateString; team?: Readonly; topEvents?: number; + useRpc?: boolean; withoutZerofill?: boolean; yAxis?: string | string[]; }; @@ -109,6 +110,7 @@ export const doEventsRequest = ( excludeOther, includeAllArgs, dataset, + useRpc, }: EventsStatsOptions ): IncludeAllArgsType extends true ? Promise< @@ -137,6 +139,7 @@ export const doEventsRequest = ( referrer: referrer ? referrer : 'api.organization-event-stats', excludeOther: excludeOther ? '1' : undefined, dataset, + useRpc: useRpc ? 
'1' : undefined, }).filter(([, value]) => typeof value !== 'undefined') ); @@ -175,6 +178,7 @@ export type EventQuery = { referrer?: string; sort?: string | string[]; team?: string | string[]; + useRpc?: '1'; }; export type TagSegment = { diff --git a/static/app/actionCreators/prompts.tsx b/static/app/actionCreators/prompts.tsx index 081bf8377453d5..51a16f12459a65 100644 --- a/static/app/actionCreators/prompts.tsx +++ b/static/app/actionCreators/prompts.tsx @@ -2,6 +2,7 @@ import {useCallback} from 'react'; import type {Client} from 'sentry/api'; import type {Organization, OrganizationSummary} from 'sentry/types/organization'; +import {defined} from 'sentry/utils'; import {promptIsDismissed} from 'sentry/utils/promptIsDismissed'; import type {ApiQueryKey, UseApiQueryOptions} from 'sentry/utils/queryClient'; import {setApiQueryData, useApiQuery, useQueryClient} from 'sentry/utils/queryClient'; @@ -41,7 +42,7 @@ type PromptCheckParams = { * The prompt feature name */ feature: string | string[]; - organization: OrganizationSummary; + organization: OrganizationSummary | null; /** * The numeric project ID as a string */ @@ -89,10 +90,10 @@ export async function promptsCheck( ): Promise { const query = { feature: params.feature, - organization_id: params.organization.id, + organization_id: params.organization?.id, ...(params.projectId === undefined ? {} : {project_id: params.projectId}), }; - const url = `/organizations/${params.organization.slug}/prompts-activity/`; + const url = `/organizations/${params.organization?.slug}/prompts-activity/`; const response: PromptResponse = await api.requestPromise(url, { query, }); @@ -112,22 +113,23 @@ export const makePromptsCheckQueryKey = ({ organization, projectId, }: PromptCheckParams): ApiQueryKey => { - const url = `/organizations/${organization.slug}/prompts-activity/`; + const url = `/organizations/${organization?.slug}/prompts-activity/`; return [ url, - {query: {feature, organization_id: organization.id, project_id: projectId}}, + {query: {feature, organization_id: organization?.id, project_id: projectId}}, ]; }; export function usePromptsCheck( {feature, organization, projectId}: PromptCheckParams, - options: Partial> = {} + {enabled = true, ...options}: Partial> = {} ) { return useApiQuery( makePromptsCheckQueryKey({feature, organization, projectId}), { staleTime: 120000, retry: false, + enabled: defined(organization) && enabled, ...options, } ); @@ -141,7 +143,7 @@ export function usePrompt({ options, }: { feature: string; - organization: Organization; + organization: Organization | null; daysToSnooze?: number; options?: Partial>; projectId?: string; @@ -150,18 +152,20 @@ export function usePrompt({ const prompt = usePromptsCheck({feature, organization, projectId}, options); const queryClient = useQueryClient(); - const isPromptDismissed = - prompt.isSuccess && prompt.data.data - ? promptIsDismissed( - { - dismissedTime: prompt.data.data.dismissed_ts, - snoozedTime: prompt.data.data.snoozed_ts, - }, - daysToSnooze - ) - : undefined; + const isPromptDismissed = prompt.isSuccess + ? 
promptIsDismissed( + { + dismissedTime: prompt.data?.data?.dismissed_ts, + snoozedTime: prompt.data?.data?.snoozed_ts, + }, + daysToSnooze + ) + : undefined; const dismissPrompt = useCallback(() => { + if (!organization) { + return; + } promptsUpdate(api, { organization, projectId, @@ -189,6 +193,9 @@ export function usePrompt({ }, [api, feature, organization, projectId, queryClient]); const snoozePrompt = useCallback(() => { + if (!organization) { + return; + } promptsUpdate(api, { organization, projectId, @@ -216,6 +223,9 @@ export function usePrompt({ }, [api, feature, organization, projectId, queryClient]); const showPrompt = useCallback(() => { + if (!organization) { + return; + } promptsUpdate(api, { organization, projectId, diff --git a/static/app/actionCreators/release.spec.tsx b/static/app/actionCreators/release.spec.tsx deleted file mode 100644 index e24d7511146729..00000000000000 --- a/static/app/actionCreators/release.spec.tsx +++ /dev/null @@ -1,122 +0,0 @@ -import {getProjectRelease, getReleaseDeploys} from 'sentry/actionCreators/release'; -import ReleaseStore, {getReleaseStoreKey} from 'sentry/stores/releaseStore'; - -describe('ReleaseActionCreator', function () { - const orgSlug = 'myOrg'; - const projectSlug = 'myProject'; - const releaseVersion = 'myRelease'; - const releaseKey = getReleaseStoreKey(projectSlug, releaseVersion); - - const api = new MockApiClient(); - const mockData = {id: '1'}; - let mockResponse: jest.Mock; - - describe('getProjectRelease', () => { - const releaseUrl = `/projects/${orgSlug}/${projectSlug}/releases/${encodeURIComponent( - releaseVersion - )}/`; - - beforeEach(() => { - MockApiClient.clearMockResponses(); - mockResponse = MockApiClient.addMockResponse({ - url: releaseUrl, - body: mockData, - }); - - ReleaseStore.reset(); - - jest.restoreAllMocks(); - jest.spyOn(ReleaseStore, 'loadRelease'); - jest.spyOn(ReleaseStore, 'loadReleaseSuccess'); - - // XXX(leedongwei): We cannot spy on ReleaseStore at all - // See repositories.spec.jsx beforeEach method for the reason - }); - - it('fetches a Release and emits actions', async () => { - getProjectRelease(api, {orgSlug, projectSlug, releaseVersion}); - expect(ReleaseStore.loadRelease).toHaveBeenCalledWith( - orgSlug, - projectSlug, - releaseVersion - ); - expect(ReleaseStore.loadReleaseSuccess).not.toHaveBeenCalled(); - - await tick(); // Run Store.loadRelease and fire Action.loadReleaseSuccess - await tick(); // Run Store.loadReleaseSuccess - - expect(mockResponse).toHaveBeenCalledWith(releaseUrl, expect.anything()); - expect(ReleaseStore.loadReleaseSuccess).toHaveBeenCalledWith( - projectSlug, - releaseVersion, - mockData - ); - - expect(ReleaseStore.state.release[releaseKey]).toEqual(mockData); - expect(ReleaseStore.state.releaseLoading[releaseKey]).toEqual(false); - expect(ReleaseStore.state.releaseError[releaseKey]).toEqual(undefined); - }); - - it('short-circuits the JS event loop when fetching Release', () => { - expect(ReleaseStore.state.releaseLoading[releaseKey]).toEqual(undefined); - - getProjectRelease(api, {orgSlug, projectSlug, releaseVersion}); - expect(ReleaseStore.loadRelease).toHaveBeenCalled(); - // expect(ReleaseStore.loadRelease).not.toHaveBeenCalled(); // See above for comment on ReleaseStore - expect(ReleaseStore.state.releaseLoading[releaseKey]).toEqual(true); - }); - }); - - describe('getReleaseDeploys', () => { - const deploysUrl = `/organizations/${orgSlug}/releases/${encodeURIComponent( - releaseVersion - )}/deploys/`; - - beforeEach(() => { - 
MockApiClient.clearMockResponses(); - mockResponse = MockApiClient.addMockResponse({ - url: deploysUrl, - body: [mockData], - }); - - ReleaseStore.reset(); - - jest.restoreAllMocks(); - jest.spyOn(ReleaseStore, 'loadDeploys'); - jest.spyOn(ReleaseStore, 'loadDeploysSuccess'); - }); - - it('fetch Deploys and emit an action', async () => { - getReleaseDeploys(api, {orgSlug, projectSlug, releaseVersion}); - expect(ReleaseStore.loadDeploys).toHaveBeenCalledWith( - orgSlug, - projectSlug, - releaseVersion - ); - expect(ReleaseStore.loadDeploysSuccess).not.toHaveBeenCalled(); - - await tick(); // Run Store.loadDeploys and fire Action.loadDeploysSuccess - await tick(); // Run Store.loadDeploysSuccess - - expect(mockResponse).toHaveBeenCalledWith(deploysUrl, expect.anything()); - expect(ReleaseStore.loadDeploysSuccess).toHaveBeenCalledWith( - projectSlug, - releaseVersion, - [mockData] - ); - - expect(ReleaseStore.state.deploys[releaseKey]).toEqual([mockData]); - expect(ReleaseStore.state.deploysLoading[releaseKey]).toEqual(false); - expect(ReleaseStore.state.deploysError[releaseKey]).toEqual(undefined); - }); - - it('short-circuits the JS event loop when fetching Deploys', () => { - expect(ReleaseStore.state.deploysLoading[releaseKey]).toEqual(undefined); - - getReleaseDeploys(api, {orgSlug, projectSlug, releaseVersion}); - expect(ReleaseStore.loadDeploys).toHaveBeenCalled(); - // expect(ReleaseStore.loadDeploys).not.toHaveBeenCalled(); // See above for comment on ReleaseStore - expect(ReleaseStore.state.deploysLoading[releaseKey]).toEqual(true); - }); - }); -}); diff --git a/static/app/actionCreators/release.tsx b/static/app/actionCreators/release.tsx index d0974d6e5e43be..86d2839e4001c4 100644 --- a/static/app/actionCreators/release.tsx +++ b/static/app/actionCreators/release.tsx @@ -1,5 +1,3 @@ -import * as Sentry from '@sentry/react'; - import { addErrorMessage, addLoadingMessage, @@ -7,8 +5,6 @@ import { } from 'sentry/actionCreators/indicator'; import type {Client} from 'sentry/api'; import {t} from 'sentry/locale'; -import ReleaseStore, {getReleaseStoreKey} from 'sentry/stores/releaseStore'; -import type {Deploy, Release} from 'sentry/types/release'; import {ReleaseStatus} from 'sentry/types/release'; type ParamsGet = { @@ -17,80 +13,9 @@ type ParamsGet = { releaseVersion: string; }; -export function getProjectRelease(api: Client, params: ParamsGet) { - const {orgSlug, projectSlug, releaseVersion} = params; - const path = `/projects/${orgSlug}/${projectSlug}/releases/${encodeURIComponent( - releaseVersion - )}/`; - // HACK(leedongwei): Actions fired by the ActionCreators are queued to - // the back of the event loop, allowing another getRelease for the same - // release to be fired before the loading state is updated in store. - // This hack short-circuits that and update the state immediately. 
- ReleaseStore.state.releaseLoading[getReleaseStoreKey(projectSlug, releaseVersion)] = - true; - ReleaseStore.loadRelease(orgSlug, projectSlug, releaseVersion); - - return api - .requestPromise(path, { - method: 'GET', - }) - .then((res: Release) => { - ReleaseStore.loadReleaseSuccess(projectSlug, releaseVersion, res); - }) - .catch(err => { - // This happens when a Project is not linked to a specific Release - if (err.status === 404) { - ReleaseStore.loadReleaseSuccess(projectSlug, releaseVersion, null); - return; - } - - ReleaseStore.loadReleaseError(projectSlug, releaseVersion, err); - Sentry.withScope(scope => { - scope.setLevel('warning'); - scope.setFingerprint(['getRelease-action-creator']); - Sentry.captureException(err); - }); - }); -} - -export function getReleaseDeploys(api: Client, params: ParamsGet) { - const {orgSlug, projectSlug, releaseVersion} = params; - const path = `/organizations/${orgSlug}/releases/${encodeURIComponent( - releaseVersion - )}/deploys/`; - - // HACK(leedongwei): Same as above - ReleaseStore.state.deploysLoading[getReleaseStoreKey(projectSlug, releaseVersion)] = - true; - ReleaseStore.loadDeploys(orgSlug, projectSlug, releaseVersion); - - return api - .requestPromise(path, { - method: 'GET', - }) - .then((res: Deploy[]) => { - ReleaseStore.loadDeploysSuccess(projectSlug, releaseVersion, res); - }) - .catch(err => { - // This happens when a Project is not linked to a specific Release - if (err.status === 404) { - ReleaseStore.loadDeploysSuccess(projectSlug, releaseVersion, null); - return; - } - - ReleaseStore.loadDeploysError(projectSlug, releaseVersion, err); - Sentry.withScope(scope => { - scope.setLevel('warning'); - scope.setFingerprint(['getReleaseDeploys-action-creator']); - Sentry.captureException(err); - }); - }); -} - export function archiveRelease(api: Client, params: ParamsGet) { - const {orgSlug, projectSlug, releaseVersion} = params; + const {orgSlug, releaseVersion} = params; - ReleaseStore.loadRelease(orgSlug, projectSlug, releaseVersion); addLoadingMessage(t('Archiving Release\u2026')); return api @@ -102,12 +27,10 @@ export function archiveRelease(api: Client, params: ParamsGet) { version: releaseVersion, }, }) - .then((release: Release) => { - ReleaseStore.loadReleaseSuccess(projectSlug, releaseVersion, release); + .then(() => { addSuccessMessage(t('Release was successfully archived.')); }) .catch(error => { - ReleaseStore.loadReleaseError(projectSlug, releaseVersion, error); addErrorMessage( error.responseJSON?.detail ?? t('Release could not be be archived.') ); @@ -116,9 +39,8 @@ export function archiveRelease(api: Client, params: ParamsGet) { } export function restoreRelease(api: Client, params: ParamsGet) { - const {orgSlug, projectSlug, releaseVersion} = params; + const {orgSlug, releaseVersion} = params; - ReleaseStore.loadRelease(orgSlug, projectSlug, releaseVersion); addLoadingMessage(t('Restoring Release\u2026')); return api @@ -130,12 +52,10 @@ export function restoreRelease(api: Client, params: ParamsGet) { version: releaseVersion, }, }) - .then((release: Release) => { - ReleaseStore.loadReleaseSuccess(projectSlug, releaseVersion, release); + .then(() => { addSuccessMessage(t('Release was successfully restored.')); }) .catch(error => { - ReleaseStore.loadReleaseError(projectSlug, releaseVersion, error); addErrorMessage( error.responseJSON?.detail ?? 
t('Release could not be be restored.') ); diff --git a/static/app/bootstrap/processInitQueue.tsx b/static/app/bootstrap/processInitQueue.tsx index dd793d47fd4c6a..f85212d1803d12 100644 --- a/static/app/bootstrap/processInitQueue.tsx +++ b/static/app/bootstrap/processInitQueue.tsx @@ -1,3 +1,6 @@ +import {createRoot} from 'react-dom/client'; +import throttle from 'lodash/throttle'; + import {exportedGlobals} from 'sentry/bootstrap/exportGlobals'; import type {OnSentryInitConfiguration} from 'sentry/types/system'; import {SentryInitRenderReactComponent} from 'sentry/types/system'; @@ -32,15 +35,23 @@ async function processItem(initConfig: OnSentryInitConfiguration) { if (!input || !element) { return; } + const inputElem = document.querySelector(input); + const rootEl = document.querySelector(element); + if (!inputElem || !rootEl) { + return; + } - const passwordStrength = await import( + const {PasswordStrength} = await import( /* webpackChunkName: "PasswordStrength" */ 'sentry/components/passwordStrength' ); - passwordStrength.attachTo({ - input: document.querySelector(input), - element: document.querySelector(element), - }); + const root = createRoot(rootEl); + inputElem.addEventListener( + 'input', + throttle(e => { + root.render(); + }) + ); return; } diff --git a/static/app/components/activity/note/inputWithStorage.tsx b/static/app/components/activity/note/inputWithStorage.tsx index b828e14b03ec8e..cef5b960a8eb1c 100644 --- a/static/app/components/activity/note/inputWithStorage.tsx +++ b/static/app/components/activity/note/inputWithStorage.tsx @@ -6,7 +6,7 @@ import {NoteInput} from 'sentry/components/activity/note/input'; import type {MentionChangeEvent} from 'sentry/components/activity/note/types'; import type {NoteType} from 'sentry/types/alerts'; import localStorage from 'sentry/utils/localStorage'; -import {StreamlinedNoteInput} from 'sentry/views/issueDetails/streamline/note'; +import {StreamlinedNoteInput} from 'sentry/views/issueDetails/streamline/sidebar/note'; import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; type InputProps = React.ComponentProps; @@ -14,6 +14,7 @@ type InputProps = React.ComponentProps; type Props = { itemKey: string; storageKey: string; + onCancel?: () => void; onLoad?: (data: string) => string; onSave?: (data: string) => string; source?: string; @@ -145,6 +146,7 @@ function NoteInputWithStorage({ placeholder={props.placeholder} noteId={props.noteId} onUpdate={props.onUpdate} + onCancel={props.onCancel} /> ); } diff --git a/static/app/components/ai/SeerIcon.tsx b/static/app/components/ai/SeerIcon.tsx new file mode 100644 index 00000000000000..13f272ec4355cd --- /dev/null +++ b/static/app/components/ai/SeerIcon.tsx @@ -0,0 +1,35 @@ +import {forwardRef} from 'react'; +import styled from '@emotion/styled'; + +import type {SVGIconProps} from 'sentry/icons/svgIcon'; +import {SvgIcon} from 'sentry/icons/svgIcon'; + +const SeerIcon = forwardRef((props, ref) => { + return ( + + + + + + ); +}); + +SeerIcon.displayName = 'SeerIcon'; + +export {SeerIcon}; + +const StyledPath = styled('path')` + fill: none; + stroke: currentColor; + stroke-linecap: round; + stroke-linejoin: round; + stroke-width: 2.25px; +`; + +const StyledLine = styled('line')` + fill: none; + stroke: currentColor; + stroke-linecap: round; + stroke-linejoin: round; + stroke-width: 2.25px; +`; diff --git a/static/app/components/assigneeSelectorDropdown.tsx b/static/app/components/assigneeSelectorDropdown.tsx index 476271fb237d55..62ccdd19f9a339 100644 --- 
a/static/app/components/assigneeSelectorDropdown.tsx +++ b/static/app/components/assigneeSelectorDropdown.tsx @@ -68,6 +68,10 @@ export interface AssigneeSelectorDropdownProps { * If true, there will be a loading indicator in the menu header. */ loading: boolean; + /** + * Additional items to render in the menu footer + */ + additionalMenuFooterItems?: React.ReactNode; /** * Additional styles to apply to the dropdown */ @@ -213,6 +217,7 @@ export default function AssigneeSelectorDropdown({ owners, sizeLimit = 150, trigger, + additionalMenuFooterItems, }: AssigneeSelectorDropdownProps) { const memberLists = useLegacyStore(MemberListStore); const sessionUser = useUser(); @@ -536,18 +541,21 @@ export default function AssigneeSelectorDropdown({ }; const footerInviteButton = ( - + + + {additionalMenuFooterItems} + ); return ( @@ -611,3 +619,9 @@ const TooltipSubExternalLink = styled(ExternalLink)` const TooltipSubtext = styled('div')` color: ${p => p.theme.subText}; `; + +const FooterWrapper = styled('div')` + display: flex; + gap: ${space(1)}; + align-items: center; +`; diff --git a/static/app/components/assistant/getGuidesContent.tsx b/static/app/components/assistant/getGuidesContent.tsx index 63d10fb873058c..a49fadbc6f25d0 100644 --- a/static/app/components/assistant/getGuidesContent.tsx +++ b/static/app/components/assistant/getGuidesContent.tsx @@ -359,7 +359,7 @@ function getDemoModeGuides(): GuidesContent { requiredTargets: ['release_version'], steps: [ { - title: t('Release-specfic trends'), + title: t('Release-specific trends'), target: 'release_version', description: t( `Select the latest release to review new and regressed issues, and business critical metrics like crash rate, and user adoption.` diff --git a/static/app/components/avatar/avatarList.tsx b/static/app/components/avatar/avatarList.tsx index e397f51a9927c6..df9932016668b3 100644 --- a/static/app/components/avatar/avatarList.tsx +++ b/static/app/components/avatar/avatarList.tsx @@ -1,11 +1,12 @@ import {forwardRef} from 'react'; -import {css} from '@emotion/react'; +import {css, type Theme} from '@emotion/react'; import styled from '@emotion/styled'; import TeamAvatar from 'sentry/components/avatar/teamAvatar'; import UserAvatar from 'sentry/components/avatar/userAvatar'; import {Tooltip} from 'sentry/components/tooltip'; import {space} from 'sentry/styles/space'; +import type {Actor} from 'sentry/types/core'; import type {Team} from 'sentry/types/organization'; import type {AvatarUser} from 'sentry/types/user'; import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils'; @@ -17,10 +18,11 @@ type Props = { className?: string; maxVisibleAvatars?: number; renderTooltip?: UserAvatarProps['renderTooltip']; + renderUsersFirst?: boolean; teams?: Team[]; tooltipOptions?: UserAvatarProps['tooltipOptions']; typeAvatars?: string; - users?: AvatarUser[]; + users?: Array; }; const CollapsedAvatars = forwardRef(function CollapsedAvatars( @@ -51,6 +53,7 @@ function AvatarList({ className, users = [], teams = [], + renderUsersFirst = false, renderTooltip, }: Props) { const numTeams = teams.length; @@ -87,25 +90,48 @@ function AvatarList({ )} - {visibleUserAvatars.map(user => ( - - ))} - {visibleTeamAvatars.map(team => ( - - ))} + + {renderUsersFirst + ? visibleTeamAvatars.map(team => ( + + )) + : visibleUserAvatars.map(user => ( + + ))} + + {!renderUsersFirst + ? 
visibleTeamAvatars.map(team => ( + + )) + : visibleUserAvatars.map(user => ( + + ))} ); } @@ -119,7 +145,7 @@ export const AvatarListWrapper = styled('div')` flex-direction: row-reverse; `; -const AvatarStyle = p => css` +const AvatarStyle = (p: {theme: Theme}) => css` border: 2px solid ${p.theme.background}; margin-left: -8px; cursor: default; diff --git a/static/app/components/avatar/baseAvatar.tsx b/static/app/components/avatar/baseAvatar.tsx index 0b73327b1a4fd0..22d015700cb4b0 100644 --- a/static/app/components/avatar/baseAvatar.tsx +++ b/static/app/components/avatar/baseAvatar.tsx @@ -130,6 +130,7 @@ function BaseAvatar({ suggested={!!suggested} style={{...sizeStyle, ...style}} title={title} + hasTooltip={hasTooltip} {...props} > {hasError ? backup : imageAvatar} @@ -150,6 +151,7 @@ export {BaseAvatar, type BaseAvatarProps}; // Note: Avatar will not always be a child of a flex layout, but this seems like a // sensible default. const StyledBaseAvatar = styled('span')<{ + hasTooltip: boolean; round: boolean; suggested: boolean; }>` @@ -157,6 +159,9 @@ const StyledBaseAvatar = styled('span')<{ border-radius: ${p => (p.round ? '50%' : '3px')}; border: ${p => (p.suggested ? `1px dashed ${p.theme.subText}` : 'none')}; background-color: ${p => (p.suggested ? p.theme.background : 'none')}; + :hover { + pointer-events: ${p => (p.hasTooltip ? 'none' : 'auto')}; + } `; const ImageAvatar = styled('img')` diff --git a/static/app/components/badge/deployBadge.spec.tsx b/static/app/components/badge/deployBadge.spec.tsx index 59eb59631ed1ac..74178731e69f5e 100644 --- a/static/app/components/badge/deployBadge.spec.tsx +++ b/static/app/components/badge/deployBadge.spec.tsx @@ -1,19 +1,12 @@ +import {DeployFixture} from 'sentry-fixture/deploy'; + import {render, screen} from 'sentry-test/reactTestingLibrary'; import DeployBadge from 'sentry/components/badge/deployBadge'; -import type {Deploy} from 'sentry/types/release'; - -const deploy: Deploy = { - name: '85fedddce5a61a58b160fa6b3d6a1a8451e94eb9 to prod', - url: '', - environment: 'production', - dateStarted: '2020-05-11T18:12:00.025928Z', - dateFinished: '2020-05-11T18:12:00.025928Z', - version: '4.2.0', - id: '6348842', -}; describe('DeployBadge', () => { + const deploy = DeployFixture(); + it('renders with link', () => { const projectId = 1; render( diff --git a/static/app/components/charts/eventsRequest.spec.tsx b/static/app/components/charts/eventsRequest.spec.tsx index 9449e67436ac81..b21207bdc9455c 100644 --- a/static/app/components/charts/eventsRequest.spec.tsx +++ b/static/app/components/charts/eventsRequest.spec.tsx @@ -72,6 +72,21 @@ describe('EventsRequest', function () { expect(doEventsRequest).toHaveBeenCalled(); }); + it('sets use RPC param', async function () { + render( + + {mock} + + ); + await waitFor(() => expect(doEventsRequest).toHaveBeenCalledTimes(1)); + expect(doEventsRequest).toHaveBeenCalledWith( + expect.anything(), + expect.objectContaining({ + useRpc: true, + }) + ); + }); + it('makes a new request if projects prop changes', async function () { const {rerender} = render({mock}); (doEventsRequest as jest.Mock).mockClear(); diff --git a/static/app/components/charts/eventsRequest.tsx b/static/app/components/charts/eventsRequest.tsx index 581123181cad71..2462b5251c5793 100644 --- a/static/app/components/charts/eventsRequest.tsx +++ b/static/app/components/charts/eventsRequest.tsx @@ -216,6 +216,10 @@ type EventsRequestPartialProps = { * This is a temporary flag to allow us to test on demand metrics */ useOnDemandMetrics?: 
boolean; + /** + * Whether or not to use RPCs instead of SnQL requests in the backend. + */ + useRpc?: boolean; /** * Whether or not to zerofill results */ diff --git a/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx b/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx index 6b60507f2efa2e..8b06cf74e6d3dd 100644 --- a/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx +++ b/static/app/components/devtoolbar/components/featureFlags/customOverride.tsx @@ -1,6 +1,7 @@ import {useContext, useState} from 'react'; +import {css} from '@emotion/react'; -import {Button} from 'sentry/components/button'; +import {resetButtonCss} from 'sentry/components/devtoolbar/styles/reset'; import Input from 'sentry/components/input'; import Switch from 'sentry/components/switchButton'; import {IconAdd} from 'sentry/icons'; @@ -59,9 +60,18 @@ export default function CustomOverride({ }} css={{background: 'white'}} /> - + ); } diff --git a/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx b/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx index 234a0d20432b3c..205425bcfb35ab 100644 --- a/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx +++ b/static/app/components/devtoolbar/components/featureFlags/featureFlagsPanel.tsx @@ -1,6 +1,6 @@ import {type Dispatch, Fragment, type SetStateAction, useState} from 'react'; +import {css} from '@emotion/react'; -import {Button} from 'sentry/components/button'; import {resetButtonCss, resetFlexRowCss} from 'sentry/components/devtoolbar/styles/reset'; import Input from 'sentry/components/input'; import {PanelTable} from 'sentry/components/panels/panelTable'; @@ -203,9 +203,13 @@ function FlagTable({prefilter, searchTerm}: {prefilter: Prefilter; searchTerm: s {display: 'block', textAlign: 'center'}, ]} > - + )} diff --git a/static/app/components/events/aiSuggestedSolution/banner.tsx b/static/app/components/events/aiSuggestedSolution/banner.tsx deleted file mode 100644 index c1b2818e29ea46..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/banner.tsx +++ /dev/null @@ -1,157 +0,0 @@ -import styled from '@emotion/styled'; - -import bannerBackground from 'sentry-images/spot/ai-suggestion-banner-background.svg'; -import bannerSentaur from 'sentry-images/spot/ai-suggestion-banner-sentaur.svg'; -import bannerStars from 'sentry-images/spot/ai-suggestion-banner-stars.svg'; - -import {Button} from 'sentry/components/button'; -import ExternalLink from 'sentry/components/links/externalLink'; -import Panel from 'sentry/components/panels/panel'; -import PanelBody from 'sentry/components/panels/panelBody'; -import QuestionTooltip from 'sentry/components/questionTooltip'; -import {t, tct} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import TextBlock from 'sentry/views/settings/components/text/textBlock'; - -import {ExperimentalFeatureBadge} from './experimentalFeatureBadge'; - -type Props = { - onViewSuggestion: () => void; -}; - -export function Banner({onViewSuggestion}: Props) { - return ( - - -
- - {t('AI Solutions')} - <MoreInfoTooltip - isHoverable - size="sm" - title={tct( - 'This is an OpenAI generated solution that suggests a fix for this issue. Be aware that this may not be accurate. [learnMore:Learn more]', - { - learnMore: ( - <ExternalLink href="https://docs.sentry.io/product/issues/issue-details/ai-suggested-solution/" /> - ), - } - )} - /> - <ExperimentalFeatureBadge /> - - - {t('You might get lucky, but again, maybe not\u2026')} - -
- - - - - - {t('View Suggestion')} - - - -
- ); -} - -const Wrapper = styled(Panel)` - margin-bottom: 0; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - height: 80px; - } -`; - -const Body = styled(PanelBody)` - display: flex; - align-items: center; - flex-wrap: wrap; - gap: ${space(1)}; - - > *:first-child { - flex: 1; - } - - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: grid; - grid-template-columns: 42% 1fr; - } -`; - -const Title = styled('div')` - font-size: ${p => p.theme.fontSizeSmall}; - text-transform: uppercase; - color: ${p => p.theme.gray300}; - display: flex; - align-items: center; - /* to be consistent with the feature badge size */ - height: ${space(2)}; - line-height: ${space(2)}; - white-space: nowrap; -`; - -const Description = styled(TextBlock)` - margin: ${space(1)} 0 0 0; -`; - -const Action = styled('div')` - display: flex; - justify-content: flex-end; - align-items: center; -`; - -const Sentaur = styled('img')` - display: none; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: block; - height: 8.563rem; - position: absolute; - bottom: 0; - right: 6.608rem; - object-fit: cover; - z-index: 1; - pointer-events: none; - } -`; - -const Background = styled('img')` - display: none; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: block; - position: absolute; - top: 0; - right: 0; - object-fit: cover; - max-width: 100%; - height: 100%; - border-radius: ${p => p.theme.panelBorderRadius}; - } -`; - -const Stars = styled('img')` - display: none; - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - display: block; - height: 8.563rem; - position: absolute; - right: -1rem; - bottom: -0.125rem; - object-fit: cover; - /* workaround to remove a extra svg on the bottom right */ - border-radius: ${p => p.theme.panelBorderRadius}; - } -`; - -const ViewSuggestionButton = styled(Button)` - @media (min-width: ${p => p.theme.breakpoints.xlarge}) { - position: absolute; - right: 1rem; - top: 1.5rem; - } -`; - -const MoreInfoTooltip = styled(QuestionTooltip)` - margin-left: ${space(0.5)}; -`; diff --git a/static/app/components/events/aiSuggestedSolution/experimentalFeatureBadge.tsx b/static/app/components/events/aiSuggestedSolution/experimentalFeatureBadge.tsx deleted file mode 100644 index 02833aeef3d376..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/experimentalFeatureBadge.tsx +++ /dev/null @@ -1,12 +0,0 @@ -import styled from '@emotion/styled'; - -import FeatureBadge from 'sentry/components/badge/featureBadge'; -import {space} from 'sentry/styles/space'; - -export function ExperimentalFeatureBadge() { - return ; -} - -const CenteredFeatureBadge = styled(FeatureBadge)` - height: ${space(2)}; -`; diff --git a/static/app/components/events/aiSuggestedSolution/index.tsx b/static/app/components/events/aiSuggestedSolution/index.tsx deleted file mode 100644 index a50c084eae7794..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/index.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import {useState} from 'react'; - -import type {Event} from 'sentry/types/event'; -import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {getAnalyticsDataForEvent} from 'sentry/utils/events'; -import useOrganization from 'sentry/utils/useOrganization'; - -import {Banner} from './banner'; -import {Suggestion} from './suggestion'; - -type Props = { - event: Event; - projectSlug: Project['slug']; -}; - -export function AiSuggestedSolution({projectSlug, event}: Props) { - const organization 
= useOrganization(); - - const [openSuggestion, setOpenSuggestion] = useState(false); - - return ( -
- {!openSuggestion ? ( - { - trackAnalytics('ai_suggested_solution.view_suggestion_button_clicked', { - organization, - project_id: event.projectID, - group_id: event.groupID, - ...getAnalyticsDataForEvent(event), - }); - setOpenSuggestion(true); - }} - /> - ) : ( - { - trackAnalytics('ai_suggested_solution.hide_suggestion_button_clicked', { - organization, - project_id: event.projectID, - group_id: event.groupID, - ...getAnalyticsDataForEvent(event), - }); - setOpenSuggestion(false); - }} - /> - )} -
- ); -} diff --git a/static/app/components/events/aiSuggestedSolution/suggestion.tsx b/static/app/components/events/aiSuggestedSolution/suggestion.tsx deleted file mode 100644 index 66d8bf583cdd07..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/suggestion.tsx +++ /dev/null @@ -1,320 +0,0 @@ -import {useCallback, useState} from 'react'; -import styled from '@emotion/styled'; - -import {addSuccessMessage} from 'sentry/actionCreators/indicator'; -import {Button, LinkButton} from 'sentry/components/button'; -import ButtonBar from 'sentry/components/buttonBar'; -import EmptyMessage from 'sentry/components/emptyMessage'; -import LoadingError from 'sentry/components/loadingError'; -import Panel from 'sentry/components/panels/panel'; -import PanelBody from 'sentry/components/panels/panelBody'; -import PanelFooter from 'sentry/components/panels/panelFooter'; -import PanelHeader from 'sentry/components/panels/panelHeader'; -import {IconFile, IconFlag, IconHappy, IconMeh, IconSad} from 'sentry/icons'; -import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import type {Event} from 'sentry/types/event'; -import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {getAnalyticsDataForEvent} from 'sentry/utils/events'; -import {isActiveSuperuser} from 'sentry/utils/isActiveSuperuser'; -import {limitedMarked} from 'sentry/utils/marked'; -import {useApiQuery} from 'sentry/utils/queryClient'; -import {useIsSentryEmployee} from 'sentry/utils/useIsSentryEmployee'; -import useOrganization from 'sentry/utils/useOrganization'; - -import {ExperimentalFeatureBadge} from './experimentalFeatureBadge'; -import {SuggestionLoaderMessage} from './suggestionLoaderMessage'; -import {useOpenAISuggestionLocalStorage} from './useOpenAISuggestionLocalStorage'; - -type Props = { - event: Event; - onHideSuggestion: () => void; - projectSlug: Project['slug']; -}; - -function ErrorDescription({ - restriction, - organizationSlug, - onRefetch, - onSetIndividualConsent, - onHideSuggestion, -}: { - onHideSuggestion: () => void; - onRefetch: () => void; - onSetIndividualConsent: (consent: boolean) => void; - organizationSlug: string; - restriction?: 'subprocessor' | 'individual_consent'; -}) { - if (restriction === 'subprocessor') { - return ( - } - title={t('OpenAI Subprocessor Acknowledgment')} - description={t( - 'In order to use this feature, your organization needs to accept the OpenAI Subprocessor Acknowledgment.' - )} - action={ - - - - {t('Accept in Settings')} - - - } - /> - ); - } - - if (restriction === 'individual_consent') { - const activeSuperUser = isActiveSuperuser(); - return ( - } - title={t('We need your consent')} - description={t( - 'By using this feature, you agree that OpenAI is a subprocessor and may process the data that you’ve chosen to submit. Sentry makes no guarantees as to the accuracy of the feature’s AI-generated recommendations.' 
- )} - action={ - - - - - } - /> - ); - } - - return ; -} - -export function Suggestion({onHideSuggestion, projectSlug, event}: Props) { - const organization = useOrganization(); - const [suggestedSolutionLocalConfig, setSuggestedSolutionLocalConfig] = - useOpenAISuggestionLocalStorage(); - const [piiCertified, setPiiCertified] = useState(false); - const [feedbackProvided, setFeedbackProvided] = useState(false); - const isSentryEmployee = useIsSentryEmployee(); - - const { - data, - isPending: dataIsLoading, - isError: dataIsError, - refetch: dataRefetch, - error, - } = useApiQuery<{suggestion: string}>( - [ - `/projects/${organization.slug}/${projectSlug}/events/${event.eventID}/ai-fix-suggest/`, - { - query: { - consent: suggestedSolutionLocalConfig.individualConsent ? 'yes' : undefined, - pii_certified: isSentryEmployee ? (piiCertified ? 'yes' : 'no') : undefined, - }, - }, - ], - { - enabled: isSentryEmployee ? (piiCertified ? true : false) : true, - staleTime: Infinity, - retry: false, - } - ); - - const handleFeedbackClick = useCallback(() => { - addSuccessMessage('Thank you for your feedback!'); - setFeedbackProvided(true); - }, []); - - if (isSentryEmployee && !piiCertified) { - return ( - } - title={t('PII Certification Required')} - description={t( - 'Before using this feature, please confirm that there is no personally identifiable information in this event.' - )} - action={ - - - - } - /> - ); - } - - return ( - -
- - {t('AI Solution')} - <ExperimentalFeatureBadge /> - - -
- - {dataIsLoading ? ( - -
- - - ) : dataIsError ? ( - - setSuggestedSolutionLocalConfig({individualConsent: true}) - } - restriction={error?.responseJSON?.restriction as any} - onHideSuggestion={onHideSuggestion} - /> - ) : ( - - )} - - {!dataIsLoading && !dataIsError && !feedbackProvided && ( - - - {t('Was this helpful?')} - - - - - - - - )} - - ); -} - -const Header = styled(PanelHeader)` - background: transparent; - padding: ${space(1)} ${space(2)}; - align-items: center; - color: ${p => p.theme.gray300}; -`; - -const Feedback = styled('div')` - padding: ${space(1)} ${space(2)}; - display: grid; - grid-template-columns: 1fr; - align-items: center; - text-align: left; - gap: ${space(1)}; - font-size: ${p => p.theme.fontSizeSmall}; - @media (min-width: ${p => p.theme.breakpoints.small}) { - grid-template-columns: 1fr max-content; - text-align: right; - gap: ${space(2)}; - } -`; - -const SuggestionLoadingError = styled(LoadingError)` - margin-bottom: 0; - border: none; - /* This is just to be consitent with other */ - /* padding-top and padding-bottom we are using in the empty state component */ - padding-top: ${space(4)}; - padding-bottom: ${space(4)}; -`; - -const LoaderWrapper = styled('div')` - padding: ${space(4)} 0; - text-align: center; - gap: ${space(2)}; - display: flex; - flex-direction: column; -`; - -const Content = styled('div')` - padding: ${space(2)}; - /* hack until we update backend to send us other heading */ - h4 { - font-size: ${p => p.theme.fontSizeExtraLarge}; - margin-bottom: ${space(1)}; - } -`; - -const Title = styled('div')` - /* to be consistent with the feature badge size */ - height: ${space(2)}; - line-height: ${space(2)}; - display: flex; - align-items: center; -`; diff --git a/static/app/components/events/aiSuggestedSolution/suggestionLoaderMessage.tsx b/static/app/components/events/aiSuggestedSolution/suggestionLoaderMessage.tsx deleted file mode 100644 index aa63118f819bb7..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/suggestionLoaderMessage.tsx +++ /dev/null @@ -1,55 +0,0 @@ -import {useEffect, useState} from 'react'; -import styled from '@emotion/styled'; -import shuffle from 'lodash/shuffle'; - -import {t} from 'sentry/locale'; - -const LOADING_MESSAGES = [ - t('Heating up them GPUs'), - t('Engineering a prompt'), - t('Demonstrating value'), - t('Moving the needle'), - t('Preventing prompt injection attacks'), - t('Remove traces of depression from answers'), - t('Reticulating splines or whatever'), - t('Loading marketing material'), - t('Wiping node_modules'), - t('Installing dependencies'), - t('Searching StackOverflow'), - t('Googling for solutions'), - t('Running spell checker'), - t('Searching for the perfect emoji'), - t('Adding trace amounts of human touch'), - t("Don't be like Sydney, don't be like Sydney"), - t('Initiating quantum leap'), - t('Charging flux capacitors'), - t('Summoning a demon'), -]; - -export function SuggestionLoaderMessage() { - const [messages] = useState(() => shuffle(LOADING_MESSAGES)); - const [messageIndex, setMessageIndex] = useState(0); - - useEffect(() => { - const id = setInterval( - () => { - if (messageIndex < messages.length - 1) { - setMessageIndex(messageIndex + 1); - } - }, - Math.random() * 700 + 800 - ); - return () => clearInterval(id); - }); - - return ( -
- {`${messages[messageIndex]}\u2026`} -
- ); -} - -const Message = styled('div')` - color: ${p => p.theme.gray300}; - font-size: ${p => p.theme.fontSizeLarge}; -`; diff --git a/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx b/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx deleted file mode 100644 index 96b7bc66486db7..00000000000000 --- a/static/app/components/events/aiSuggestedSolution/useOpenAISuggestionLocalStorage.tsx +++ /dev/null @@ -1,32 +0,0 @@ -import {useCallback} from 'react'; - -import {useLocalStorageState} from 'sentry/utils/useLocalStorageState'; -import {useUser} from 'sentry/utils/useUser'; - -type LocalState = { - individualConsent: boolean; -}; - -export function useOpenAISuggestionLocalStorage(): [ - LocalState, - (newState: Partial) => void, -] { - const user = useUser(); - - const [localStorageState, setLocalStorageState] = useLocalStorageState( - `open-ai-suggestion:${user.id}`, - { - // agree forward data to OpenAI - individualConsent: false, - } - ); - - const setSuggestedSolutionLocalConfig = useCallback( - (newState: Partial) => { - setLocalStorageState({...localStorageState, ...newState}); - }, - [localStorageState, setLocalStorageState] - ); - - return [localStorageState, setSuggestedSolutionLocalConfig]; -} diff --git a/static/app/components/events/autofix/autofixDiff.tsx b/static/app/components/events/autofix/autofixDiff.tsx index 26e7c6ce442579..c076e645adeed3 100644 --- a/static/app/components/events/autofix/autofixDiff.tsx +++ b/static/app/components/events/autofix/autofixDiff.tsx @@ -9,12 +9,13 @@ import { DiffLineType, type FilePatch, } from 'sentry/components/events/autofix/types'; +import {makeAutofixQueryKey} from 'sentry/components/events/autofix/useAutofix'; import TextArea from 'sentry/components/forms/controls/textarea'; import InteractionStateLayer from 'sentry/components/interactionStateLayer'; import {IconChevron, IconClose, IconDelete, IconEdit} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {useMutation} from 'sentry/utils/queryClient'; +import {useMutation, useQueryClient} from 'sentry/utils/queryClient'; import useApi from 'sentry/utils/useApi'; type AutofixDiffProps = { @@ -101,7 +102,7 @@ function HunkHeader({lines, sectionHeader}: {lines: DiffLine[]; sectionHeader: s function useUpdateHunk({groupId, runId}: {groupId: string; runId: string}) { const api = useApi({persistInFlight: true}); - + const queryClient = useQueryClient(); return useMutation({ mutationFn: (params: { fileName: string; @@ -123,6 +124,9 @@ function useUpdateHunk({groupId, runId}: {groupId: string; runId: string}) { }, }); }, + onSuccess: _ => { + queryClient.invalidateQueries({queryKey: makeAutofixQueryKey(groupId)}); + }, onError: () => { addErrorMessage(t('Something went wrong when updating changes.')); }, diff --git a/static/app/components/events/autofix/autofixInsightCards.spec.tsx b/static/app/components/events/autofix/autofixInsightCards.spec.tsx index 7a464c32667dd6..864c4f89462e14 100644 --- a/static/app/components/events/autofix/autofixInsightCards.spec.tsx +++ b/static/app/components/events/autofix/autofixInsightCards.spec.tsx @@ -164,17 +164,29 @@ describe('AutofixInsightCards', () => { renderComponent(); const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'}); await userEvent.click(rethinkButton); - expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument(); + expect( + screen.getByPlaceholderText( + 'You should know X... 
Dive deeper into Y... Look at Z...' + ) + ).toBeInTheDocument(); }); it('hides rethink input overlay when clicked outside', async () => { renderComponent(); const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'}); await userEvent.click(rethinkButton); - expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument(); + expect( + screen.getByPlaceholderText( + 'You should know X... Dive deeper into Y... Look at Z...' + ) + ).toBeInTheDocument(); await userEvent.click(document.body); - expect(screen.queryByPlaceholderText('Say something...')).not.toBeInTheDocument(); + expect( + screen.queryByPlaceholderText( + 'You should know X... Dive deeper into Y... Look at Z...' + ) + ).not.toBeInTheDocument(); }); it('submits rethink request when form is submitted', async () => { @@ -187,7 +199,9 @@ describe('AutofixInsightCards', () => { const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'}); await userEvent.click(rethinkButton); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText( + 'You should know X... Dive deeper into Y... Look at Z...' + ); await userEvent.type(input, 'Rethink this part'); const submitButton = screen.getByLabelText( @@ -222,7 +236,9 @@ describe('AutofixInsightCards', () => { const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'}); await userEvent.click(rethinkButton); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText( + 'You should know X... Dive deeper into Y... Look at Z...' + ); await userEvent.type(input, 'Rethink this part'); const submitButton = screen.getByLabelText( @@ -246,7 +262,9 @@ describe('AutofixInsightCards', () => { const rethinkButton = screen.getByRole('button', {name: 'Rethink from here'}); await userEvent.click(rethinkButton); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText( + 'You should know X... Dive deeper into Y... Look at Z...' 
+ ); await userEvent.type(input, 'Rethink this part'); const submitButton = screen.getByLabelText( diff --git a/static/app/components/events/autofix/autofixInsightCards.tsx b/static/app/components/events/autofix/autofixInsightCards.tsx index 63858a9b96c318..52a896e384f74a 100644 --- a/static/app/components/events/autofix/autofixInsightCards.tsx +++ b/static/app/components/events/autofix/autofixInsightCards.tsx @@ -1,4 +1,6 @@ -import {useEffect, useRef, useState} from 'react'; +import {useCallback, useEffect, useState} from 'react'; +import {createPortal} from 'react-dom'; +import {usePopper} from 'react-popper'; import styled from '@emotion/styled'; import {AnimatePresence, type AnimationProps, motion} from 'framer-motion'; @@ -13,6 +15,7 @@ import type { AutofixRepository, BreadcrumbContext, } from 'sentry/components/events/autofix/types'; +import {makeAutofixQueryKey} from 'sentry/components/events/autofix/useAutofix'; import BreadcrumbItemContent from 'sentry/components/events/breadcrumbs/breadcrumbItemContent'; import { BreadcrumbIcon, @@ -27,7 +30,6 @@ import { IconArrow, IconChevron, IconCode, - IconEdit, IconFire, IconRefresh, IconSpan, @@ -37,7 +39,7 @@ import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {BreadcrumbLevelType, BreadcrumbType} from 'sentry/types/breadcrumbs'; import {singleLineRenderer} from 'sentry/utils/marked'; -import {useMutation} from 'sentry/utils/queryClient'; +import {useMutation, useQueryClient} from 'sentry/utils/queryClient'; import testableTransition from 'sentry/utils/testableTransition'; import useApi from 'sentry/utils/useApi'; @@ -84,13 +86,15 @@ export function ExpandableInsightContext({ title, icon, rounded, + expandByDefault = false, }: { children: React.ReactNode; title: string; + expandByDefault?: boolean; icon?: React.ReactNode; rounded?: boolean; }) { - const [expanded, setExpanded] = useState(false); + const [expanded, setExpanded] = useState(expandByDefault); const toggleExpand = () => { setExpanded(oldState => !oldState); @@ -134,6 +138,8 @@ interface AutofixInsightCardProps { repos: AutofixRepository[]; runId: string; stepIndex: number; + isLastInsightInStep?: boolean; + shouldHighlightRethink?: boolean; } function AutofixInsightCard({ @@ -145,6 +151,8 @@ function AutofixInsightCard({ stepIndex, groupId, runId, + shouldHighlightRethink, + isLastInsightInStep, }: AutofixInsightCardProps) { const isUserMessage = insight.justification === 'USER'; @@ -164,6 +172,7 @@ function AutofixInsightCard({ stepIndex={stepIndex} groupId={groupId} runId={runId} + isHighlighted={shouldHighlightRethink} /> )} {!isUserMessage && ( @@ -287,6 +296,8 @@ function AutofixInsightCard({ stepIndex={stepIndex} groupId={groupId} runId={runId} + isHighlighted={shouldHighlightRethink} + isLastCard={isLastInsightInStep} /> )} @@ -303,6 +314,7 @@ interface AutofixInsightCardsProps { repos: AutofixRepository[]; runId: string; stepIndex: number; + shouldHighlightRethink?: boolean; } function AutofixInsightCards({ @@ -313,6 +325,7 @@ function AutofixInsightCards({ stepIndex, groupId, runId, + shouldHighlightRethink, }: AutofixInsightCardsProps) { return ( @@ -329,6 +342,8 @@ function AutofixInsightCards({ stepIndex={stepIndex} groupId={groupId} runId={runId} + isLastInsightInStep={index === insights.length - 1} + shouldHighlightRethink={shouldHighlightRethink} /> ) ) @@ -343,12 +358,16 @@ function AutofixInsightCards({

) : hasStepBelow ? ( - + + + ) : null}
); @@ -356,6 +375,7 @@ function AutofixInsightCards({ export function useUpdateInsightCard({groupId, runId}: {groupId: string; runId: string}) { const api = useApi({persistInFlight: true}); + const queryClient = useQueryClient(); return useMutation({ mutationFn: (params: { @@ -377,6 +397,7 @@ export function useUpdateInsightCard({groupId, runId}: {groupId: string; runId: }); }, onSuccess: _ => { + queryClient.invalidateQueries({queryKey: makeAutofixQueryKey(groupId)}); addSuccessMessage(t('Thanks, rethinking this...')); }, onError: () => { @@ -390,22 +411,54 @@ function ChainLink({ runId, stepIndex, insightCardAboveIndex, + isHighlighted, + isLastCard, }: { groupId: string; insightCardAboveIndex: number | null; runId: string; stepIndex: number; + isHighlighted?: boolean; + isLastCard?: boolean; }) { const [showOverlay, setShowOverlay] = useState(false); - const overlayRef = useRef(null); + const [referenceElement, setReferenceElement] = useState< + HTMLAnchorElement | HTMLButtonElement | null + >(null); + const [popperElement, setPopperElement] = useState(null); const [comment, setComment] = useState(''); const {mutate: send} = useUpdateInsightCard({groupId, runId}); - const handleClickOutside = event => { - if (overlayRef.current && !overlayRef.current.contains(event.target)) { + const {styles, attributes} = usePopper(referenceElement, popperElement, { + placement: 'left-start', + modifiers: [ + { + name: 'offset', + options: { + offset: [-16, 8], + }, + }, + { + name: 'flip', + options: { + fallbackPlacements: ['right-start', 'bottom-start'], + }, + }, + ], + }); + + const handleClickOutside = useCallback( + (event: MouseEvent) => { + if ( + referenceElement?.contains(event.target as Node) || + popperElement?.contains(event.target as Node) + ) { + return; + } setShowOverlay(false); - } - }; + }, + [popperElement, referenceElement] + ); useEffect(() => { if (showOverlay) { @@ -416,57 +469,82 @@ function ChainLink({ return () => { document.removeEventListener('mousedown', handleClickOutside); }; - }, [showOverlay]); - - // Determine if this is the first chain link (before first insight) - const isFirstLink = insightCardAboveIndex === null || insightCardAboveIndex < 0; + }, [showOverlay, handleClickOutside]); return ( - } - size="zero" - className="rethink-button" - title={t('Rethink from here')} - aria-label={t('Rethink from here')} - onClick={() => setShowOverlay(true)} - /> - - {showOverlay && ( - -
{ - e.preventDefault(); - setShowOverlay(false); - setComment(''); - send({ - message: comment, - step_index: stepIndex, - retain_insight_card_index: insightCardAboveIndex, - }); - }} - className="row-form" + + + {isLastCard && isHighlighted && ( + + Not satisfied? + + )} + + } + size="zero" + className="rethink-button" + title={t('Rethink from here')} + aria-label={t('Rethink from here')} + onClick={() => setShowOverlay(true)} + isHighlighted={isHighlighted} + /> + + + {showOverlay && + createPortal( + - setComment(e.target.value)} - size="md" - autoFocus - /> - ; + }), + LinkButton: jest.fn(props => { + return {props.children}; + }), +})); + +const mockButton = Button as jest.MockedFunction; + +describe('AutofixMessageBox Analytics', () => { + const defaultProps = { + displayText: 'Test display text', + groupId: '123', + runId: '456', + actionText: 'Send', + allowEmptyMessage: false, + responseRequired: false, + step: null, + onSend: null, + }; + + const changesStepProps = { + ...defaultProps, + isChangesStep: true, + step: AutofixStepFixture({ + type: AutofixStepType.CHANGES, + changes: [AutofixCodebaseChangeData()], + }), + }; + + beforeEach(() => { + MockApiClient.clearMockResponses(); + mockButton.mockClear(); + }); + + it('passes correct analytics props for suggested root cause without instructions', () => { + const onSendMock = jest.fn(); + render( + + ); + + expect(mockButton).toHaveBeenLastCalledWith( + expect.objectContaining({ + analyticsEventKey: 'autofix.create_fix_clicked', + analyticsEventName: 'Autofix: Create Fix Clicked', + analyticsParams: { + group_id: '123', + type: 'suggested', + }, + }), + expect.anything() + ); + }); + + it('passes correct analytics props for suggested root cause with instructions', async () => { + const onSendMock = jest.fn(); + render( + + ); + + const input = screen.getByPlaceholderText( + '(Optional) Provide any instructions for the fix...' 
+ ); + await userEvent.type(input, 'Some instructions'); + + expect(mockButton).toHaveBeenLastCalledWith( + expect.objectContaining({ + analyticsEventKey: 'autofix.create_fix_clicked', + analyticsEventName: 'Autofix: Create Fix Clicked', + analyticsParams: { + group_id: '123', + type: 'suggested_with_instructions', + }, + }), + expect.anything() + ); + }); + + it('passes correct analytics props for custom root cause', async () => { + const onSendMock = jest.fn(); + render( + + ); + + await userEvent.click(screen.getAllByText('Propose your own root cause')[0]); + const customInput = screen.getByPlaceholderText('Propose your own root cause...'); + await userEvent.type(customInput, 'Custom root cause'); + + expect(mockButton).toHaveBeenLastCalledWith( + expect.objectContaining({ + analyticsEventKey: 'autofix.create_fix_clicked', + analyticsEventName: 'Autofix: Create Fix Clicked', + analyticsParams: { + group_id: '123', + type: 'custom', + }, + }), + expect.anything() + ); + }); + + it('passes correct analytics props for Create PR button', async () => { + MockApiClient.addMockResponse({ + url: '/issues/123/autofix/setup/?check_write_access=true', + method: 'GET', + body: { + genAIConsent: {ok: true}, + integration: {ok: true}, + githubWriteIntegration: { + repos: [{ok: true, owner: 'owner', name: 'hello-world', id: 100}], + }, + }, + }); + + render(); + + await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'})); + + // Find the last call to Button that matches our Create PR button + const createPRButtonCall = mockButton.mock.calls.find( + call => call[0]?.analyticsEventKey === 'autofix.create_pr_clicked' + ); + expect(createPRButtonCall?.[0]).toEqual( + expect.objectContaining({ + analyticsEventKey: 'autofix.create_pr_clicked', + analyticsEventName: 'Autofix: Create PR Clicked', + analyticsParams: {group_id: '123'}, + }) + ); + }); + + it('passes correct analytics props for Create PR Setup button', async () => { + MockApiClient.addMockResponse({ + url: '/issues/123/autofix/setup/?check_write_access=true', + method: 'GET', + body: { + genAIConsent: {ok: true}, + integration: {ok: true}, + githubWriteIntegration: { + repos: [{ok: false, owner: 'owner', name: 'hello-world', id: 100}], + }, + }, + }); + + render(); + + await userEvent.click(screen.getByRole('radio', {name: 'Approve changes'})); + + // Find the last call to Button that matches our Setup button + const setupButtonCall = mockButton.mock.calls.find( + call => call[0].children === 'Create PRs' + ); + expect(setupButtonCall?.[0]).toEqual( + expect.objectContaining({ + analyticsEventKey: 'autofix.create_pr_setup_clicked', + analyticsEventName: 'Autofix: Create PR Setup Clicked', + analyticsParams: { + group_id: '123', + }, + }) + ); + }); +}); diff --git a/static/app/components/events/autofix/autofixMessageBox.spec.tsx b/static/app/components/events/autofix/autofixMessageBox.spec.tsx index 2fbed4f245bbd7..1954cd41683551 100644 --- a/static/app/components/events/autofix/autofixMessageBox.spec.tsx +++ b/static/app/components/events/autofix/autofixMessageBox.spec.tsx @@ -12,7 +12,7 @@ import { import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator'; import AutofixMessageBox from 'sentry/components/events/autofix/autofixMessageBox'; -import {AutofixStepType} from 'sentry/components/events/autofix/types'; +import {AutofixStatus, AutofixStepType} from 'sentry/components/events/autofix/types'; jest.mock('sentry/actionCreators/indicator'); @@ -41,7 +41,7 @@ describe('AutofixMessageBox', () => { 
...changesStepProps, step: AutofixStepFixture({ type: AutofixStepType.CHANGES, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, changes: [AutofixCodebaseChangeData()], }), }; @@ -50,7 +50,7 @@ describe('AutofixMessageBox', () => { ...changesStepProps, step: AutofixStepFixture({ type: AutofixStepType.CHANGES, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, changes: [ AutofixCodebaseChangeData({ repo_name: 'example/repo1', @@ -80,7 +80,9 @@ describe('AutofixMessageBox', () => { render(); expect(screen.getByText('Test display text')).toBeInTheDocument(); - expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument(); + expect( + screen.getByPlaceholderText('Share helpful context or feedback...') + ).toBeInTheDocument(); expect(screen.getByRole('button', {name: 'Send'})).toBeInTheDocument(); }); @@ -88,7 +90,7 @@ describe('AutofixMessageBox', () => { const onSendMock = jest.fn(); render(); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText('Share helpful context or feedback...'); await userEvent.type(input, 'Test message'); await userEvent.click(screen.getByRole('button', {name: 'Send'})); @@ -104,7 +106,7 @@ describe('AutofixMessageBox', () => { render(); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText('Share helpful context or feedback...'); await userEvent.type(input, 'Test message'); await userEvent.click(screen.getByRole('button', {name: 'Send'})); @@ -125,7 +127,7 @@ describe('AutofixMessageBox', () => { render(); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText('Share helpful context or feedback...'); await userEvent.type(input, 'Test message'); await userEvent.click(screen.getByRole('button', {name: 'Send'})); @@ -196,17 +198,18 @@ describe('AutofixMessageBox', () => { it('shows feedback input when "Give feedback" is selected', () => { render(); - expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument(); + expect( + screen.getByPlaceholderText('Share helpful context or feedback...') + ).toBeInTheDocument(); expect(screen.getByRole('button', {name: 'Send'})).toBeInTheDocument(); }); it('shows "Create PR" button when "Approve changes" is selected', async () => { MockApiClient.addMockResponse({ - url: '/issues/123/autofix/setup/', + url: '/issues/123/autofix/setup/?check_write_access=true', method: 'GET', body: { genAIConsent: {ok: true}, - codebaseIndexing: {ok: true}, integration: {ok: true}, githubWriteIntegration: { repos: [{ok: true, owner: 'owner', name: 'hello-world', id: 100}], @@ -226,11 +229,10 @@ describe('AutofixMessageBox', () => { it('shows "Create PRs" button with correct text for multiple changes', async () => { MockApiClient.addMockResponse({ - url: '/issues/123/autofix/setup/', + url: '/issues/123/autofix/setup/?check_write_access=true', method: 'GET', body: { genAIConsent: {ok: true}, - codebaseIndexing: {ok: true}, integration: {ok: true}, githubWriteIntegration: { repos: [{ok: true, owner: 'owner', name: 'hello-world', id: 100}], @@ -283,11 +285,10 @@ describe('AutofixMessageBox', () => { it('shows "Create PRs" button that opens setup modal when setup is incomplete', async () => { MockApiClient.addMockResponse({ - url: '/issues/123/autofix/setup/', + url: '/issues/123/autofix/setup/?check_write_access=true', method: 'GET', body: { genAIConsent: {ok: true}, - codebaseIndexing: {ok: true}, integration: {ok: true}, githubWriteIntegration: { 
repos: [ @@ -296,6 +297,14 @@ describe('AutofixMessageBox', () => { }, }, }); + MockApiClient.addMockResponse({ + url: '/issues/123/autofix/setup/', + method: 'GET', + body: { + genAIConsent: {ok: true}, + integration: {ok: true}, + }, + }); render(); diff --git a/static/app/components/events/autofix/autofixMessageBox.tsx b/static/app/components/events/autofix/autofixMessageBox.tsx index 238092cf7f0b4f..4f2f77958f41e1 100644 --- a/static/app/components/events/autofix/autofixMessageBox.tsx +++ b/static/app/components/events/autofix/autofixMessageBox.tsx @@ -8,11 +8,11 @@ import {Button, LinkButton} from 'sentry/components/button'; import {AutofixSetupWriteAccessModal} from 'sentry/components/events/autofix/autofixSetupWriteAccessModal'; import { type AutofixCodebaseChange, + AutofixStatus, type AutofixStep, AutofixStepType, } from 'sentry/components/events/autofix/types'; import { - type AutofixResponse, makeAutofixQueryKey, useAutofixData, } from 'sentry/components/events/autofix/useAutofix'; @@ -33,12 +33,13 @@ import { import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {singleLineRenderer} from 'sentry/utils/marked'; -import {setApiQueryData, useMutation, useQueryClient} from 'sentry/utils/queryClient'; +import {useMutation, useQueryClient} from 'sentry/utils/queryClient'; import testableTransition from 'sentry/utils/testableTransition'; import useApi from 'sentry/utils/useApi'; function useSendMessage({groupId, runId}: {groupId: string; runId: string}) { const api = useApi({persistInFlight: true}); + const queryClient = useQueryClient(); return useMutation({ mutationFn: (params: {message: string}) => { @@ -54,6 +55,7 @@ function useSendMessage({groupId, runId}: {groupId: string; runId: string}) { }); }, onSuccess: _ => { + queryClient.invalidateQueries({queryKey: makeAutofixQueryKey(groupId)}); addSuccessMessage('Thanks for the input.'); }, onError: () => { @@ -113,23 +115,7 @@ function CreatePRsButton({ }, onSuccess: () => { addSuccessMessage(t('Created pull requests.')); - setApiQueryData( - queryClient, - makeAutofixQueryKey(groupId), - data => { - if (!data || !data.autofix) { - return data; - } - - return { - ...data, - autofix: { - ...data.autofix, - status: 'PROCESSING', - }, - }; - } - ); + queryClient.invalidateQueries({queryKey: makeAutofixQueryKey(groupId)}); }, onError: () => { setHasClickedCreatePr(false); @@ -161,7 +147,7 @@ function SetupAndCreatePRsButton({ changes: AutofixCodebaseChange[]; groupId: string; }) { - const {data: setupData} = useAutofixSetup({groupId}); + const {data: setupData} = useAutofixSetup({groupId, checkWriteAccess: true}); if ( !changes.every( @@ -190,6 +176,102 @@ function SetupAndCreatePRsButton({ return ; } +interface RootCauseAndFeedbackInputAreaProps { + actionText: string; + changesMode: 'give_feedback' | 'add_tests' | 'create_prs'; + groupId: string; + handleSend: (e: FormEvent) => void; + isRootCauseSelectionStep: boolean; + message: string; + primaryAction: boolean; + responseRequired: boolean; + rootCauseMode: 'suggested_root_cause' | 'custom_root_cause'; + setMessage: (message: string) => void; +} + +function RootCauseAndFeedbackInputArea({ + handleSend, + isRootCauseSelectionStep, + message, + rootCauseMode, + responseRequired, + setMessage, + groupId, + actionText, + primaryAction, + changesMode, +}: RootCauseAndFeedbackInputAreaProps) { + return ( +
+ + {!responseRequired ? ( + + setMessage(e.target.value)} + placeholder={ + !isRootCauseSelectionStep + ? 'Share helpful context or feedback...' + : rootCauseMode === 'suggested_root_cause' + ? '(Optional) Provide any instructions for the fix...' + : 'Propose your own root cause...' + } + /> + {isRootCauseSelectionStep ? ( + + ) : ( + + )} + + ) : ( + + setMessage(e.target.value)} + placeholder={'Please answer to continue...'} + /> + + + )} + +
+ ); +} + function StepIcon({step}: {step: AutofixStep}) { if (step.type === AutofixStepType.CHANGES) { if (step.changes?.length === 0) { @@ -213,15 +295,15 @@ function StepIcon({step}: {step: AutofixStep}) { } switch (step.status) { - case 'WAITING_FOR_USER_RESPONSE': + case AutofixStatus.WAITING_FOR_USER_RESPONSE: return ; - case 'PROCESSING': + case AutofixStatus.PROCESSING: return ; - case 'CANCELLED': + case AutofixStatus.CANCELLED: return ; - case 'ERROR': + case AutofixStatus.ERROR: return ; - case 'COMPLETED': + case AutofixStatus.COMPLETED: return ; default: return null; @@ -266,12 +348,12 @@ function AutofixMessageBox({ const changes = isChangesStep && step?.type === AutofixStepType.CHANGES ? step.changes : []; const prsMade = - step?.status === 'COMPLETED' && + step?.status === AutofixStatus.COMPLETED && changes.length >= 1 && changes.every(change => change.pull_request); const isDisabled = - step?.status === 'ERROR' || + step?.status === AutofixStatus.ERROR || (step?.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && step.causes?.length === 0); useEffect(() => { @@ -297,7 +379,7 @@ function AutofixMessageBox({ let text = message; if (isChangesStep && changesMode === 'add_tests') { text = - 'Please write a unit test that reproduces the issue to make sure it is fixed.'; + 'Please write a unit test that reproduces the issue to make sure it is fixed. Put it in the appropriate test file in the codebase. If there is none, create one.'; } if (text.trim() !== '' || allowEmptyMessage) { @@ -399,44 +481,18 @@ function AutofixMessageBox({ {(!isChangesStep || changesMode === 'give_feedback') && !prsMade && !isDisabled && ( -
- - {!responseRequired ? ( - - setMessage(e.target.value)} - placeholder={ - !isRootCauseSelectionStep - ? 'Say something...' - : rootCauseMode === 'suggested_root_cause' - ? '(Optional) Provide any instructions for the fix...' - : 'Propose your own root cause...' - } - /> - - - ) : ( - - setMessage(e.target.value)} - placeholder={'Please answer to continue...'} - /> - - - )} - -
+ )} {isChangesStep && changesMode === 'add_tests' && !prsMade && (
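A minimal sketch of the caching pattern these autofix changes converge on: the mutations touched in this patch (updating diff hunks, rethinking insights, sending messages, creating PRs) now call queryClient.invalidateQueries with makeAutofixQueryKey(groupId) in onSuccess, instead of hand-editing the cached response with setApiQueryData. The hook name, endpoint path, and request body below are illustrative assumptions; only makeAutofixQueryKey, useMutation, useQueryClient, and useApi come from the patch itself.

import {makeAutofixQueryKey} from 'sentry/components/events/autofix/useAutofix';
import {useMutation, useQueryClient} from 'sentry/utils/queryClient';
import useApi from 'sentry/utils/useApi';

// Hypothetical mutation hook showing the invalidate-on-success pattern.
function useExampleAutofixUpdate({groupId, runId}: {groupId: string; runId: string}) {
  const api = useApi({persistInFlight: true});
  const queryClient = useQueryClient();

  return useMutation({
    // Endpoint and payload are placeholders, not the patch's exact request.
    mutationFn: (params: {message: string}) =>
      api.requestPromise(`/issues/${groupId}/autofix/update/`, {
        method: 'POST',
        data: {run_id: runId, message: params.message},
      }),
    onSuccess: () => {
      // Drop the cached run and let the polling query keyed by
      // makeAutofixQueryKey(groupId) refetch it, rather than patching the
      // cached AutofixResponse in place.
      queryClient.invalidateQueries({queryKey: makeAutofixQueryKey(groupId)});
    },
  });
}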
diff --git a/static/app/components/events/autofix/autofixRootCause.spec.tsx b/static/app/components/events/autofix/autofixRootCause.spec.tsx index 045d0209ee9655..96c376b83d814f 100644 --- a/static/app/components/events/autofix/autofixRootCause.spec.tsx +++ b/static/app/components/events/autofix/autofixRootCause.spec.tsx @@ -13,7 +13,7 @@ describe('AutofixRootCause', function () { repos: [], }; - it('can view a relevant code snippet', async function () { + it('can view a relevant code snippet', function () { render(); // Displays all root cause and code context info @@ -23,11 +23,6 @@ describe('AutofixRootCause', function () { screen.getByText('This is the description of a root cause.') ).toBeInTheDocument(); - await userEvent.click( - screen.getByRole('button', { - name: 'Relevant code', - }) - ); expect( screen.getByText('Snippet #1: This is the title of a relevant code snippet.') ).toBeInTheDocument(); @@ -53,7 +48,7 @@ describe('AutofixRootCause', function () { ).toBeInTheDocument(); }); - it('shows hyperlink when matching GitHub repo available', async function () { + it('shows hyperlink when matching GitHub repo available', function () { render( ); - await userEvent.click( - screen.getByRole('button', { - name: 'Relevant code', - }) - ); - expect(screen.queryByRole('link', {name: 'GitHub'})).toBeInTheDocument(); expect(screen.queryByRole('link', {name: 'GitHub'})).toHaveAttribute( 'href', diff --git a/static/app/components/events/autofix/autofixRootCause.tsx b/static/app/components/events/autofix/autofixRootCause.tsx index 637ff995070ec4..2a301cef677d0f 100644 --- a/static/app/components/events/autofix/autofixRootCause.tsx +++ b/static/app/components/events/autofix/autofixRootCause.tsx @@ -15,6 +15,7 @@ import { type AutofixRootCauseCodeContext, type AutofixRootCauseData, type AutofixRootCauseSelection, + AutofixStatus, AutofixStepType, type CodeSnippetContext, } from 'sentry/components/events/autofix/types'; @@ -101,7 +102,7 @@ export function useSelectCause({groupId, runId}: {groupId: string; runId: string ...data, autofix: { ...data.autofix, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, steps: data.autofix.steps?.map(step => { if (step.type !== AutofixStepType.ROOT_CAUSE_ANALYSIS) { return step; @@ -229,6 +230,7 @@ function RootCauseContext({ icon={} title={'Relevant code'} rounded + expandByDefault > @@ -278,7 +280,11 @@ export function SuggestedFixSnippet({ if (!repo) { return undefined; } - return `${repo.url}/blob/${repo.default_branch}/${snippet.file_path}`; + return `${repo.url}/blob/${repo.default_branch}/${snippet.file_path}${ + snippet.start_line && snippet.end_line + ? `#L${snippet.start_line}-L${snippet.end_line}` + : '' + }`; } const extension = getFileExtension(snippet.file_path); const language = extension ? 
getPrismLanguage(extension) : undefined; @@ -297,7 +303,7 @@ export function SuggestedFixSnippet({ {sourceLink && ( - + {getIntegrationIcon('github', 'sm')} diff --git a/static/app/components/events/autofix/autofixSetupModal.spec.tsx b/static/app/components/events/autofix/autofixSetupModal.spec.tsx index b64f03d837e04c..a845bfeefde222 100644 --- a/static/app/components/events/autofix/autofixSetupModal.spec.tsx +++ b/static/app/components/events/autofix/autofixSetupModal.spec.tsx @@ -1,6 +1,6 @@ import {ProjectFixture} from 'sentry-fixture/project'; -import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import {render, screen} from 'sentry-test/reactTestingLibrary'; import {AutofixSetupContent} from 'sentry/components/events/autofix/autofixSetupModal'; import {AutofixCodebaseIndexingStatus} from 'sentry/components/events/autofix/types'; @@ -40,22 +40,23 @@ describe('AutofixSetupContent', function () { }, }); - const onComplete = jest.fn(); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/integrations/?provider_key=github&includeConfig=0', + body: [], + }); - render(); + render(); expect(await screen.findByText('Install the GitHub Integration')).toBeInTheDocument(); - expect( - screen.getByText(/Install the GitHub integration by navigating to/) - ).toBeInTheDocument(); + expect(screen.getByText(/Install the GitHub integration/)).toBeInTheDocument(); }); - it('displays successful integration text when it is installed', async function () { + it('renders the code mappings instructions', async function () { MockApiClient.addMockResponse({ url: '/issues/1/autofix/setup/', body: { - genAIConsent: {ok: false}, - integration: {ok: true}, + genAIConsent: {ok: true}, + integration: {ok: false, reason: 'integration_no_code_mappings'}, githubWriteIntegration: { ok: false, repos: [ @@ -71,25 +72,34 @@ describe('AutofixSetupContent', function () { }, }); - const onComplete = jest.fn(); - - render(); - - await screen.findByText('Install the GitHub Integration'); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/integrations/?provider_key=github&includeConfig=0', + body: [ + { + id: '123', + }, + ], + }); - await userEvent.click(screen.getByRole('button', {name: 'Back'})); + render(); + expect(await screen.findByText('Set up Code Mappings')).toBeInTheDocument(); + expect( + screen.getByText( + /Set up code mappings for the Github repositories you want to run Autofix on/ + ) + ).toBeInTheDocument(); expect( - await screen.findByText(/The GitHub integration is already installed/) + screen.getByRole('button', {name: 'Configure Code Mappings'}) ).toBeInTheDocument(); }); - it('displays pending repos for github app text', async function () { + it('renders the code mappings with fallback if no integration is configured', async function () { MockApiClient.addMockResponse({ url: '/issues/1/autofix/setup/', body: { - genAIConsent: {ok: false}, - integration: {ok: true}, + genAIConsent: {ok: true}, + integration: {ok: false, reason: 'integration_no_code_mappings'}, githubWriteIntegration: { ok: false, repos: [ @@ -98,13 +108,6 @@ describe('AutofixSetupContent', function () { owner: 'getsentry', name: 'sentry', external_id: '123', - ok: true, - }, - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'seer', - external_id: '235', ok: false, }, ], @@ -112,52 +115,21 @@ describe('AutofixSetupContent', function () { }, }); - const onComplete = jest.fn(); - - render(); - - expect(await screen.findByText('getsentry/sentry')).toBeInTheDocument(); - 
expect(await screen.findByText('getsentry/seer')).toBeInTheDocument(); - }); - - it('displays success repos for github app text', async function () { MockApiClient.addMockResponse({ - url: '/issues/1/autofix/setup/', - body: { - genAIConsent: {ok: false}, - integration: {ok: true}, - githubWriteIntegration: { - ok: false, - repos: [ - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'sentry', - external_id: '123', - ok: true, - }, - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'seer', - external_id: '235', - ok: false, - }, - ], - }, - }, + url: '/organizations/org-slug/integrations/?provider_key=github&includeConfig=0', + body: [], }); - const onComplete = jest.fn(); - - render(); - - await screen.findByText('Allow Autofix to Make Pull Requests'); + render(); + expect(await screen.findByText('Set up Code Mappings')).toBeInTheDocument(); + expect( + screen.getByText( + /Set up code mappings for the Github repositories you want to run Autofix on/ + ) + ).toBeInTheDocument(); expect( - await screen.findByText(/for the following repositories:/) + screen.getByRole('button', {name: 'Configure Integration'}) ).toBeInTheDocument(); - expect(await screen.findByText('getsentry/sentry')).toBeInTheDocument(); - expect(await screen.findByText('getsentry/seer')).toBeInTheDocument(); }); }); diff --git a/static/app/components/events/autofix/autofixSetupModal.tsx b/static/app/components/events/autofix/autofixSetupModal.tsx index d8ba9cd5e3a3fa..50bfcd8719ee90 100644 --- a/static/app/components/events/autofix/autofixSetupModal.tsx +++ b/static/app/components/events/autofix/autofixSetupModal.tsx @@ -1,4 +1,4 @@ -import {Fragment, useEffect, useMemo} from 'react'; +import {Fragment, useEffect} from 'react'; import styled from '@emotion/styled'; import {Button} from 'sentry/components/button'; @@ -8,21 +8,17 @@ import { useAutofixSetup, } from 'sentry/components/events/autofix/useAutofixSetup'; import {GuidedSteps} from 'sentry/components/guidedSteps/guidedSteps'; -import HookOrDefault from 'sentry/components/hookOrDefault'; import ExternalLink from 'sentry/components/links/externalLink'; import LoadingError from 'sentry/components/loadingError'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {IconCheckmark, IconGithub} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import type {Integration} from 'sentry/types/integrations'; import {trackAnalytics} from 'sentry/utils/analytics'; +import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; -const ConsentStep = HookOrDefault({ - hookName: 'component:autofix-setup-step-consent', - defaultComponent: null, -}); - function AutofixIntegrationStep({autofixSetup}: {autofixSetup: AutofixSetupResponse}) { if (autofixSetup.integration.ok) { return ( @@ -30,7 +26,9 @@ function AutofixIntegrationStep({autofixSetup}: {autofixSetup: AutofixSetupRespo {tct('The GitHub integration is already installed, [link: view in settings].', { link: , })} - + + + ); } @@ -56,33 +54,14 @@ function AutofixIntegrationStep({autofixSetup}: {autofixSetup: AutofixSetupRespo } )}

- - - ); - } - - if (autofixSetup.integration.reason === 'integration_no_code_mappings') { - return ( - -

- {tct( - 'You have an active GitHub installation, but no code mappings for this project. Add code mappings by visiting the [link:integration settings page] and editing your configuration.', - { - link: , - } - )} -

-

- {tct( - 'Once added, come back to this page. For more information related to installing the GitHub integration, read the [link:documentation].', - { - link: ( - - ), - } - )} -

- + + + + + +
); } @@ -90,11 +69,8 @@ function AutofixIntegrationStep({autofixSetup}: {autofixSetup: AutofixSetupRespo return (
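// Sketch (not part of the patch; names and the '123' id are assumed examples):
// the URL derivation the new AutofixCodeMappingStep below relies on, isolated
// as a helper so both possible links are easy to see.
function codeMappingsUrl(configurationId?: string): string {
  // With a GitHub integration configuration, deep-link to its Code Mappings tab;
  // with none configured, fall back to the GitHub integration settings root.
  return `/settings/integrations/github/${
    configurationId ? configurationId + '/?tab=codeMappings' : ''
  }`;
}
// codeMappingsUrl('123') === '/settings/integrations/github/123/?tab=codeMappings'
// codeMappingsUrl()      === '/settings/integrations/github/'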

- {tct( - 'Install the GitHub integration by navigating to the [link:integration settings page] and clicking the "Install" button. Follow the steps provided.', - { - link: , - } + {t( + 'Install the GitHub integration on the integration settings page by clicking the "Install" button. Follow the steps provided.' )}

@@ -107,7 +83,47 @@ function AutofixIntegrationStep({autofixSetup}: {autofixSetup: AutofixSetupRespo } )}

- + + + + + + +
+ ); +} + +function AutofixCodeMappingStep() { + const organization = useOrganization(); + const {data: integrationConfigurations} = useApiQuery( + [ + `/organizations/${organization.slug}/integrations/?provider_key=github&includeConfig=0`, + ], + { + staleTime: Infinity, + } + ); + + const configurationId = integrationConfigurations?.at(0)?.id; + const url = `/settings/integrations/github/${configurationId ? configurationId + '/?tab=codeMappings' : ''}`; + + return ( + +

+ {t( + 'Set up code mappings for the Github repositories you want to run Autofix on for this project.' + )} +

+ + + + + +
); } @@ -134,175 +150,25 @@ export function GitRepoLink({repo}: {repo: AutofixSetupRepoDefinition}) { ); } -function AutofixGithubIntegrationStep({ - autofixSetup, - canStartAutofix, - onComplete, - isLastStep, -}: { - autofixSetup: AutofixSetupResponse; - canStartAutofix: boolean; - isLastStep?: boolean; - onComplete?: () => void; -}) { - const handleClose = () => { - onComplete?.(); - }; - - const sortedRepos = useMemo( - () => - autofixSetup.githubWriteIntegration.repos.toSorted((a, b) => { - if (a.ok === b.ok) { - return `${a.owner}/${a.name}`.localeCompare(`${b.owner}/${b.name}`); - } - return a.ok ? -1 : 1; - }), - [autofixSetup.githubWriteIntegration.repos] - ); - - if (autofixSetup.githubWriteIntegration.ok) { - return ( - -

- {tct( - 'The [link:Sentry Autofix GitHub App] has been installed on all required repositories:', - { - link: ( - - ), - } - )} -

- - {sortedRepos.map(repo => ( - - ))} - - - {isLastStep && ( - - )} - -
- ); - } - - if (autofixSetup.githubWriteIntegration.repos.length > 0) { - return ( - -

- {tct( - 'Install and grant write access to the [link:Sentry Autofix Github App] for the following repositories:', - { - link: ( - - ), - } - )} -

- - {sortedRepos.map(repo => ( - - ))} - -

- {t( - 'Without this, Autofix can still provide root analysis and suggested code changes.' - )} -

- - {isLastStep && ( - - )} - -
- ); - } - - return ( - -

- {tct( - 'Install and grant write access to the [link:Sentry Autofix Github App] for the relevant repositories.', - { - link: ( - - ), - } - )} -

-

- {t( - 'Without this, Autofix can still provide root analysis and suggested code changes.' - )} -

- - {isLastStep && ( - - )} - -
- ); -} - -function AutofixSetupSteps({ - autofixSetup, - canStartAutofix, - onComplete, -}: { - autofixSetup: AutofixSetupResponse; - canStartAutofix: boolean; - groupId: string; - projectId: string; - onComplete?: () => void; -}) { +function AutofixSetupSteps({autofixSetup}: {autofixSetup: AutofixSetupResponse}) { return ( - - + ); @@ -311,14 +177,12 @@ function AutofixSetupSteps({ export function AutofixSetupContent({ projectId, groupId, - onComplete, }: { groupId: string; projectId: string; - onComplete?: () => void; }) { const organization = useOrganization(); - const {data, canStartAutofix, isPending, isError} = useAutofixSetup( + const {data, isPending, isError} = useAutofixSetup( {groupId}, // Want to check setup status whenever the user comes back to the tab {refetchOnWindowFocus: true} @@ -335,7 +199,7 @@ export function AutofixSetupContent({ organization, setup_gen_ai_consent: data.genAIConsent.ok, setup_integration: data.integration.ok, - setup_write_integration: data.githubWriteIntegration.ok, + setup_write_integration: data.githubWriteIntegration?.ok, }); }, [data, groupId, organization, projectId]); @@ -349,19 +213,14 @@ export function AutofixSetupContent({ return ( +
Set up Autofix

Sentry's AI-enabled Autofix uses all of the contextual data surrounding this error to work with you to find the root cause and create a fix.

-

A few additional steps are needed before you can use Autofix.

- +

To use Autofix, please follow the instructions below.

+
); } @@ -383,13 +242,6 @@ const Header = styled('p')` margin-top: ${space(2)}; `; -const RepoLinkUl = styled('ul')` - display: flex; - flex-direction: column; - gap: ${space(0.5)}; - padding: 0; -`; - const RepoLinkItem = styled('li')` display: flex; align-items: center; @@ -401,3 +253,8 @@ const GithubLink = styled('div')` align-items: center; gap: ${space(0.5)}; `; + +const Divider = styled('div')` + margin: ${space(3)} 0; + border-bottom: 2px solid ${p => p.theme.gray100}; +`; diff --git a/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx b/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx index 3e9957efeddfd4..f4833a8e39212f 100644 --- a/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx +++ b/static/app/components/events/autofix/autofixSetupWriteAccessModal.spec.tsx @@ -6,7 +6,7 @@ import {AutofixSetupWriteAccessModal} from 'sentry/components/events/autofix/aut describe('AutofixSetupWriteAccessModal', function () { it('displays help text when repos are not all installed', async function () { MockApiClient.addMockResponse({ - url: '/issues/1/autofix/setup/', + url: '/issues/1/autofix/setup/?check_write_access=true', body: { genAIConsent: {ok: false}, integration: {ok: true}, @@ -29,7 +29,6 @@ describe('AutofixSetupWriteAccessModal', function () { }, ], }, - codebaseIndexing: {ok: false}, }, }); @@ -57,7 +56,7 @@ describe('AutofixSetupWriteAccessModal', function () { it('displays success text when installed repos for github app text', async function () { MockApiClient.addMockResponse({ - url: '/issues/1/autofix/setup/', + url: '/issues/1/autofix/setup/?check_write_access=true', body: { genAIConsent: {ok: false}, integration: {ok: true}, @@ -80,7 +79,6 @@ describe('AutofixSetupWriteAccessModal', function () { }, ], }, - codebaseIndexing: {ok: false}, }, }); diff --git a/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx b/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx index 697475c35e8704..2b150bc86d0db9 100644 --- a/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx +++ b/static/app/components/events/autofix/autofixSetupWriteAccessModal.tsx @@ -17,13 +17,13 @@ interface AutofixSetupWriteAccessModalProps extends ModalRenderProps { function Content({groupId, closeModal}: {closeModal: () => void; groupId: string}) { const {canCreatePullRequests, data} = useAutofixSetup( - {groupId}, + {groupId, checkWriteAccess: true}, {refetchOnWindowFocus: true} // We want to check each time the user comes back to the tab ); const sortedRepos = useMemo( () => - data?.githubWriteIntegration.repos.toSorted((a, b) => { + data?.githubWriteIntegration?.repos.toSorted((a, b) => { if (a.ok === b.ok) { return `${a.owner}/${a.name}`.localeCompare(`${b.owner}/${b.name}`); } @@ -93,10 +93,10 @@ export function AutofixSetupWriteAccessModal({ groupId, closeModal, }: AutofixSetupWriteAccessModalProps) { - const {canCreatePullRequests} = useAutofixSetup({groupId}); + const {canCreatePullRequests} = useAutofixSetup({groupId, checkWriteAccess: true}); return ( - +

{t('Allow Autofix to Make Pull Requests')}

@@ -117,7 +117,7 @@ export function AutofixSetupWriteAccessModal({ )} - +
); } diff --git a/static/app/components/events/autofix/autofixSteps.spec.tsx b/static/app/components/events/autofix/autofixSteps.spec.tsx index 74540b1d953cd2..18b0067e8007a5 100644 --- a/static/app/components/events/autofix/autofixSteps.spec.tsx +++ b/static/app/components/events/autofix/autofixSteps.spec.tsx @@ -7,7 +7,11 @@ import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrar import {addSuccessMessage} from 'sentry/actionCreators/indicator'; import {AutofixSteps} from 'sentry/components/events/autofix/autofixSteps'; -import {type AutofixStep, AutofixStepType} from 'sentry/components/events/autofix/types'; +import { + AutofixStatus, + type AutofixStep, + AutofixStepType, +} from 'sentry/components/events/autofix/types'; jest.mock('sentry/actionCreators/indicator'); @@ -24,14 +28,14 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '1', type: AutofixStepType.DEFAULT, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, insights: [], progress: [], }), AutofixStepFixture({ id: '2', type: AutofixStepType.ROOT_CAUSE_ANALYSIS, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, causes: [ { id: 'cause1', @@ -47,7 +51,7 @@ describe('AutofixSteps', () => { repositories: [], created_at: '2023-01-01T00:00:00Z', run_id: '1', - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, }), groupId: 'group1', runId: 'run1', @@ -126,7 +130,7 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '3', type: AutofixStepType.DEFAULT, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, progress: [ AutofixProgressItemFixture({ message: 'Log message', @@ -149,7 +153,6 @@ describe('AutofixSteps', () => { url: '/issues/group1/autofix/setup/', body: { genAIConsent: {ok: true}, - codebaseIndexing: {ok: true}, integration: {ok: true}, githubWriteIntegration: { repos: [], @@ -173,7 +176,7 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '1', type: AutofixStepType.DEFAULT, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, insights: [], progress: [], index: 0, @@ -181,7 +184,7 @@ describe('AutofixSteps', () => { AutofixStepFixture({ id: '2', type: AutofixStepType.CHANGES, - status: 'COMPLETED', + status: AutofixStatus.COMPLETED, progress: [], changes: [changeData], }), @@ -191,7 +194,7 @@ describe('AutofixSteps', () => { render(); - const input = screen.getByPlaceholderText('Say something...'); + const input = screen.getByPlaceholderText('Share helpful context or feedback...'); await userEvent.type(input, 'Feedback on changes'); await userEvent.click(screen.getByRole('button', {name: 'Send'})); diff --git a/static/app/components/events/autofix/autofixSteps.tsx b/static/app/components/events/autofix/autofixSteps.tsx index 5b4b5bf9d80bb8..37a5879485a631 100644 --- a/static/app/components/events/autofix/autofixSteps.tsx +++ b/static/app/components/events/autofix/autofixSteps.tsx @@ -34,6 +34,7 @@ interface StepProps { hasStepBelow: boolean; repos: AutofixRepository[]; runId: string; + shouldHighlightRethink: boolean; step: AutofixStep; } @@ -67,6 +68,7 @@ export function Step({ hasStepBelow, hasStepAbove, hasErroredStepBefore, + shouldHighlightRethink, }: StepProps) { return ( @@ -90,6 +92,7 @@ export function Step({ stepIndex={step.index} groupId={groupId} runId={runId} + shouldHighlightRethink={shouldHighlightRethink} /> )} {step.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && ( @@ -244,17 +247,35 @@ export function AutofixSteps({data, groupId, runId}: AutofixStepsProps) { (previousStep?.type !== AutofixStepType.DEFAULT || 
previousStep?.insights.length === 0) && step.type !== AutofixStepType.DEFAULT; + const stepBelowProcessingAndEmpty = + nextStep?.type === AutofixStepType.DEFAULT && + nextStep?.status === 'PROCESSING' && + nextStep?.insights?.length === 0; + + const isNextStepLastStep = index === steps.length - 2; + const shouldHighlightRethink = + (nextStep?.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && + isNextStepLastStep) || + (nextStep?.type === AutofixStepType.CHANGES && + nextStep.changes.length > 0 && + !nextStep.changes.every(change => change.pull_request)); + return (
(stepsRef.current[index] = el)} key={step.id}> {twoNonDefaultStepsInARow &&
}
); diff --git a/static/app/components/events/autofix/types.ts b/static/app/components/events/autofix/types.ts index 0191485b1177a0..1e291accf68f01 100644 --- a/static/app/components/events/autofix/types.ts +++ b/static/app/components/events/autofix/types.ts @@ -28,6 +28,15 @@ export enum AutofixCodebaseIndexingStatus { ERRORED = 'errored', } +export enum AutofixStatus { + COMPLETED = 'COMPLETED', + ERROR = 'ERROR', + PROCESSING = 'PROCESSING', + NEED_MORE_INFORMATION = 'NEED_MORE_INFORMATION', + CANCELLED = 'CANCELLED', + WAITING_FOR_USER_RESPONSE = 'WAITING_FOR_USER_RESPONSE', +} + export type AutofixPullRequestDetails = { pr_number: number; pr_url: string; @@ -49,13 +58,7 @@ export type AutofixData = { created_at: string; repositories: AutofixRepository[]; run_id: string; - status: - | 'PENDING' - | 'PROCESSING' - | 'COMPLETED' - | 'NOFIX' - | 'ERROR' - | 'NEED_MORE_INFORMATION'; + status: AutofixStatus; actor_ids?: number[]; codebase_indexing?: { status: 'COMPLETED'; @@ -80,13 +83,7 @@ interface BaseStep { id: string; index: number; progress: AutofixProgressItem[]; - status: - | 'PENDING' - | 'PROCESSING' - | 'COMPLETED' - | 'ERROR' - | 'CANCELLED' - | 'WAITING_FOR_USER_RESPONSE'; + status: AutofixStatus; title: string; type: AutofixStepType; completedMessage?: string; @@ -96,6 +93,8 @@ export type CodeSnippetContext = { file_path: string; repo_name: string; snippet: string; + end_line?: number; + start_line?: number; }; export type StacktraceContext = { diff --git a/static/app/components/events/autofix/useAutofix.tsx b/static/app/components/events/autofix/useAutofix.tsx index fd4c793a8b8c1c..1af55d87943a48 100644 --- a/static/app/components/events/autofix/useAutofix.tsx +++ b/static/app/components/events/autofix/useAutofix.tsx @@ -2,6 +2,7 @@ import {useCallback, useState} from 'react'; import { type AutofixData, + AutofixStatus, AutofixStepType, type GroupWithAutofix, } from 'sentry/components/events/autofix/types'; @@ -26,14 +27,14 @@ export const makeAutofixQueryKey = (groupId: string): ApiQueryKey => [ const makeInitialAutofixData = (): AutofixResponse => ({ autofix: { - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, run_id: '', steps: [ { type: AutofixStepType.DEFAULT, id: '1', index: 0, - status: 'PROCESSING', + status: AutofixStatus.PROCESSING, title: 'Starting Autofix...', insights: [], progress: [], @@ -48,13 +49,13 @@ const makeErrorAutofixData = (errorMessage: string): AutofixResponse => { const data = makeInitialAutofixData(); if (data.autofix) { - data.autofix.status = 'ERROR'; + data.autofix.status = AutofixStatus.ERROR; data.autofix.steps = [ { type: AutofixStepType.DEFAULT, id: '1', index: 0, - status: 'ERROR', + status: AutofixStatus.ERROR, title: 'Something went wrong', completedMessage: errorMessage, insights: [], @@ -66,7 +67,12 @@ const makeErrorAutofixData = (errorMessage: string): AutofixResponse => { return data; }; -const isPolling = (autofixData?: AutofixData | null) => autofixData?.status !== 'PENDING'; +/** Will not poll when the autofix is in an error state or has completed */ +const isPolling = (autofixData?: AutofixData | null) => + !autofixData || + ![AutofixStatus.ERROR, AutofixStatus.COMPLETED, AutofixStatus.CANCELLED].includes( + autofixData.status + ); export const useAutofixData = ({groupId}: {groupId: string}) => { const {data} = useApiQuery(makeAutofixQueryKey(groupId), { diff --git a/static/app/components/events/autofix/useAutofixSetup.tsx b/static/app/components/events/autofix/useAutofixSetup.tsx index 23dc3204677301..7abf06860111b8 
100644 --- a/static/app/components/events/autofix/useAutofixSetup.tsx +++ b/static/app/components/events/autofix/useAutofixSetup.tsx @@ -14,39 +14,44 @@ export type AutofixSetupResponse = { genAIConsent: { ok: boolean; }; - githubWriteIntegration: { - ok: boolean; - repos: AutofixSetupRepoDefinition[]; - }; integration: { ok: boolean; reason: string | null; }; - subprocessorConsent: { + githubWriteIntegration?: { ok: boolean; - }; + repos: AutofixSetupRepoDefinition[]; + } | null; }; -export function makeAutofixSetupQueryKey(groupId: string): ApiQueryKey { - return [`/issues/${groupId}/autofix/setup/`]; +export function makeAutofixSetupQueryKey( + groupId: string, + checkWriteAccess?: boolean +): ApiQueryKey { + return [ + `/issues/${groupId}/autofix/setup/${checkWriteAccess ? '?check_write_access=true' : ''}`, + ]; } export function useAutofixSetup( - {groupId}: {groupId: string}, + {groupId, checkWriteAccess}: {groupId: string; checkWriteAccess?: boolean}, options: Omit, 'staleTime'> = {} ) { - const queryData = useApiQuery(makeAutofixSetupQueryKey(groupId), { - enabled: Boolean(groupId), - staleTime: 0, - retry: false, - ...options, - }); + const queryData = useApiQuery( + makeAutofixSetupQueryKey(groupId, checkWriteAccess), + { + enabled: Boolean(groupId), + staleTime: 0, + retry: false, + ...options, + } + ); return { ...queryData, canStartAutofix: Boolean( queryData.data?.integration.ok && queryData.data?.genAIConsent.ok ), - canCreatePullRequests: Boolean(queryData.data?.githubWriteIntegration.ok), + canCreatePullRequests: Boolean(queryData.data?.githubWriteIntegration?.ok), }; } diff --git a/static/app/components/events/contexts/contextCard.tsx b/static/app/components/events/contexts/contextCard.tsx index db6f311641abc2..f3fd784dbfe1c7 100644 --- a/static/app/components/events/contexts/contextCard.tsx +++ b/static/app/components/events/contexts/contextCard.tsx @@ -112,7 +112,7 @@ export default function ContextCard({ title={ <div>{getContextTitle({alias, type, value})}</div> - <div> + <div style={{minWidth: 14}}> <ErrorBoundary customComponent={null}> {getContextIcon({ alias, diff --git a/static/app/components/events/contexts/contextIcon.tsx b/static/app/components/events/contexts/contextIcon.tsx index 03627b359d6157..2f95643adeffac 100644 --- a/static/app/components/events/contexts/contextIcon.tsx +++ b/static/app/components/events/contexts/contextIcon.tsx @@ -12,6 +12,7 @@ import logoAppleWatch from 'sentry-logos/logo-apple-watch.svg'; import logoArm from 'sentry-logos/logo-arm.svg'; import logoChrome from 'sentry-logos/logo-chrome.svg'; import logoChromium from 'sentry-logos/logo-chromium.svg'; +import logoCloudflareWorker from 'sentry-logos/logo-cloudflare-worker.svg'; import logoCrystal from 'sentry-logos/logo-crystal.svg'; import logoDeno from 'sentry-logos/logo-deno.svg'; import logoDotnet from 'sentry-logos/logo-dotnet.svg'; @@ -41,6 +42,7 @@ import logoSamsung from 'sentry-logos/logo-samsung.svg'; import logoUbuntu from 'sentry-logos/logo-ubuntu.svg'; import logoUnity from 'sentry-logos/logo-unity.svg'; import logoUnknown from 'sentry-logos/logo-unknown.svg'; +import logoVercel from 'sentry-logos/logo-vercel.svg'; import logoWindows from 'sentry-logos/logo-windows.svg'; import ConfigStore from 'sentry/stores/configStore'; @@ -70,6 +72,7 @@ const LOGO_MAPPING = { arm: logoArm, chrome: logoChrome, chromium: logoChromium, + cloudflare: logoCloudflareWorker, cpython: logoPython, crystal: logoCrystal, darwin: logoApple, @@ -100,6 +103,7 @@ const LOGO_MAPPING = { samsung: 
logoSamsung, tvos: logoApple, ubuntu: logoUbuntu, + vercel: logoVercel, watch: logoAppleWatch, watchos: logoApple, windows: logoWindows, @@ -117,6 +121,7 @@ const INVERT_IN_DARKMODE = [ 'mac', 'apple', 'watchos', + 'vercel', ]; const darkCss = css` diff --git a/static/app/components/events/contexts/default.tsx b/static/app/components/events/contexts/default.tsx deleted file mode 100644 index 52b93510f2725f..00000000000000 --- a/static/app/components/events/contexts/default.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import ContextBlock from 'sentry/components/events/contexts/contextBlock'; -import type {Event} from 'sentry/types/event'; - -type Props = { - alias: string; - data: Record<string, React.ReactNode | undefined>; - event: Event; -}; - -export function getDefaultContextData(data: Props['data']) { - return Object.entries(data) - .filter(([k]) => k !== 'type' && k !== 'title') - .map(([key, value]) => ({ - key, - subject: key, - value, - })); -} - -export function DefaultContext({data}: Props) { - return <ContextBlock data={getDefaultContextData(data)} />; -} diff --git a/static/app/components/events/contexts/device/getDeviceKnownDataDetails.spec.tsx b/static/app/components/events/contexts/device/getDeviceKnownDataDetails.spec.tsx deleted file mode 100644 index 20d5d4669f158d..00000000000000 --- a/static/app/components/events/contexts/device/getDeviceKnownDataDetails.spec.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import {EventFixture} from 'sentry-fixture/event'; - -import { - deviceKnownDataValues, - getDeviceKnownDataDetails, -} from 'sentry/components/events/contexts/device/getDeviceKnownDataDetails'; - -import {deviceMockData} from './index.spec'; - -describe('getDeviceKnownDataDetails', function () { - it('returns values and according to the parameters', function () { - const allKnownData: ReturnType<typeof getDeviceKnownDataDetails>[] = []; - - for (const type of Object.keys(deviceKnownDataValues)) { - const deviceKnownData = getDeviceKnownDataDetails({ - type: deviceKnownDataValues[type], - data: deviceMockData, - event: EventFixture(), - }); - - if (!deviceKnownData) { - continue; - } - - allKnownData.push(deviceKnownData); - } - - expect(allKnownData).toEqual([ - {subject: 'Architecture', value: 'x86'}, - {subject: 'Battery Level', value: '100%'}, - {subject: 'Battery Status', value: undefined}, - {subject: 'Battery Temperature (°C)', value: 45}, - expect.objectContaining({subject: 'Boot Time'}), - {subject: 'Brand', value: 'google'}, - {subject: 'Charging', value: true}, - {subject: 'CPU Description', value: undefined}, - {subject: 'Device Type', value: undefined}, - expect.objectContaining({subject: 'External Free Storage'}), - expect.objectContaining({subject: 'External Storage Size'}), - {subject: 'Family', value: 'Android'}, - expect.objectContaining({subject: 'Free Memory'}), - expect.objectContaining({subject: 'Free Storage'}), - {subject: 'Low Memory', value: false}, - {subject: 'Manufacturer', value: 'Google'}, - expect.objectContaining({subject: 'Memory Size'}), - expect.objectContaining({subject: 'Model'}), - {subject: 'Model Id', value: 'NYC'}, - {subject: 'Name', value: ''}, - {subject: 'Online', value: true}, - {subject: 'Orientation', value: 'portrait'}, - {subject: 'Screen Density', value: 3}, - {subject: 'Screen DPI', value: 480}, - {subject: 'Screen Height Pixels', value: 1136}, - {subject: 'Screen Resolution', value: '1136x768'}, - {subject: 'Screen Width Pixels', value: 768}, - {subject: 'Simulator', value: true}, - expect.objectContaining({subject: 'Storage Size'}), 
- {subject: 'Usable Memory', value: undefined}, - {subject: 'Memory', value: undefined}, - { - subject: 'Capacity', - value: - 'Total: 779.3 MiB / Free: 485.2 MiB (External Total: 510.0 MiB / Free: 509.9 MiB)', - }, - ]); - }); -}); diff --git a/static/app/components/events/contexts/device/getDeviceKnownDataDetails.tsx b/static/app/components/events/contexts/device/getDeviceKnownDataDetails.tsx deleted file mode 100644 index 096562cc0d0ea1..00000000000000 --- a/static/app/components/events/contexts/device/getDeviceKnownDataDetails.tsx +++ /dev/null @@ -1,222 +0,0 @@ -import {DeviceName} from 'sentry/components/deviceName'; -import FileSize from 'sentry/components/fileSize'; -import {t} from 'sentry/locale'; -import type {DeviceContext, Event} from 'sentry/types/event'; -import {DeviceContextKey} from 'sentry/types/event'; -import {defined} from 'sentry/utils'; - -import {getRelativeTimeFromEventDateCreated, type KnownDataDetails} from '../utils'; - -import {formatMemory, formatStorage} from './utils'; - -export const deviceKnownDataValues = [ - ...Object.values(DeviceContextKey), - // Added two general keys here to namespace the values - // tracks memory_size, free_memory, usable_memory - 'memory', - // tracks storage_size, free_storage, external_storage_size, external_free_storage - 'storage', -]; - -type Props = { - data: DeviceContext; - event: Event; - type: (typeof deviceKnownDataValues)[number]; -}; - -export function getDeviceKnownDataDetails({data, event, type}: Props): KnownDataDetails { - switch (type) { - case DeviceContextKey.NAME: - return { - subject: t('Name'), - value: data.name, - }; - case DeviceContextKey.FAMILY: - return { - subject: t('Family'), - value: data.family, - }; - case DeviceContextKey.MODEL_ID: - return { - subject: t('Model Id'), - value: data.model_id, - }; - case DeviceContextKey.MODEL: - return { - subject: t('Model'), - value: - typeof data.model === 'string' ? ( - <DeviceName - value={`${data.model} ${data?.model_id ? `(${data.model_id})` : ''}`} - /> - ) : undefined, - }; - case DeviceContextKey.CPU_DESCRIPTION: - return { - subject: t('CPU Description'), - value: data.cpu_description, - }; - case DeviceContextKey.ARCH: - return { - subject: t('Architecture'), - value: data.arch, - }; - case DeviceContextKey.BATTERY_LEVEL: - return { - subject: t('Battery Level'), - value: defined(data.battery_level) ? `${data.battery_level}%` : undefined, - }; - case DeviceContextKey.BATTERY_STATUS: - return { - subject: t('Battery Status'), - value: data.battery_status, - }; - case DeviceContextKey.BATTERY_TEMPERATURE: - return { - subject: t('Battery Temperature (°C)'), - value: data.battery_temperature, - }; - case DeviceContextKey.ORIENTATION: - return { - subject: t('Orientation'), - value: data.orientation, - }; - case 'memory': - const {memory_size, free_memory, usable_memory} = data; - return { - subject: t('Memory'), - value: - memory_size && free_memory && usable_memory - ? formatMemory(memory_size, free_memory, usable_memory) - : undefined, - }; - case 'storage': - const {storage_size, free_storage, external_storage_size, external_free_storage} = - data; - return { - subject: t('Capacity'), - value: - storage_size && free_storage && external_storage_size && external_free_storage - ? formatStorage( - storage_size, - free_storage, - external_storage_size, - external_free_storage - ) - : undefined, - }; - case DeviceContextKey.FREE_STORAGE: { - return { - subject: t('Free Storage'), - value: data.free_storage ? 
<FileSize bytes={data.free_storage} /> : undefined, - }; - } - case DeviceContextKey.STORAGE_SIZE: { - return { - subject: t('Storage Size'), - value: data.storage_size ? <FileSize bytes={data.storage_size} /> : undefined, - }; - } - case DeviceContextKey.EXTERNAL_STORAGE_SIZE: { - return { - subject: t('External Storage Size'), - value: data.external_storage_size ? ( - <FileSize bytes={data.external_storage_size} /> - ) : undefined, - }; - } - case DeviceContextKey.EXTERNAL_FREE_STORAGE: { - return { - subject: t('External Free Storage'), - value: data.external_free_storage ? ( - <FileSize bytes={data.external_free_storage} /> - ) : undefined, - }; - } - case DeviceContextKey.SIMULATOR: - return { - subject: t('Simulator'), - value: data.simulator, - }; - case DeviceContextKey.BOOT_TIME: - return { - subject: t('Boot Time'), - value: getRelativeTimeFromEventDateCreated( - event.dateCreated ? event.dateCreated : event.dateReceived, - data.boot_time - ), - }; - case DeviceContextKey.DEVICE_TYPE: - return { - subject: t('Device Type'), - value: data.device_type, - }; - case DeviceContextKey.BRAND: - return { - subject: t('Brand'), - value: data.brand, - }; - case DeviceContextKey.CHARGING: - return { - subject: t('Charging'), - value: data.charging, - }; - case DeviceContextKey.LOW_MEMORY: - return { - subject: t('Low Memory'), - value: data.low_memory, - }; - case DeviceContextKey.FREE_MEMORY: - return { - subject: t('Free Memory'), - value: data.free_memory ? <FileSize bytes={data.free_memory} /> : undefined, - }; - case DeviceContextKey.MEMORY_SIZE: - return { - subject: t('Memory Size'), - value: data.memory_size ? <FileSize bytes={data.memory_size} /> : undefined, - }; - case DeviceContextKey.USABLE_MEMORY: - return { - subject: t('Usable Memory'), - value: data.usable_memory ? 
<FileSize bytes={data.usable_memory} /> : undefined, - }; - case DeviceContextKey.MANUFACTURER: - return { - subject: t('Manufacturer'), - value: data.manufacturer, - }; - case DeviceContextKey.ONLINE: - return { - subject: t('Online'), - value: data.online, - }; - case DeviceContextKey.SCREEN_DENSITY: - return { - subject: t('Screen Density'), - value: data.screen_density, - }; - case DeviceContextKey.SCREEN_DPI: - return { - subject: t('Screen DPI'), - value: data.screen_dpi, - }; - case DeviceContextKey.SCREEN_HEIGHT_PIXELS: - return { - subject: t('Screen Height Pixels'), - value: data.screen_height_pixels, - }; - case DeviceContextKey.SCREEN_RESOLUTION: - return { - subject: t('Screen Resolution'), - value: data.screen_resolution, - }; - case DeviceContextKey.SCREEN_WIDTH_PIXELS: - return { - subject: t('Screen Width Pixels'), - value: data.screen_width_pixels, - }; - default: - return undefined; - } -} diff --git a/static/app/components/events/contexts/device/index.spec.tsx b/static/app/components/events/contexts/device/index.spec.tsx deleted file mode 100644 index 03cc66681e9390..00000000000000 --- a/static/app/components/events/contexts/device/index.spec.tsx +++ /dev/null @@ -1,76 +0,0 @@ -import {DataScrubbingRelayPiiConfigFixture} from 'sentry-fixture/dataScrubbingRelayPiiConfig'; -import {EventFixture} from 'sentry-fixture/event'; - -import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; -import {textWithMarkupMatcher} from 'sentry-test/utils'; - -import {DeviceEventContext} from 'sentry/components/events/contexts/device'; -import type {DeviceContext} from 'sentry/types/event'; - -export const deviceMockData: DeviceContext = { - screen_resolution: '1136x768', - orientation: 'portrait', - family: 'Android', - battery_level: 100, - battery_temperature: 45, - screen_dpi: 480, - memory_size: 1055186944, - timezone: 'America/Los_Angeles', - external_storage_size: 534761472, - external_free_storage: 534702080, - screen_width_pixels: 768, - low_memory: false, - simulator: true, - screen_height_pixels: 1136, - free_memory: 658702336, - online: true, - screen_density: 3, - type: 'device', - charging: true, - model_id: 'NYC', - brand: 'google', - storage_size: 817143808, - boot_time: '2019-12-11T11:38:15Z', - arch: 'x86', - manufacturer: 'Google', - name: '', // redacted - free_storage: 508784640, - model: 'Android SDK built for x86', -}; - -export const deviceContextMetaMockData = { - name: { - '': { - rem: [['organization:0', 's', 0, 0]], - len: 25, - }, - }, -}; - -const event = EventFixture({ - _meta: { - contexts: { - device: deviceContextMetaMockData, - }, - }, -}); - -describe('device event context', function () { - it('display redacted data', async function () { - render(<DeviceEventContext event={event} data={deviceMockData} />, { - organization: { - relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()), - }, - }); - expect(screen.getByText('Name')).toBeInTheDocument(); // subject - expect(screen.getByText(/redacted/)).toBeInTheDocument(); // value - await userEvent.hover(screen.getByText(/redacted/)); - expect( - await screen.findByText( - textWithMarkupMatcher( - "Replaced because of the data scrubbing rule [Replace] [Password fields] with [Scrubbed] from [password] in your organization's settings" - ) - ) - ).toBeInTheDocument(); // tooltip description - }); -}); diff --git a/static/app/components/events/contexts/device/index.tsx b/static/app/components/events/contexts/device/index.tsx deleted file mode 100644 index 
05f803aedb9c89..00000000000000 --- a/static/app/components/events/contexts/device/index.tsx +++ /dev/null @@ -1,61 +0,0 @@ -import {Fragment} from 'react'; - -import ContextBlock from 'sentry/components/events/contexts/contextBlock'; -import type {DeviceContext, Event} from 'sentry/types/event'; - -import { - getContextMeta, - getKnownData, - getKnownStructuredData, - getUnknownData, -} from '../utils'; - -import { - deviceKnownDataValues, - getDeviceKnownDataDetails, -} from './getDeviceKnownDataDetails'; -import {getInferredData} from './utils'; - -type Props = { - data: DeviceContext; - event: Event; - meta?: Record<string, any>; -}; - -const deviceIgnoredDataValues = []; - -export function getKnownDeviceContextData({data, event, meta}: Props) { - const inferredData = getInferredData(data); - return getKnownData<DeviceContext, (typeof deviceKnownDataValues)[number]>({ - data: inferredData, - meta, - knownDataTypes: deviceKnownDataValues, - onGetKnownDataDetails: v => getDeviceKnownDataDetails({...v, event}), - }).map(v => ({ - ...v, - subjectDataTestId: `device-context-${v.key.toLowerCase()}-value`, - })); -} - -export function getUnknownDeviceContextData({data, meta}: Pick<Props, 'data' | 'meta'>) { - const inferredData = getInferredData(data); - return getUnknownData({ - allData: inferredData, - knownKeys: [...deviceKnownDataValues, ...deviceIgnoredDataValues], - meta, - }); -} - -export function DeviceEventContext({data, event, meta: propsMeta}: Props) { - const meta = propsMeta ?? getContextMeta(event, 'device'); - const knownData = getKnownDeviceContextData({data, event, meta}); - const knownStructuredData = getKnownStructuredData(knownData, meta); - const unknownData = getUnknownDeviceContextData({data, meta}); - - return ( - <Fragment> - <ContextBlock data={knownStructuredData} /> - <ContextBlock data={unknownData} /> - </Fragment> - ); -} diff --git a/static/app/components/events/contexts/device/utils.tsx b/static/app/components/events/contexts/device/utils.tsx deleted file mode 100644 index 480a20be117059..00000000000000 --- a/static/app/components/events/contexts/device/utils.tsx +++ /dev/null @@ -1,125 +0,0 @@ -import type {DeviceContext} from 'sentry/types/event'; -import {DeviceContextKey} from 'sentry/types/event'; -import {defined} from 'sentry/utils'; -import {formatBytesBase2} from 'sentry/utils/bytes/formatBytesBase2'; - -export function formatMemory( - memory_size: number, - free_memory: number, - usable_memory: number -) { - if ( - !Number.isInteger(memory_size) || - memory_size <= 0 || - !Number.isInteger(free_memory) || - free_memory <= 0 - ) { - return null; - } - - let memory = `Total: ${formatBytesBase2(memory_size)} / Free: ${formatBytesBase2( - free_memory - )}`; - if (Number.isInteger(usable_memory) && usable_memory > 0) { - memory = `${memory} / Usable: ${formatBytesBase2(usable_memory)}`; - } - - return memory; -} - -export function formatStorage( - storage_size: number, - free_storage: number, - external_storage_size: number, - external_free_storage: number -) { - if (!Number.isInteger(storage_size) || storage_size <= 0) { - return null; - } - - let storage = `Total: ${formatBytesBase2(storage_size)}`; - if (Number.isInteger(free_storage) && free_storage > 0) { - storage = `${storage} / Free: ${formatBytesBase2(free_storage)}`; - } - - if ( - Number.isInteger(external_storage_size) && - external_storage_size > 0 && - Number.isInteger(external_free_storage) && - external_free_storage > 0 - ) { - storage = `${storage} (External Total: ${formatBytesBase2( - 
external_storage_size - )} / Free: ${formatBytesBase2(external_free_storage)})`; - } - - return storage; -} - -// List of common display resolutions taken from the source: https://en.wikipedia.org/wiki/Display_resolution#Common_display_resolutions -export const commonDisplayResolutions = { - '640x360': 'nHD', - '800x600': 'SVGA', - '1024x768': 'XGA', - '1280x720': 'WXGA', - '1280x800': 'WXGA', - '1280x1024': 'SXGA', - '1360x768': 'HD', - '1366x768': 'HD', - '1440x900': 'WXGA+', - '1536x864': 'NA', - '1600x900': 'HD+', - '1680x1050': 'WSXGA+', - '1920x1080': 'FHD', - '1920x1200': 'WUXGA', - '2048x1152': 'QWXGA', - '2560x1080': 'N/A', - '2560x1440': 'QHD', - '3440x1440': 'N/A', - '3840x2160': '4K UHD', -}; - -export function getInferredData(data: DeviceContext) { - const screenResolution = data[DeviceContextKey.SCREEN_RESOLUTION]; - const screenWidth = data[DeviceContextKey.SCREEN_WIDTH_PIXELS]; - const screenHeight = data[DeviceContextKey.SCREEN_HEIGHT_PIXELS]; - - if (screenResolution) { - const displayResolutionDescription = commonDisplayResolutions[screenResolution]; - - const commonData = { - ...data, - [DeviceContextKey.SCREEN_RESOLUTION]: displayResolutionDescription - ? `${screenResolution} (${displayResolutionDescription})` - : screenResolution, - }; - - if (!defined(screenWidth) && !defined(screenHeight)) { - const [width, height] = screenResolution.split('x'); - - if (width && height) { - return { - ...commonData, - [DeviceContextKey.SCREEN_WIDTH_PIXELS]: Number(width), - [DeviceContextKey.SCREEN_HEIGHT_PIXELS]: Number(height), - }; - } - } - - return commonData; - } - - if (defined(screenWidth) && defined(screenHeight)) { - const displayResolution = `${screenWidth}x${screenHeight}`; - const displayResolutionDescription = commonDisplayResolutions[displayResolution]; - - return { - ...data, - [DeviceContextKey.SCREEN_RESOLUTION]: displayResolutionDescription - ? 
`${displayResolution} (${displayResolutionDescription})` - : displayResolution, - }; - } - - return data; -} diff --git a/static/app/components/events/contexts/knownContext/app.spec.tsx b/static/app/components/events/contexts/knownContext/app.spec.tsx index 1c25bb80276b0b..a9f5c0242512ba 100644 --- a/static/app/components/events/contexts/knownContext/app.spec.tsx +++ b/static/app/components/events/contexts/knownContext/app.spec.tsx @@ -8,7 +8,7 @@ import { getAppContextData, } from 'sentry/components/events/contexts/knownContext/app'; -export const MOCK_APP_CONTEXT: AppContext = { +const MOCK_APP_CONTEXT: AppContext = { device_app_hash: '2421fae1ac9237a8131e74883e52b0f7034a143f', build_type: 'test', app_identifier: 'io.sentry.sample.iOS-Swift', diff --git a/static/app/components/events/contexts/knownContext/device.spec.tsx b/static/app/components/events/contexts/knownContext/device.spec.tsx new file mode 100644 index 00000000000000..d4eb74485979d9 --- /dev/null +++ b/static/app/components/events/contexts/knownContext/device.spec.tsx @@ -0,0 +1,158 @@ +import {EventFixture} from 'sentry-fixture/event'; + +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import ContextCard from 'sentry/components/events/contexts/contextCard'; +import {getDeviceContextData} from 'sentry/components/events/contexts/knownContext/device'; +import type {DeviceContext} from 'sentry/types/event'; + +const MOCK_DEVICE_CONTEXT: DeviceContext = { + name: '', // redacted + screen_resolution: '1136x768', + orientation: 'portrait', + family: 'Android', + battery_level: 100, + battery_temperature: 45, + screen_dpi: 480, + memory_size: 1055186944, + timezone: 'America/Los_Angeles', + external_storage_size: 534761472, + external_free_storage: 534702080, + screen_width_pixels: 768, + low_memory: false, + simulator: true, + screen_height_pixels: 1136, + free_memory: 658702336, + online: true, + screen_density: 3, + type: 'device', + charging: true, + model_id: 'NYC', + brand: 'google', + storage_size: 817143808, + boot_time: '2019-12-11T11:38:15Z', + arch: 'x86', + manufacturer: 'Google', + free_storage: 508784640, + model: 'Android SDK built for x86', +}; + +const MOCK_REDACTION = { + name: { + '': { + rem: [['organization:0', 's', 0, 0]], + len: 25, + }, + }, +}; + +describe('DeviceContext', function () { + it('returns values and according to the parameters', function () { + // We need to use expect.anything() for some fields as they return React components. 
+ expect( + getDeviceContextData({data: MOCK_DEVICE_CONTEXT, event: EventFixture()}) + ).toEqual([ + {key: 'name', subject: 'Name', value: ''}, + { + key: 'screen_resolution', + subject: 'Screen Resolution', + value: '1136x768', + }, + {key: 'orientation', subject: 'Orientation', value: 'portrait'}, + {key: 'family', subject: 'Family', value: 'Android'}, + {key: 'battery_level', subject: 'Battery Level', value: '100%'}, + { + key: 'battery_temperature', + subject: 'Battery Temperature (°C)', + value: 45, + }, + {key: 'screen_dpi', subject: 'Screen DPI', value: 480}, + { + key: 'memory_size', + subject: 'Memory Size', + value: expect.anything(), + }, + { + key: 'timezone', + subject: 'timezone', + value: 'America/Los_Angeles', + meta: undefined, + }, + { + key: 'external_storage_size', + subject: 'External Storage Size', + value: expect.anything(), + }, + { + key: 'external_free_storage', + subject: 'External Free Storage', + value: expect.anything(), + }, + { + key: 'screen_width_pixels', + subject: 'Screen Width Pixels', + value: 768, + }, + {key: 'low_memory', subject: 'Low Memory', value: false}, + {key: 'simulator', subject: 'Simulator', value: true}, + { + key: 'screen_height_pixels', + subject: 'Screen Height Pixels', + value: 1136, + }, + { + key: 'free_memory', + subject: 'Free Memory', + value: expect.anything(), + }, + {key: 'online', subject: 'Online', value: true}, + {key: 'screen_density', subject: 'Screen Density', value: 3}, + {key: 'charging', subject: 'Charging', value: true}, + {key: 'model_id', subject: 'Model Id', value: 'NYC'}, + {key: 'brand', subject: 'Brand', value: 'google'}, + { + key: 'storage_size', + subject: 'Storage Size', + value: expect.anything(), + }, + { + key: 'boot_time', + subject: 'Boot Time', + value: expect.anything(), + }, + {key: 'arch', subject: 'Architecture', value: 'x86'}, + {key: 'manufacturer', subject: 'Manufacturer', value: 'Google'}, + { + key: 'free_storage', + subject: 'Free Storage', + value: expect.anything(), + }, + { + key: 'model', + subject: 'Model', + value: expect.anything(), + }, + ]); + }); + + it('renders with meta annotations correctly', function () { + const event = EventFixture({ + _meta: {contexts: {device: MOCK_REDACTION}}, + }); + + render( + <ContextCard + event={event} + type={'device'} + alias={'device'} + value={{...MOCK_DEVICE_CONTEXT, name: ''}} + /> + ); + + expect(screen.getByText('Device')).toBeInTheDocument(); + expect(screen.getByText('Orientation')).toBeInTheDocument(); + expect(screen.getByText('portrait')).toBeInTheDocument(); + expect(screen.getByText('Name')).toBeInTheDocument(); + expect(screen.getByText(/redacted/)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/contexts/knownContext/device.tsx b/static/app/components/events/contexts/knownContext/device.tsx new file mode 100644 index 00000000000000..f25751daa02d09 --- /dev/null +++ b/static/app/components/events/contexts/knownContext/device.tsx @@ -0,0 +1,372 @@ +import {DeviceName} from 'sentry/components/deviceName'; +import { + getContextKeys, + getRelativeTimeFromEventDateCreated, +} from 'sentry/components/events/contexts/utils'; +import FileSize from 'sentry/components/fileSize'; +import {t} from 'sentry/locale'; +import {type DeviceContext, DeviceContextKey, type Event} from 'sentry/types/event'; +import type {KeyValueListData} from 'sentry/types/group'; +import {defined} from 'sentry/utils'; +import {formatBytesBase2} from 'sentry/utils/bytes/formatBytesBase2'; + +function formatMemory(memory_size: number, 
free_memory: number, usable_memory: number) { + if ( + !Number.isInteger(memory_size) || + memory_size <= 0 || + !Number.isInteger(free_memory) || + free_memory <= 0 + ) { + return null; + } + + let memory = `Total: ${formatBytesBase2(memory_size)} / Free: ${formatBytesBase2( + free_memory + )}`; + if (Number.isInteger(usable_memory) && usable_memory > 0) { + memory = `${memory} / Usable: ${formatBytesBase2(usable_memory)}`; + } + + return memory; +} + +function formatStorage( + storage_size: number, + free_storage: number, + external_storage_size: number, + external_free_storage: number +) { + if (!Number.isInteger(storage_size) || storage_size <= 0) { + return null; + } + + let storage = `Total: ${formatBytesBase2(storage_size)}`; + if (Number.isInteger(free_storage) && free_storage > 0) { + storage = `${storage} / Free: ${formatBytesBase2(free_storage)}`; + } + + if ( + Number.isInteger(external_storage_size) && + external_storage_size > 0 && + Number.isInteger(external_free_storage) && + external_free_storage > 0 + ) { + storage = `${storage} (External Total: ${formatBytesBase2( + external_storage_size + )} / Free: ${formatBytesBase2(external_free_storage)})`; + } + + return storage; +} + +const commonDisplayResolutions = { + '640x360': 'nHD', + '800x600': 'SVGA', + '1024x768': 'XGA', + '1280x720': 'WXGA', + '1280x800': 'WXGA', + '1280x1024': 'SXGA', + '1360x768': 'HD', + '1366x768': 'HD', + '1440x900': 'WXGA+', + '1536x864': 'NA', + '1600x900': 'HD+', + '1680x1050': 'WSXGA+', + '1920x1080': 'FHD', + '1920x1200': 'WUXGA', + '2048x1152': 'QWXGA', + '2560x1080': 'N/A', + '2560x1440': 'QHD', + '3440x1440': 'N/A', + '3840x2160': '4K UHD', +}; + +function getInferredData(data: DeviceContext) { + const screenResolution = data[DeviceContextKey.SCREEN_RESOLUTION]; + const screenWidth = data[DeviceContextKey.SCREEN_WIDTH_PIXELS]; + const screenHeight = data[DeviceContextKey.SCREEN_HEIGHT_PIXELS]; + + if (screenResolution) { + const displayResolutionDescription = commonDisplayResolutions[screenResolution]; + + const commonData = { + ...data, + [DeviceContextKey.SCREEN_RESOLUTION]: displayResolutionDescription + ? `${screenResolution} (${displayResolutionDescription})` + : screenResolution, + }; + + if (!defined(screenWidth) && !defined(screenHeight)) { + const [width, height] = screenResolution.split('x'); + + if (width && height) { + return { + ...commonData, + [DeviceContextKey.SCREEN_WIDTH_PIXELS]: Number(width), + [DeviceContextKey.SCREEN_HEIGHT_PIXELS]: Number(height), + }; + } + } + + return commonData; + } + + if (defined(screenWidth) && defined(screenHeight)) { + const displayResolution = `${screenWidth}x${screenHeight}`; + const displayResolutionDescription = commonDisplayResolutions[displayResolution]; + + return { + ...data, + [DeviceContextKey.SCREEN_RESOLUTION]: displayResolutionDescription + ? 
`${displayResolution} (${displayResolutionDescription})` + : displayResolution, + }; + } + + return data; +} + +export function getDeviceContextData({ + data, + event, + meta, +}: { + data: DeviceContext; + event: Event; + meta?: Record<keyof DeviceContext, any>; +}): KeyValueListData { + return getContextKeys({data: getInferredData(data)}).map(ctxKey => { + switch (ctxKey) { + case DeviceContextKey.NAME: + return { + key: ctxKey, + subject: t('Name'), + value: data.name, + }; + case DeviceContextKey.FAMILY: + return { + key: ctxKey, + subject: t('Family'), + value: data.family, + }; + case DeviceContextKey.MODEL_ID: + return { + key: ctxKey, + subject: t('Model Id'), + value: data.model_id, + }; + case DeviceContextKey.MODEL: + return { + key: ctxKey, + subject: t('Model'), + value: + typeof data.model === 'string' ? ( + <DeviceName + value={`${data.model} ${data?.model_id ? `(${data.model_id})` : ''}`} + /> + ) : undefined, + }; + case DeviceContextKey.CPU_DESCRIPTION: + return { + key: ctxKey, + subject: t('CPU Description'), + value: data.cpu_description, + }; + case DeviceContextKey.ARCH: + return { + key: ctxKey, + subject: t('Architecture'), + value: data.arch, + }; + case DeviceContextKey.BATTERY_LEVEL: + return { + key: ctxKey, + subject: t('Battery Level'), + value: defined(data.battery_level) ? `${data.battery_level}%` : undefined, + }; + case DeviceContextKey.BATTERY_STATUS: + return { + key: ctxKey, + subject: t('Battery Status'), + value: data.battery_status, + }; + case DeviceContextKey.BATTERY_TEMPERATURE: + return { + key: ctxKey, + subject: t('Battery Temperature (°C)'), + value: data.battery_temperature, + }; + case DeviceContextKey.ORIENTATION: + return { + key: ctxKey, + subject: t('Orientation'), + value: data.orientation, + }; + case 'memory': + const {memory_size, free_memory, usable_memory} = data; + return { + key: ctxKey, + subject: t('Memory'), + value: + memory_size && free_memory && usable_memory + ? formatMemory(memory_size, free_memory, usable_memory) + : undefined, + }; + case 'storage': + const {storage_size, free_storage, external_storage_size, external_free_storage} = + data; + return { + key: ctxKey, + subject: t('Capacity'), + value: + storage_size && free_storage && external_storage_size && external_free_storage + ? formatStorage( + storage_size, + free_storage, + external_storage_size, + external_free_storage + ) + : undefined, + }; + case DeviceContextKey.FREE_STORAGE: { + return { + key: ctxKey, + subject: t('Free Storage'), + value: data.free_storage ? <FileSize bytes={data.free_storage} /> : undefined, + }; + } + case DeviceContextKey.STORAGE_SIZE: { + return { + key: ctxKey, + subject: t('Storage Size'), + value: data.storage_size ? <FileSize bytes={data.storage_size} /> : undefined, + }; + } + case DeviceContextKey.EXTERNAL_STORAGE_SIZE: { + return { + key: ctxKey, + subject: t('External Storage Size'), + value: data.external_storage_size ? ( + <FileSize bytes={data.external_storage_size} /> + ) : undefined, + }; + } + case DeviceContextKey.EXTERNAL_FREE_STORAGE: { + return { + key: ctxKey, + subject: t('External Free Storage'), + value: data.external_free_storage ? ( + <FileSize bytes={data.external_free_storage} /> + ) : undefined, + }; + } + case DeviceContextKey.SIMULATOR: + return { + key: ctxKey, + subject: t('Simulator'), + value: data.simulator, + }; + case DeviceContextKey.BOOT_TIME: + return { + key: ctxKey, + subject: t('Boot Time'), + value: getRelativeTimeFromEventDateCreated( + event.dateCreated ? 
event.dateCreated : event.dateReceived, + data.boot_time + ), + }; + case DeviceContextKey.DEVICE_TYPE: + return { + key: ctxKey, + subject: t('Device Type'), + value: data.device_type, + }; + case DeviceContextKey.BRAND: + return { + key: ctxKey, + subject: t('Brand'), + value: data.brand, + }; + case DeviceContextKey.CHARGING: + return { + key: ctxKey, + subject: t('Charging'), + value: data.charging, + }; + case DeviceContextKey.LOW_MEMORY: + return { + key: ctxKey, + subject: t('Low Memory'), + value: data.low_memory, + }; + case DeviceContextKey.FREE_MEMORY: + return { + key: ctxKey, + subject: t('Free Memory'), + value: data.free_memory ? <FileSize bytes={data.free_memory} /> : undefined, + }; + case DeviceContextKey.MEMORY_SIZE: + return { + key: ctxKey, + subject: t('Memory Size'), + value: data.memory_size ? <FileSize bytes={data.memory_size} /> : undefined, + }; + case DeviceContextKey.USABLE_MEMORY: + return { + key: ctxKey, + subject: t('Usable Memory'), + value: data.usable_memory ? <FileSize bytes={data.usable_memory} /> : undefined, + }; + case DeviceContextKey.MANUFACTURER: + return { + key: ctxKey, + subject: t('Manufacturer'), + value: data.manufacturer, + }; + case DeviceContextKey.ONLINE: + return { + key: ctxKey, + subject: t('Online'), + value: data.online, + }; + case DeviceContextKey.SCREEN_DENSITY: + return { + key: ctxKey, + subject: t('Screen Density'), + value: data.screen_density, + }; + case DeviceContextKey.SCREEN_DPI: + return { + key: ctxKey, + subject: t('Screen DPI'), + value: data.screen_dpi, + }; + case DeviceContextKey.SCREEN_HEIGHT_PIXELS: + return { + key: ctxKey, + subject: t('Screen Height Pixels'), + value: data.screen_height_pixels, + }; + case DeviceContextKey.SCREEN_RESOLUTION: + return { + key: ctxKey, + subject: t('Screen Resolution'), + value: data.screen_resolution, + }; + case DeviceContextKey.SCREEN_WIDTH_PIXELS: + return { + key: ctxKey, + subject: t('Screen Width Pixels'), + value: data.screen_width_pixels, + }; + + default: + return { + key: ctxKey, + subject: ctxKey, + value: data[ctxKey], + meta: meta?.[ctxKey]?.[''], + }; + } + }); +} diff --git a/static/app/components/events/contexts/knownContext/user.spec.tsx b/static/app/components/events/contexts/knownContext/user.spec.tsx new file mode 100644 index 00000000000000..8ed1b952ffeb2f --- /dev/null +++ b/static/app/components/events/contexts/knownContext/user.spec.tsx @@ -0,0 +1,93 @@ +import {EventFixture} from 'sentry-fixture/event'; + +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import ContextCard from 'sentry/components/events/contexts/contextCard'; +import { + getUserContextData, + type UserContext, +} from 'sentry/components/events/contexts/knownContext/user'; + +const MOCK_USER_CONTEXT: UserContext = { + email: 'leander.rodrigues@sentry.io', + ip_address: '127.0.0.1', + id: '808', + name: 'Leander', + username: 'leeandher', + geo: { + country_code: 'US', + city: 'San Francisco', + subdivision: 'California', + region: 'United States', + }, + // Extra data is still valid and preserved + extra_data: 'something', + unknown_key: 123, +}; + +const MOCK_REDACTION = { + name: { + '': { + rem: [['organization:0', 's', 0, 0]], + len: 5, + }, + }, +}; + +describe('UserContext', function () { + it('returns values and according to the parameters', function () { + expect(getUserContextData({data: MOCK_USER_CONTEXT})).toEqual([ + { + key: 'email', + subject: 'Email', + value: 'leander.rodrigues@sentry.io', + action: {link: 'mailto:leander.rodrigues@sentry.io'}, + }, + 
{key: 'ip_address', subject: 'IP Address', value: '127.0.0.1'}, + {key: 'id', subject: 'ID', value: '808'}, + {key: 'name', subject: 'Name', value: 'Leander'}, + {key: 'username', subject: 'Username', value: 'leeandher'}, + { + key: 'geo', + subject: 'Geography', + value: 'San Francisco, California, United States (US)', + }, + { + key: 'extra_data', + subject: 'extra_data', + value: 'something', + meta: undefined, + }, + { + key: 'unknown_key', + subject: 'unknown_key', + value: 123, + meta: undefined, + }, + ]); + }); + + it('renders with meta annotations correctly', function () { + const event = EventFixture({ + _meta: {contexts: {user: MOCK_REDACTION}}, + }); + + render( + <ContextCard + event={event} + type={'default'} + alias={'user'} + value={{...MOCK_USER_CONTEXT, name: ''}} + /> + ); + + expect(screen.getByText('User')).toBeInTheDocument(); + expect(screen.getByText('Email')).toBeInTheDocument(); + expect(screen.getByText('leander.rodrigues@sentry.io')).toBeInTheDocument(); + expect( + screen.getByRole('link', {name: 'leander.rodrigues@sentry.io'}) + ).toBeInTheDocument(); + expect(screen.getByText('Name')).toBeInTheDocument(); + expect(screen.getByText(/redacted/)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/contexts/knownContext/user.tsx b/static/app/components/events/contexts/knownContext/user.tsx new file mode 100644 index 00000000000000..83b6e69055ce7a --- /dev/null +++ b/static/app/components/events/contexts/knownContext/user.tsx @@ -0,0 +1,127 @@ +import {getContextKeys} from 'sentry/components/events/contexts/utils'; +import {t} from 'sentry/locale'; +import type {KeyValueListData} from 'sentry/types/group'; +import {defined} from 'sentry/utils'; + +enum UserContextKeys { + ID = 'id', + EMAIL = 'email', + USERNAME = 'username', + IP_ADDRESS = 'ip_address', + NAME = 'name', + GEO = 'geo', +} + +export interface UserContext { + // Any custom keys users may set + [key: string]: any; + [UserContextKeys.ID]?: string; + [UserContextKeys.EMAIL]?: string; + [UserContextKeys.USERNAME]?: string; + [UserContextKeys.IP_ADDRESS]?: string; + [UserContextKeys.NAME]?: string; + [UserContextKeys.GEO]?: Partial<Record<UserContextGeoKeys, string>>; +} + +enum UserContextGeoKeys { + CITY = 'city', + COUNTRY_CODE = 'country_code', + SUBDIVISION = 'subdivision', + REGION = 'region', +} + +const EMAIL_REGEX = /[^@]+@[^\.]+\..+/; + +function formatGeo(geoData: UserContext['geo'] = {}): string | undefined { + if (!geoData) { + return undefined; + } + + const geoStringArray: string[] = []; + + if (geoData.city) { + geoStringArray.push(geoData.city); + } + + if (geoData.subdivision) { + geoStringArray.push(geoData.subdivision); + } + + if (geoData.region) { + geoStringArray.push( + geoData.country_code + ? 
`${geoData.region} (${geoData.country_code})` + : geoData.region + ); + } + + return geoStringArray.join(', '); +} + +export function getUserContextData({ + data, + meta, +}: { + data: UserContext; + meta?: Record<keyof UserContext, any>; +}): KeyValueListData { + return ( + getContextKeys({data}) + .map(ctxKey => { + switch (ctxKey) { + case UserContextKeys.NAME: + return { + key: ctxKey, + subject: t('Name'), + value: data.name, + }; + case UserContextKeys.USERNAME: + return { + key: ctxKey, + subject: t('Username'), + value: data.username, + }; + case UserContextKeys.ID: + return { + key: ctxKey, + subject: t('ID'), + value: data.id, + }; + case UserContextKeys.IP_ADDRESS: + return { + key: ctxKey, + subject: t('IP Address'), + value: data.ip_address, + }; + case UserContextKeys.EMAIL: + return { + key: ctxKey, + subject: t('Email'), + value: data.email, + action: { + link: + defined(data.email) && EMAIL_REGEX.test(data.email) + ? `mailto:${data.email}` + : undefined, + }, + }; + case UserContextKeys.GEO: + return { + key: ctxKey, + subject: t('Geography'), + value: formatGeo(data.geo), + }; + default: + return { + key: ctxKey, + subject: ctxKey, + value: data[ctxKey], + meta: meta?.[ctxKey]?.[''], + }; + } + }) + // Since user context is generated separately from the rest, it has all known keys with those + // unset appearing as `null`. We want to omit those unless they have annotations. + .filter(item => defined(item.value) || defined(meta?.[item.key])) + ); +} diff --git a/static/app/components/events/contexts/platform/index.spec.tsx b/static/app/components/events/contexts/platform/index.spec.tsx deleted file mode 100644 index 8a701a75a2dd76..00000000000000 --- a/static/app/components/events/contexts/platform/index.spec.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import {EventFixture} from 'sentry-fixture/event'; -import {GroupFixture} from 'sentry-fixture/group'; -import {OrganizationFixture} from 'sentry-fixture/organization'; -import {ProjectFixture} from 'sentry-fixture/project'; - -import {render, screen} from 'sentry-test/reactTestingLibrary'; - -import ContextCard from 'sentry/components/events/contexts/contextCard'; - -describe('platform event context', function () { - const platformContexts = { - laravel: { - type: 'default', - some: 'value', - number: 123, - }, - }; - const organization = OrganizationFixture(); - const event = EventFixture({contexts: platformContexts}); - const group = GroupFixture(); - const project = ProjectFixture(); - - it('renders laravel context', function () { - const alias = 'laravel'; - render( - <ContextCard - type={platformContexts[alias].type} - alias={alias} - value={platformContexts[alias]} - event={event} - group={group} - project={project} - />, - {organization} - ); - - expect(screen.getByText('Laravel Context')).toBeInTheDocument(); - expect(screen.getByTestId(`${alias}-context-icon`)).toBeInTheDocument(); - }); -}); diff --git a/static/app/components/events/contexts/platform/index.tsx b/static/app/components/events/contexts/platform/index.tsx deleted file mode 100644 index e24415544fc6eb..00000000000000 --- a/static/app/components/events/contexts/platform/index.tsx +++ /dev/null @@ -1,78 +0,0 @@ -import {PlatformIcon} from 'platformicons'; - -import {getKnownData, getUnknownData} from 'sentry/components/events/contexts/utils'; -import type {IconSize} from 'sentry/utils/theme'; - -/** - * Mapping of platform to known context keys for platform-specific context. 
- */ -const KNOWN_PLATFORM_CONTEXT_KEYS: Record<string, string[]> = { - laravel: [], -}; - -export const KNOWN_PLATFORM_CONTEXTS = new Set(Object.keys(KNOWN_PLATFORM_CONTEXT_KEYS)); - -interface PlatformContextProps { - data: Record<string, any>; - platform: string; - meta?: Record<string, any>; -} - -enum PlatformContextKeys {} - -export function getKnownPlatformContextData({ - platform, - data, - meta, -}: PlatformContextProps) { - return getKnownData<PlatformContextProps['data'], PlatformContextKeys>({ - data, - meta, - knownDataTypes: KNOWN_PLATFORM_CONTEXT_KEYS[platform] ?? [], - onGetKnownDataDetails: () => { - switch (platform) { - default: - return undefined; - } - }, - }); -} - -export function getUnknownPlatformContextData({ - platform, - data, - meta, -}: PlatformContextProps) { - return getUnknownData({ - allData: data, - knownKeys: KNOWN_PLATFORM_CONTEXT_KEYS[platform] ?? [], - meta, - }); -} - -export function getPlatformContextIcon({ - platform, - size = 'sm', -}: Pick<PlatformContextProps, 'platform'> & { - size?: IconSize; -}) { - let platformIconName = ''; - switch (platform) { - case 'laravel': - platformIconName = 'php-laravel'; - break; - default: - break; - } - - if (platformIconName.length === 0) { - return null; - } - return ( - <PlatformIcon - size={size} - platform={platformIconName} - data-test-id={`${platform}-context-icon`} - /> - ); -} diff --git a/static/app/components/events/contexts/platformContext/laravel.spec.tsx b/static/app/components/events/contexts/platformContext/laravel.spec.tsx new file mode 100644 index 00000000000000..a4fc4935c23412 --- /dev/null +++ b/static/app/components/events/contexts/platformContext/laravel.spec.tsx @@ -0,0 +1,65 @@ +import {EventFixture} from 'sentry-fixture/event'; + +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import ContextCard from 'sentry/components/events/contexts/contextCard'; +import { + getLaravelContextData, + type LaravelContext, +} from 'sentry/components/events/contexts/platformContext/laravel'; + +const MOCK_LARAVEL_CONTEXT: LaravelContext = { + type: 'default', + // No known keys, but extra data is still valid and preserved + extra_data: 'something', + unknown_key: 123, +}; + +const MOCK_REDACTION = { + extra_data: { + '': { + rem: [['organization:0', 's', 0, 0]], + len: 5, + }, + }, +}; + +describe('LaravelContext', function () { + it('returns values and according to the parameters', function () { + expect(getLaravelContextData({data: MOCK_LARAVEL_CONTEXT})).toEqual([ + { + key: 'extra_data', + subject: 'extra_data', + value: 'something', + meta: undefined, + }, + { + key: 'unknown_key', + subject: 'unknown_key', + value: 123, + meta: undefined, + }, + ]); + }); + + it('renders with meta annotations correctly', function () { + const event = EventFixture({ + _meta: {contexts: {laravel: MOCK_REDACTION}}, + }); + + render( + <ContextCard + event={event} + type={'default'} + alias={'laravel'} + value={{...MOCK_LARAVEL_CONTEXT, extra_data: ''}} + /> + ); + + expect(screen.getByText('Laravel Context')).toBeInTheDocument(); + expect(screen.getByText('unknown_key')).toBeInTheDocument(); + expect(screen.getByText('123')).toBeInTheDocument(); + expect(screen.getByText('extra_data')).toBeInTheDocument(); + expect(screen.getByText(/redacted/)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/contexts/platformContext/laravel.tsx b/static/app/components/events/contexts/platformContext/laravel.tsx new file mode 100644 index 00000000000000..7bd120a40def8e --- /dev/null +++ 
b/static/app/components/events/contexts/platformContext/laravel.tsx @@ -0,0 +1,15 @@ +import {getContextKeys} from 'sentry/components/events/contexts/utils'; +import type {KeyValueListData} from 'sentry/types/group'; + +export interface LaravelContext { + // Any custom keys users may set + [key: string]: any; +} + +export function getLaravelContextData({data}: {data: LaravelContext}): KeyValueListData { + return getContextKeys({data}).map(ctxKey => ({ + key: ctxKey, + subject: ctxKey, + value: data[ctxKey], + })); +} diff --git a/static/app/components/events/contexts/platformContext/react.spec.tsx b/static/app/components/events/contexts/platformContext/react.spec.tsx new file mode 100644 index 00000000000000..c82b8872b30fc8 --- /dev/null +++ b/static/app/components/events/contexts/platformContext/react.spec.tsx @@ -0,0 +1,71 @@ +import {EventFixture} from 'sentry-fixture/event'; + +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import ContextCard from 'sentry/components/events/contexts/contextCard'; +import { + getReactContextData, + type ReactContext, +} from 'sentry/components/events/contexts/platformContext/react'; + +const MOCK_REACT_CONTEXT: ReactContext = { + type: 'default', + version: '17.0.2', + // Extra data is still valid and preserved + extra_data: 'something', + unknown_key: 123, +}; + +const MOCK_REDACTION = { + version: { + '': { + rem: [['organization:0', 's', 0, 0]], + len: 5, + }, + }, +}; + +describe('ReactContext', function () { + it('returns values and according to the parameters', function () { + expect(getReactContextData({data: MOCK_REACT_CONTEXT})).toEqual([ + { + key: 'version', + subject: 'Version', + value: '17.0.2', + }, + { + key: 'extra_data', + subject: 'extra_data', + value: 'something', + meta: undefined, + }, + { + key: 'unknown_key', + subject: 'unknown_key', + value: 123, + meta: undefined, + }, + ]); + }); + + it('renders with meta annotations correctly', function () { + const event = EventFixture({ + _meta: {contexts: {react: MOCK_REDACTION}}, + }); + + render( + <ContextCard + event={event} + type={'default'} + alias={'react'} + value={{...MOCK_REACT_CONTEXT, version: ''}} + /> + ); + + expect(screen.getByText('React')).toBeInTheDocument(); + expect(screen.getByText('unknown_key')).toBeInTheDocument(); + expect(screen.getByText('123')).toBeInTheDocument(); + expect(screen.getByText('Version')).toBeInTheDocument(); + expect(screen.getByText(/redacted/)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/contexts/platformContext/react.tsx b/static/app/components/events/contexts/platformContext/react.tsx new file mode 100644 index 00000000000000..0b4d1fa861fee5 --- /dev/null +++ b/static/app/components/events/contexts/platformContext/react.tsx @@ -0,0 +1,32 @@ +import {getContextKeys} from 'sentry/components/events/contexts/utils'; +import {t} from 'sentry/locale'; +import type {KeyValueListData} from 'sentry/types/group'; + +enum ReactContextKeys { + VERSION = 'version', +} + +export interface ReactContext { + // Any custom keys users may set + [key: string]: any; + [ReactContextKeys.VERSION]: string; +} + +export function getReactContextData({data}: {data: ReactContext}): KeyValueListData { + return getContextKeys({data}).map(ctxKey => { + switch (ctxKey) { + case ReactContextKeys.VERSION: + return { + key: ctxKey, + subject: t('Version'), + value: data.version, + }; + default: + return { + key: ctxKey, + subject: ctxKey, + value: data[ctxKey], + }; + } + }); +} diff --git 
a/static/app/components/events/contexts/platformContext/unity.spec.tsx b/static/app/components/events/contexts/platformContext/unity.spec.tsx new file mode 100644 index 00000000000000..5ef900a88519ee --- /dev/null +++ b/static/app/components/events/contexts/platformContext/unity.spec.tsx @@ -0,0 +1,77 @@ +import {EventFixture} from 'sentry-fixture/event'; + +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import ContextCard from 'sentry/components/events/contexts/contextCard'; +import {getUnityContextData} from 'sentry/components/events/contexts/platformContext/unity'; + +export const MOCK_UNITY_CONTEXT = { + type: 'unity' as const, + copy_texture_support: 'Basic, Copy3D, DifferentTypes, TextureToRT, RTToTexture', + editor_version: '2022.1.23f1', + install_mode: 'Store', + rendering_threading_mode: 'LegacyJobified', + target_frame_rate: '-1', + // Extra data is still valid and preserved + extra_data: 'something', + unknown_key: 123, +}; + +export const MOCK_REDACTION = { + install_mode: { + '': { + rem: [['organization:0', 'x']], + }, + }, +}; + +describe('UnityContext', function () { + it('returns values and according to the parameters', function () { + expect(getUnityContextData({data: MOCK_UNITY_CONTEXT})).toEqual([ + { + key: 'copy_texture_support', + subject: 'Copy Texture Support', + value: 'Basic, Copy3D, DifferentTypes, TextureToRT, RTToTexture', + }, + { + key: 'editor_version', + subject: 'Editor Version', + value: '2022.1.23f1', + }, + {key: 'install_mode', subject: 'Install Mode', value: 'Store'}, + { + key: 'rendering_threading_mode', + subject: 'Rendering Threading Mode', + value: 'LegacyJobified', + }, + { + key: 'target_frame_rate', + subject: 'Target Frame Rate', + value: '-1', + }, + {key: 'extra_data', subject: 'extra_data', value: 'something'}, + {key: 'unknown_key', subject: 'unknown_key', value: 123}, + ]); + }); + + it('renders with meta annotations correctly', function () { + const event = EventFixture({ + _meta: {contexts: {unity: MOCK_REDACTION}}, + }); + + render( + <ContextCard + event={event} + type={'unity'} + alias={'unity'} + value={{...MOCK_UNITY_CONTEXT, install_mode: ''}} + /> + ); + + expect(screen.getByText('Unity')).toBeInTheDocument(); + expect(screen.getByText('Editor Version')).toBeInTheDocument(); + expect(screen.getByText('2022.1.23f1')).toBeInTheDocument(); + expect(screen.getByText('Install Mode')).toBeInTheDocument(); + expect(screen.getByText(/redacted/)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/contexts/platformContext/unity.tsx b/static/app/components/events/contexts/platformContext/unity.tsx new file mode 100644 index 00000000000000..99e3efecec03dd --- /dev/null +++ b/static/app/components/events/contexts/platformContext/unity.tsx @@ -0,0 +1,47 @@ +import {getContextKeys} from 'sentry/components/events/contexts/utils'; +import {t} from 'sentry/locale'; +import {type UnityContext, UnityContextKey} from 'sentry/types/event'; +import type {KeyValueListData} from 'sentry/types/group'; + +export function getUnityContextData({data}: {data: UnityContext}): KeyValueListData { + return getContextKeys({data}).map(ctxKey => { + switch (ctxKey) { + case UnityContextKey.COPY_TEXTURE_SUPPORT: + return { + key: ctxKey, + subject: t('Copy Texture Support'), + value: data.copy_texture_support, + }; + case UnityContextKey.EDITOR_VERSION: + return { + key: ctxKey, + subject: t('Editor Version'), + value: data.editor_version, + }; + case UnityContextKey.INSTALL_MODE: + return { + key: ctxKey, + subject: 
t('Install Mode'), + value: data.install_mode, + }; + case UnityContextKey.RENDERING_THREADING_MODE: + return { + key: ctxKey, + subject: t('Rendering Threading Mode'), + value: data.rendering_threading_mode, + }; + case UnityContextKey.TARGET_FRAME_RATE: + return { + key: ctxKey, + subject: t('Target Frame Rate'), + value: data.target_frame_rate, + }; + default: + return { + key: ctxKey, + subject: ctxKey, + value: data[ctxKey], + }; + } + }); +} diff --git a/static/app/components/events/contexts/platformContext/utils.tsx b/static/app/components/events/contexts/platformContext/utils.tsx new file mode 100644 index 00000000000000..32978c4cadd50c --- /dev/null +++ b/static/app/components/events/contexts/platformContext/utils.tsx @@ -0,0 +1,88 @@ +import {PlatformIcon} from 'platformicons'; + +import {getLaravelContextData} from 'sentry/components/events/contexts/platformContext/laravel'; +import {getReactContextData} from 'sentry/components/events/contexts/platformContext/react'; +import {getUnityContextData} from 'sentry/components/events/contexts/platformContext/unity'; +import {getContextKeys} from 'sentry/components/events/contexts/utils'; +import {t} from 'sentry/locale'; +import type {KeyValueListData} from 'sentry/types/group'; +import type {IconSize} from 'sentry/utils/theme'; + +export enum PlatformContextKeys { + LARAVEL = 'laravel', + REACT = 'react', + UNITY = 'unity', +} + +export const PLATFORM_CONTEXT_KEYS = new Set<string>(Object.values(PlatformContextKeys)); + +export function getPlatformContextTitle({platform}: {platform: string}): string { + switch (platform) { + case PlatformContextKeys.LARAVEL: + return t('Laravel Context'); + case PlatformContextKeys.REACT: + return 'React'; + case PlatformContextKeys.UNITY: + return 'Unity'; + default: + return platform; + } +} + +export function getPlatformContextIcon({ + platform, + size = 'sm', +}: { + platform: string; + size?: IconSize; +}) { + let platformIconName = ''; + switch (platform) { + case PlatformContextKeys.LARAVEL: + platformIconName = 'php-laravel'; + break; + case PlatformContextKeys.REACT: + platformIconName = 'javascript-react'; + break; + case PlatformContextKeys.UNITY: + platformIconName = 'unity'; + break; + default: + break; + } + + if (platformIconName.length === 0) { + return null; + } + + return ( + <PlatformIcon + size={size} + platform={platformIconName} + data-test-id={`${platform}-context-icon`} + /> + ); +} + +export function getPlatformContextData({ + platform, + data, +}: { + data: any; + platform: string; +}): KeyValueListData { + switch (platform) { + case PlatformContextKeys.LARAVEL: + return getLaravelContextData({data}); + case PlatformContextKeys.REACT: + return getReactContextData({data}); + case PlatformContextKeys.UNITY: + return getUnityContextData({data}); + default: + return getContextKeys({data}).map(ctxKey => ({ + key: ctxKey, + subject: ctxKey, + value: data[ctxKey], + })); + } +} diff --git a/static/app/components/events/contexts/unity/getUnityKnownDataDetails.spec.tsx b/static/app/components/events/contexts/unity/getUnityKnownDataDetails.spec.tsx deleted file mode 100644 index 1cbfc24ef10343..00000000000000 --- a/static/app/components/events/contexts/unity/getUnityKnownDataDetails.spec.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import { - getUnityKnownDataDetails, - unityKnownDataValues, -} from 'sentry/components/events/contexts/unity/getUnityKnownDataDetails'; - -import {unityMockData} from './index.spec'; - -describe('getUnityKnownDataDetails', function () { - it('returns values and 
according to the parameters', function () { - const allKnownData: ReturnType<typeof getUnityKnownDataDetails>[] = []; - - for (const type of Object.keys(unityKnownDataValues)) { - const threadPoolInfoKnownData = getUnityKnownDataDetails({ - type: unityKnownDataValues[type], - data: unityMockData, - }); - - if (!threadPoolInfoKnownData) { - continue; - } - - allKnownData.push(threadPoolInfoKnownData); - } - - expect(allKnownData).toEqual([ - { - subject: 'Copy Texture Support', - value: 'Basic, Copy3D, DifferentTypes, TextureToRT, RTToTexture', - }, - {subject: 'Editor Version', value: '2022.1.23f1'}, - {subject: 'Install Mode', value: 'Store'}, - {subject: 'Rendering Threading Mode', value: 'LegacyJobified'}, - {subject: 'Target Frame Rate', value: '-1'}, - ]); - }); -}); diff --git a/static/app/components/events/contexts/unity/getUnityKnownDataDetails.tsx b/static/app/components/events/contexts/unity/getUnityKnownDataDetails.tsx deleted file mode 100644 index 7a13abf0e28247..00000000000000 --- a/static/app/components/events/contexts/unity/getUnityKnownDataDetails.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import type {KnownDataDetails} from 'sentry/components/events/contexts/utils'; -import {t} from 'sentry/locale'; -import type {UnityContext} from 'sentry/types/event'; -import {UnityContextKey} from 'sentry/types/event'; - -export const unityKnownDataValues = Object.values(UnityContextKey); - -type Props = { - data: UnityContext; - type: (typeof unityKnownDataValues)[number]; -}; - -export function getUnityKnownDataDetails({data, type}: Props): KnownDataDetails { - switch (type) { - case UnityContextKey.COPY_TEXTURE_SUPPORT: - return { - subject: t('Copy Texture Support'), - value: data.copy_texture_support, - }; - case UnityContextKey.EDITOR_VERSION: - return { - subject: t('Editor Version'), - value: data.editor_version, - }; - case UnityContextKey.INSTALL_MODE: - return { - subject: t('Install Mode'), - value: data.install_mode, - }; - case UnityContextKey.RENDERING_THREADING_MODE: - return { - subject: t('Rendering Threading Mode'), - value: data.rendering_threading_mode, - }; - case UnityContextKey.TARGET_FRAME_RATE: - return { - subject: t('Target Frame Rate'), - value: data.target_frame_rate, - }; - default: - return undefined; - } -} diff --git a/static/app/components/events/contexts/unity/index.spec.tsx b/static/app/components/events/contexts/unity/index.spec.tsx deleted file mode 100644 index 8ea0c4ba065813..00000000000000 --- a/static/app/components/events/contexts/unity/index.spec.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import {EventFixture} from 'sentry-fixture/event'; - -import {render, screen} from 'sentry-test/reactTestingLibrary'; - -import {UnityEventContext} from 'sentry/components/events/contexts/unity'; -import type {UnityContext} from 'sentry/types/event'; - -export const unityMockData: UnityContext = { - copy_texture_support: 'Basic, Copy3D, DifferentTypes, TextureToRT, RTToTexture', - editor_version: '2022.1.23f1', - install_mode: 'Store', - rendering_threading_mode: 'LegacyJobified', - target_frame_rate: '-1', - type: 'unity', -}; - -export const unityMetaMockData = { - '': { - rem: [['organization:0', 'x']], - }, -}; - -const event = EventFixture({ - _meta: { - contexts: { - unity: unityMetaMockData, - }, - }, -}); - -describe('unity event context', function () { - it('display redacted data', function () { - render(<UnityEventContext event={event} data={null} />); - expect(screen.queryByText('Unity')).not.toBeInTheDocument(); - }); -}); diff --git 
a/static/app/components/events/contexts/unity/index.tsx b/static/app/components/events/contexts/unity/index.tsx deleted file mode 100644 index 3909903d8adbcd..00000000000000 --- a/static/app/components/events/contexts/unity/index.tsx +++ /dev/null @@ -1,59 +0,0 @@ -import {Fragment} from 'react'; - -import ContextBlock from 'sentry/components/events/contexts/contextBlock'; -import type {Event, UnityContext} from 'sentry/types/event'; - -import { - getContextMeta, - getKnownData, - getKnownStructuredData, - getUnknownData, -} from '../utils'; - -import {getUnityKnownDataDetails, unityKnownDataValues} from './getUnityKnownDataDetails'; - -type Props = { - data: UnityContext | null; - event: Event; - meta?: Record<string, any>; -}; - -export function getKnownUnityContextData({data, meta}: Pick<Props, 'data' | 'meta'>) { - if (!data) { - return []; - } - return getKnownData<UnityContext, (typeof unityKnownDataValues)[number]>({ - data, - meta, - knownDataTypes: unityKnownDataValues, - onGetKnownDataDetails: v => getUnityKnownDataDetails(v), - }); -} - -export function getUnknownUnityContextData({data, meta}: Pick<Props, 'data' | 'meta'>) { - if (!data) { - return []; - } - return getUnknownData({ - allData: data, - knownKeys: unityKnownDataValues, - meta, - }); -} - -export function UnityEventContext({data, event, meta: propsMeta}: Props) { - if (!data) { - return null; - } - const meta = propsMeta ?? getContextMeta(event, 'unity'); - const knownData = getKnownUnityContextData({data, meta}); - const knownStructuredData = getKnownStructuredData(knownData, meta); - const unknownData = getUnknownUnityContextData({data, meta}); - - return ( - <Fragment> - <ContextBlock data={knownStructuredData} /> - <ContextBlock data={unknownData} /> - </Fragment> - ); -} diff --git a/static/app/components/events/contexts/user/getUserKnownDataDetails.spec.tsx b/static/app/components/events/contexts/user/getUserKnownDataDetails.spec.tsx deleted file mode 100644 index 21de3b75fd4889..00000000000000 --- a/static/app/components/events/contexts/user/getUserKnownDataDetails.spec.tsx +++ /dev/null @@ -1,47 +0,0 @@ -import {userKnownDataValues} from 'sentry/components/events/contexts/user'; -import {getUserKnownDataDetails} from 'sentry/components/events/contexts/user/getUserKnownDataDetails'; - -import {userMockData} from './index.spec'; - -describe('getUserKnownDataDetails', function () { - it('returns values and according to the parameters', function () { - const allKnownData: ReturnType<typeof getUserKnownDataDetails>[] = []; - - for (const type of Object.keys(userKnownDataValues)) { - const userKnownData = getUserKnownDataDetails({ - type: userKnownDataValues[type], - data: userMockData, - }); - - if (!userKnownData) { - continue; - } - - allKnownData.push(userKnownData); - } - - expect(allKnownData).toEqual([ - { - subject: 'ID', - value: '', - }, - { - subject: 'Email', - subjectIcon: false, - value: null, - }, - { - subject: 'Username', - value: null, - }, - { - subject: 'IP Address', - value: null, - }, - { - subject: 'Name', - value: null, - }, - ]); - }); -}); diff --git a/static/app/components/events/contexts/user/getUserKnownDataDetails.tsx b/static/app/components/events/contexts/user/getUserKnownDataDetails.tsx deleted file mode 100644 index c9fd144f5b4dba..00000000000000 --- a/static/app/components/events/contexts/user/getUserKnownDataDetails.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import styled from '@emotion/styled'; - -import type {KnownDataDetails} from 'sentry/components/events/contexts/utils'; -import 
ExternalLink from 'sentry/components/links/externalLink'; -import {IconMail} from 'sentry/icons'; -import {t} from 'sentry/locale'; -import {defined} from 'sentry/utils'; - -import type {UserEventContextData} from '.'; -import {UserKnownDataType} from '.'; - -const EMAIL_REGEX = /[^@]+@[^\.]+\..+/; - -type Props = { - data: UserEventContextData; - type: UserKnownDataType; -}; - -export function getUserKnownDataDetails({data, type}: Props): KnownDataDetails { - switch (type) { - case UserKnownDataType.NAME: - return { - subject: t('Name'), - value: data.name, - }; - case UserKnownDataType.USERNAME: - return { - subject: t('Username'), - value: data.username, - }; - case UserKnownDataType.ID: - return { - subject: t('ID'), - value: data.id, - }; - case UserKnownDataType.IP_ADDRESS: - return { - subject: t('IP Address'), - value: data.ip_address, - }; - case UserKnownDataType.EMAIL: - return { - subject: t('Email'), - value: data.email, - subjectIcon: defined(data.email) && EMAIL_REGEX.test(data.email) && ( - <ExternalLink href={`mailto:${data.email}`} className="external-icon"> - <StyledIconMail size="xs" /> - </ExternalLink> - ), - }; - default: - return undefined; - } -} - -const StyledIconMail = styled(IconMail)` - vertical-align: middle; -`; diff --git a/static/app/components/events/contexts/user/index.spec.tsx b/static/app/components/events/contexts/user/index.spec.tsx deleted file mode 100644 index 9b81a3aa0c87d9..00000000000000 --- a/static/app/components/events/contexts/user/index.spec.tsx +++ /dev/null @@ -1,93 +0,0 @@ -import {EventFixture} from 'sentry-fixture/event'; - -import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; -import {textWithMarkupMatcher} from 'sentry-test/utils'; - -import type {UserEventContextData} from 'sentry/components/events/contexts/user'; -import {UserEventContext} from 'sentry/components/events/contexts/user'; - -// the values of this mock are correct and the types need to be updated -export const userMockData = { - data: null, - email: null, - id: '', - ip_address: null, - name: null, - username: null, -} as unknown as UserEventContextData; - -export const userMetaMockData = { - id: { - '': { - chunks: [ - { - remark: 'x', - rule_id: 'project:0', - text: '', - type: 'redaction', - }, - ], - len: 9, - rem: [['project:0', 'x', 0, 0]], - }, - }, - ip_address: { - '': { - err: [ - [ - 'invalid_data', - { - reason: 'expected an ip address', - }, - ], - ], - len: 14, - rem: [['project:0', 'x', 0, 0]], - val: '', - }, - }, -}; - -const event = { - ...EventFixture(), - _meta: { - user: userMetaMockData, - }, -}; - -describe('user event context', function () { - it('display redacted data', async function () { - render(<UserEventContext event={event} data={userMockData} />); - - expect(screen.getByText('ID')).toBeInTheDocument(); // subject - expect(screen.getByText(/redacted/)).toBeInTheDocument(); // value - await userEvent.hover(screen.getByText(/redacted/)); - expect( - await screen.findByText( - textWithMarkupMatcher( - "Removed because of a data scrubbing rule in your project's settings" - ) // Fall back case - ) - ).toBeInTheDocument(); // tooltip description - - expect(screen.getByText('IP Address')).toBeInTheDocument(); // subject - await userEvent.hover(document.body); - expect(screen.getByText('null')).toBeInTheDocument(); // value - await userEvent.hover(screen.getByText('null')); - - // The content of the first tooltip is not removed from the DOM when it is hidden - // therefore we explicitly need to wait for both 
tooltips to be visible - // Fixes race condition that causes flakiness https://sentry.sentry.io/issues/3974475742/?project=4857230 - await waitFor(() => { - const tooltips = screen.getAllByText( - textWithMarkupMatcher( - "Removed because of a data scrubbing rule in your project's settings" - ) - ); - - expect(tooltips).toHaveLength(2); - expect(tooltips[1]).toBeInTheDocument(); - expect(tooltips[1]).toBeInTheDocument(); - }); - }); -}); diff --git a/static/app/components/events/contexts/user/index.tsx b/static/app/components/events/contexts/user/index.tsx deleted file mode 100644 index 64a8668bfd5432..00000000000000 --- a/static/app/components/events/contexts/user/index.tsx +++ /dev/null @@ -1,98 +0,0 @@ -import UserAvatar from 'sentry/components/avatar/userAvatar'; -import ErrorBoundary from 'sentry/components/errorBoundary'; -import ContextBlock from 'sentry/components/events/contexts/contextBlock'; -import KeyValueList from 'sentry/components/events/interfaces/keyValueList'; -import {userContextToActor} from 'sentry/components/events/interfaces/utils'; -import type {Event} from 'sentry/types/event'; -import type {AvatarUser} from 'sentry/types/user'; -import {defined} from 'sentry/utils'; - -import { - getContextMeta, - getKnownData, - getKnownStructuredData, - getUnknownData, -} from '../utils'; - -import {getUserKnownDataDetails} from './getUserKnownDataDetails'; - -export type UserEventContextData = { - data: Record<string, string>; -} & AvatarUser; - -type Props = { - data: UserEventContextData; - event: Event; - meta?: Record<string, any>; -}; - -export enum UserKnownDataType { - ID = 'id', - EMAIL = 'email', - USERNAME = 'username', - IP_ADDRESS = 'ip_address', - NAME = 'name', -} - -export enum UserIgnoredDataType { - DATA = 'data', -} - -export const userKnownDataValues = [ - UserKnownDataType.ID, - UserKnownDataType.EMAIL, - UserKnownDataType.USERNAME, - UserKnownDataType.IP_ADDRESS, - UserKnownDataType.NAME, -]; - -const userIgnoredDataValues = [UserIgnoredDataType.DATA]; - -export function getKnownUserContextData({data, meta}: Pick<Props, 'data' | 'meta'>) { - return getKnownData<UserEventContextData, UserKnownDataType>({ - data, - meta, - knownDataTypes: userKnownDataValues, - onGetKnownDataDetails: v => getUserKnownDataDetails(v), - }).map(v => ({ - ...v, - subjectDataTestId: `user-context-${v.key.toLowerCase()}-value`, - })); -} - -export function getUnknownUserContextData({data, meta}: Pick<Props, 'data' | 'meta'>) { - return getUnknownData({ - allData: data, - knownKeys: [...userKnownDataValues, ...userIgnoredDataValues], - meta, - }); -} -export function UserEventContext({data, event, meta: propsMeta}: Props) { - const meta = propsMeta ?? 
getContextMeta(event, 'user'); - const knownData = getKnownUserContextData({data, meta}); - const knownStructuredData = getKnownStructuredData(knownData, meta); - const unknownData = getUnknownUserContextData({data, meta}); - - return ( - <div className="user-widget"> - <div className="pull-left"> - <UserAvatar user={userContextToActor(data)} size={48} gravatar={false} /> - </div> - <ContextBlock data={knownStructuredData} /> - <ContextBlock data={unknownData} /> - {defined(data?.data) && ( - <ErrorBoundary mini> - <KeyValueList - data={Object.entries(data.data).map(([key, value]) => ({ - key, - value, - subject: key, - meta: meta[key]?.[''], - }))} - isContextData - /> - </ErrorBoundary> - )} - </div> - ); -} diff --git a/static/app/components/events/contexts/utils.tsx b/static/app/components/events/contexts/utils.tsx index 79e773fca652f4..8ad210103a7935 100644 --- a/static/app/components/events/contexts/utils.tsx +++ b/static/app/components/events/contexts/utils.tsx @@ -15,6 +15,7 @@ import {getAppContextData} from 'sentry/components/events/contexts/knownContext/ import {getBrowserContextData} from 'sentry/components/events/contexts/knownContext/browser'; import {getCloudResourceContextData} from 'sentry/components/events/contexts/knownContext/cloudResource'; import {getCultureContextData} from 'sentry/components/events/contexts/knownContext/culture'; +import {getDeviceContextData} from 'sentry/components/events/contexts/knownContext/device'; import {getGPUContextData} from 'sentry/components/events/contexts/knownContext/gpu'; import {getMemoryInfoContext} from 'sentry/components/events/contexts/knownContext/memoryInfo'; import {getMissingInstrumentationContextData} from 'sentry/components/events/contexts/knownContext/missingInstrumentation'; @@ -25,6 +26,13 @@ import {getRuntimeContextData} from 'sentry/components/events/contexts/knownCont import {getStateContextData} from 'sentry/components/events/contexts/knownContext/state'; import {getThreadPoolInfoContext} from 'sentry/components/events/contexts/knownContext/threadPoolInfo'; import {getTraceContextData} from 'sentry/components/events/contexts/knownContext/trace'; +import {getUserContextData} from 'sentry/components/events/contexts/knownContext/user'; +import { + getPlatformContextData, + getPlatformContextIcon, + getPlatformContextTitle, + PLATFORM_CONTEXT_KEYS, +} from 'sentry/components/events/contexts/platformContext/utils'; import {userContextToActor} from 'sentry/components/events/interfaces/utils'; import StructuredEventData from 'sentry/components/structuredEventData'; import {t} from 'sentry/locale'; @@ -37,17 +45,6 @@ import type {AvatarUser} from 'sentry/types/user'; import {defined} from 'sentry/utils'; import commonTheme from 'sentry/utils/theme'; -import {getDefaultContextData} from './default'; -import {getKnownDeviceContextData, getUnknownDeviceContextData} from './device'; -import { - getKnownPlatformContextData, - getPlatformContextIcon, - getUnknownPlatformContextData, - KNOWN_PLATFORM_CONTEXTS, -} from './platform'; -import {getKnownUnityContextData, getUnknownUnityContextData} from './unity'; -import {getKnownUserContextData, getUnknownUserContextData} from './user'; - /** * Generates the class name used for contexts */ @@ -78,6 +75,10 @@ export function generateIconName( return 'google'; } + if (lowerCaseName.startsWith('vercel')) { + return 'vercel'; + } + const formattedName = name .split(/\d/)[0] .toLowerCase() @@ -255,6 +256,10 @@ export function getContextTitle({ const contextType = getContextType({alias, 
type}); + if (PLATFORM_CONTEXT_KEYS.has(contextType)) { + return getPlatformContextTitle({platform: alias}); + } + switch (contextType) { case 'app': return t('App'); @@ -333,12 +338,14 @@ export function getContextIcon({ contextIconProps?: Partial<ContextIconProps>; value?: Record<string, any>; }): React.ReactNode { - if (KNOWN_PLATFORM_CONTEXTS.has(alias)) { + const contextType = getContextType({alias, type}); + if (PLATFORM_CONTEXT_KEYS.has(contextType)) { return getPlatformContextIcon({ platform: alias, size: contextIconProps?.size ?? 'xl', }); } + let iconName = ''; switch (type) { case 'device': @@ -390,21 +397,15 @@ export function getFormattedContextData({ }): KeyValueListData { const meta = getContextMeta(event, contextType); - if (KNOWN_PLATFORM_CONTEXTS.has(contextType)) { - return [ - ...getKnownPlatformContextData({platform: contextType, data: contextValue, meta}), - ...getUnknownPlatformContextData({platform: contextType, data: contextValue, meta}), - ]; + if (PLATFORM_CONTEXT_KEYS.has(contextType)) { + return getPlatformContextData({platform: contextType, data: contextValue}); } switch (contextType) { case 'app': return getAppContextData({data: contextValue, event, meta}); case 'device': - return [ - ...getKnownDeviceContextData({data: contextValue, event, meta}), - ...getUnknownDeviceContextData({data: contextValue, meta}), - ]; + return getDeviceContextData({data: contextValue, event, meta}); case 'memory_info': // Current case 'Memory Info': // Legacy return getMemoryInfoContext({data: contextValue, meta}); @@ -412,18 +413,10 @@ export function getFormattedContextData({ return getBrowserContextData({data: contextValue, meta}); case 'os': return getOperatingSystemContextData({data: contextValue, meta}); - case 'unity': - return [ - ...getKnownUnityContextData({data: contextValue, meta}), - ...getUnknownUnityContextData({data: contextValue, meta}), - ]; case 'runtime': return getRuntimeContextData({data: contextValue, meta}); case 'user': - return [ - ...getKnownUserContextData({data: contextValue, meta}), - ...getUnknownUserContextData({data: contextValue, meta}), - ]; + return getUserContextData({data: contextValue, meta}); case 'gpu': return getGPUContextData({data: contextValue, meta}); case 'trace': @@ -451,7 +444,12 @@ export function getFormattedContextData({ case 'missing_instrumentation': return getMissingInstrumentationContextData({data: contextValue, meta}); default: - return getDefaultContextData(contextValue); + return getContextKeys({data: contextValue}).map(ctxKey => ({ + key: ctxKey, + subject: ctxKey, + value: contextValue[ctxKey], + meta: meta?.[ctxKey]?.[''], + })); } } /** diff --git a/static/app/components/events/eventReplay/replayClipSection.tsx b/static/app/components/events/eventReplay/replayClipSection.tsx index 0a744f665047da..eb7998a87ad9dc 100644 --- a/static/app/components/events/eventReplay/replayClipSection.tsx +++ b/static/app/components/events/eventReplay/replayClipSection.tsx @@ -65,10 +65,13 @@ export function ReplayClipSection({event, group, replayId}: Props) { replayCount && replayCount > 1 ? ( <Fragment> <div> - {t( - 'There are %s for this issue.', - tn('%s replay', '%s replays', replayCount ?? 0) - )} + {replayCount > 50 + ? t('There are 50+ replays for this issue.') + : tn( + 'There is %s replay for this issue.', + 'there are %s replays for this issue.', + replayCount ?? 
0 + )} </div> {allReplaysButton} </Fragment> diff --git a/static/app/components/events/eventStatisticalDetector/eventAffectedTransactions.tsx b/static/app/components/events/eventStatisticalDetector/eventAffectedTransactions.tsx deleted file mode 100644 index af95c4189d4722..00000000000000 --- a/static/app/components/events/eventStatisticalDetector/eventAffectedTransactions.tsx +++ /dev/null @@ -1,360 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; -import * as Sentry from '@sentry/react'; - -import {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; -import {SegmentedControl} from 'sentry/components/segmentedControl'; -import {t} from 'sentry/locale'; -import type {Event} from 'sentry/types/event'; -import type {Group} from 'sentry/types/group'; -import type {Project} from 'sentry/types/project'; -import {defined} from 'sentry/utils'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {useProfileTopEventsStats} from 'sentry/utils/profiling/hooks/useProfileTopEventsStats'; -import {useRelativeDateTime} from 'sentry/utils/profiling/hooks/useRelativeDateTime'; -import { - generateProfileFlamechartRouteWithQuery, - generateProfileSummaryRouteWithQuery, -} from 'sentry/utils/profiling/routes'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; -import useOrganization from 'sentry/utils/useOrganization'; -import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; -import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; - -import {RELATIVE_DAYS_WINDOW} from './consts'; -import {EventRegressionTable} from './eventRegressionTable'; -import {useTransactionsDelta} from './transactionsDeltaProvider'; - -interface EventAffectedTransactionsProps { - event: Event; - group: Group; - project: Project; -} - -export function EventAffectedTransactions({ - event, - project, -}: EventAffectedTransactionsProps) { - const evidenceData = event.occurrence?.evidenceData; - const fingerprint = evidenceData?.fingerprint; - const breakpoint = evidenceData?.breakpoint; - const frameName = evidenceData?.function; - const framePackage = evidenceData?.package || evidenceData?.module; - - const isValid = defined(fingerprint) && defined(breakpoint); - - useEffect(() => { - if (isValid) { - return; - } - - Sentry.withScope(scope => { - scope.setContext('evidence data fields', { - fingerprint, - breakpoint, - }); - - Sentry.captureException( - new Error('Missing required evidence data on function regression issue.') - ); - }); - }, [isValid, fingerprint, breakpoint]); - - if (!isValid) { - return null; - } - - return ( - <EventAffectedTransactionsInner - breakpoint={breakpoint} - fingerprint={fingerprint} - frameName={frameName} - framePackage={framePackage} - project={project} - /> - ); -} - -const TRANSACTIONS_LIMIT = 10; - -const ADDITIONAL_COLUMNS = [ - {key: 'transaction', name: t('Transaction'), width: COL_WIDTH_UNDEFINED}, -]; - -interface EventAffectedTransactionsInnerProps { - breakpoint: number; - fingerprint: number; - frameName: string; - framePackage: string; - project: Project; -} - -function EventAffectedTransactionsInner({ - breakpoint, - fingerprint, - frameName, - framePackage, - project, -}: EventAffectedTransactionsInnerProps) { - const [causeType, setCauseType] = useState<'duration' | 'throughput'>('duration'); - const organization = useOrganization(); - - const datetime = useRelativeDateTime({ - anchor: breakpoint, - relativeDays: RELATIVE_DAYS_WINDOW, - }); - - const transactionsDeltaQuery = useTransactionsDelta(); - - 
const percentileBefore = `percentile_before(function.duration, 0.95, ${breakpoint})`; - const percentileAfter = `percentile_after(function.duration, 0.95, ${breakpoint})`; - const throughputBefore = `cpm_before(${breakpoint})`; - const throughputAfter = `cpm_after(${breakpoint})`; - - const query = useMemo(() => { - const data = transactionsDeltaQuery.data?.data ?? []; - if (!data.length) { - return null; - } - - const conditions = new MutableSearch(''); - conditions.addFilterValue('fingerprint', String(fingerprint), true); - conditions.addOp('('); - for (let i = 0; i < data.length; i++) { - if (i > 0) { - conditions.addOp('OR'); - } - conditions.addFilterValue('transaction', data[i].transaction as string, true); - } - conditions.addOp(')'); - return conditions.formatString(); - }, [fingerprint, transactionsDeltaQuery]); - - const functionStats = useProfileTopEventsStats({ - dataset: 'profileFunctions', - datetime, - fields: ['transaction', 'count()'], - query: query ?? '', - enabled: defined(query), - others: false, - referrer: 'api.profiling.functions.regression.transaction-stats', - topEvents: TRANSACTIONS_LIMIT, - yAxes: ['examples()'], - }); - - const examplesByTransaction = useMemo(() => { - const allExamples: Record<string, [string | null, string | null]> = {}; - if (!defined(functionStats.data)) { - return allExamples; - } - - const timestamps = functionStats.data.timestamps; - const breakpointIndex = timestamps.indexOf(breakpoint); - if (breakpointIndex < 0) { - return allExamples; - } - - transactionsDeltaQuery.data?.data?.forEach(row => { - const transaction = row.transaction as string; - const data = functionStats.data.data.find( - ({axis, label}) => axis === 'examples()' && label === transaction - ); - if (!defined(data)) { - return; - } - - allExamples[transaction] = findExamplePair(data.values, breakpointIndex); - }); - - return allExamples; - }, [breakpoint, transactionsDeltaQuery, functionStats]); - - const tableData = useMemo(() => { - return ( - transactionsDeltaQuery.data?.data.map(row => { - const [exampleBefore, exampleAfter] = examplesByTransaction[ - row.transaction as string - ] ?? [null, null]; - - if (causeType === 'throughput') { - const before = row[throughputBefore] as number; - const after = row[throughputAfter] as number; - return { - exampleBefore, - exampleAfter, - transaction: row.transaction, - throughputBefore: before, - throughputAfter: after, - percentageChange: after / before - 1, - }; - } - - const before = (row[percentileBefore] as number) / 1e9; - const after = (row[percentileAfter] as number) / 1e9; - return { - exampleBefore, - exampleAfter, - transaction: row.transaction, - durationBefore: before, - durationAfter: after, - percentageChange: after / before - 1, - }; - }) || [] - ); - }, [ - causeType, - percentileBefore, - percentileAfter, - throughputBefore, - throughputAfter, - transactionsDeltaQuery.data?.data, - examplesByTransaction, - ]); - - const options = useMemo(() => { - function handleGoToProfile() { - trackAnalytics('profiling_views.go_to_flamegraph', { - organization, - source: 'profiling.issue.function_regression.transactions', - }); - } - - const before = dataRow => - defined(dataRow.exampleBefore) - ? { - target: generateProfileFlamechartRouteWithQuery({ - orgSlug: organization.slug, - projectSlug: project.slug, - profileId: dataRow.exampleBefore, - query: { - frameName, - framePackage, - }, - }), - onClick: handleGoToProfile, - } - : undefined; - - const after = dataRow => - defined(dataRow.exampleAfter) - ? 
{ - target: generateProfileFlamechartRouteWithQuery({ - orgSlug: organization.slug, - projectSlug: project.slug, - profileId: dataRow.exampleAfter, - query: { - frameName, - framePackage, - }, - }), - onClick: handleGoToProfile, - } - : undefined; - - return { - transaction: { - link: dataRow => ({ - target: generateProfileSummaryRouteWithQuery({ - orgSlug: organization.slug, - projectSlug: project.slug, - transaction: dataRow.transaction as string, - }), - }), - }, - durationBefore: {link: before}, - durationAfter: {link: after}, - throughputBefore: {link: before}, - throughputAfter: {link: after}, - }; - }, [organization, project, frameName, framePackage]); - - return ( - <InterimSection - type={SectionKey.REGRESSION_AFFECTED_TRANSACTIONS} - title={t('Most Affected')} - actions={ - <SegmentedControl - size="xs" - aria-label={t('Duration or Throughput')} - value={causeType} - onChange={setCauseType} - > - <SegmentedControl.Item key="duration"> - {t('Duration (P95)')} - </SegmentedControl.Item> - <SegmentedControl.Item key="throughput"> - {t('Throughput')} - </SegmentedControl.Item> - </SegmentedControl> - } - > - <EventRegressionTable - causeType={causeType} - columns={ADDITIONAL_COLUMNS} - data={tableData || []} - isLoading={transactionsDeltaQuery.isPending} - isError={transactionsDeltaQuery.isError} - options={options} - /> - </InterimSection> - ); -} - -/** - * Find an example pair of profile ids from before and after the breakpoint. - * - * We prioritize profile ids from outside some window around the breakpoint - * because the breakpoint is not 100% accurate and giving a buffer around - * the breakpoint to so we can more accurate get a example profile from - * before and after ranges. - * - * @param examples list of example profile ids - * @param breakpointIndex the index where the breakpoint is - * @param window the window around the breakpoint to deprioritize - */ -function findExamplePair( - examples: string[][], - breakpointIndex, - window = 3 -): [string | null, string | null] { - let before: string | null = null; - - for (let i = breakpointIndex - window; i < examples.length && i >= 0; i--) { - if (Array.isArray(examples[i]) && examples[i].length > 0) { - before = examples[i][0]; - break; - } - } - - if (!defined(before)) { - for ( - let i = breakpointIndex; - i < examples.length && i > breakpointIndex - window; - i-- - ) { - if (Array.isArray(examples[i]) && examples[i].length > 0) { - before = examples[i][0]; - break; - } - } - } - - let after: string | null = null; - - for (let i = breakpointIndex + window; i < examples.length; i++) { - if (Array.isArray(examples[i]) && examples[i].length > 0) { - after = examples[i][0]; - break; - } - } - - if (!defined(before)) { - for (let i = breakpointIndex; i < breakpointIndex + window; i++) { - if (Array.isArray(examples[i]) && examples[i].length > 0) { - after = examples[i][0]; - break; - } - } - } - - return [before, after]; -} diff --git a/static/app/components/events/eventStatisticalDetector/eventDifferentialFlamegraph.tsx b/static/app/components/events/eventStatisticalDetector/eventDifferentialFlamegraph.tsx index 1911f9b30e8841..db7b22c57bac9c 100644 --- a/static/app/components/events/eventStatisticalDetector/eventDifferentialFlamegraph.tsx +++ b/static/app/components/events/eventStatisticalDetector/eventDifferentialFlamegraph.tsx @@ -8,7 +8,6 @@ import ButtonBar from 'sentry/components/buttonBar'; import Link from 'sentry/components/links/link'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import Panel 
from 'sentry/components/panels/panel'; -import PerformanceDuration from 'sentry/components/performanceDuration'; import Placeholder from 'sentry/components/placeholder'; import {DifferentialFlamegraph} from 'sentry/components/profiling/flamegraph/differentialFlamegraph'; import {DifferentialFlamegraphToolbar} from 'sentry/components/profiling/flamegraph/flamegraphToolbar/differentialFlamegraphToolbar'; @@ -18,6 +17,7 @@ import ProjectsStore from 'sentry/stores/projectsStore'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types/event'; import type {Project} from 'sentry/types/project'; +import {defined} from 'sentry/utils'; import {formatAbbreviatedNumber} from 'sentry/utils/formatters'; import {formatPercentage} from 'sentry/utils/number/formatPercentage'; import { @@ -31,7 +31,6 @@ import {FlamegraphThemeProvider} from 'sentry/utils/profiling/flamegraph/flamegr import {useFlamegraphTheme} from 'sentry/utils/profiling/flamegraph/useFlamegraphTheme'; import type {FlamegraphFrame} from 'sentry/utils/profiling/flamegraphFrame'; import type {Frame} from 'sentry/utils/profiling/frame'; -import type {EventsResultsDataRow} from 'sentry/utils/profiling/hooks/types'; import {useDifferentialFlamegraphModel} from 'sentry/utils/profiling/hooks/useDifferentialFlamegraphModel'; import type {DifferentialFlamegraphQueryResult} from 'sentry/utils/profiling/hooks/useDifferentialFlamegraphQuery'; import {useDifferentialFlamegraphQuery} from 'sentry/utils/profiling/hooks/useDifferentialFlamegraphQuery'; @@ -41,8 +40,6 @@ import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; import {LOADING_PROFILE_GROUP} from 'sentry/views/profiling/profileGroupProvider'; -import {useTransactionsDelta} from './transactionsDeltaProvider'; - interface EventDifferentialFlamegraphProps { event: Event; } @@ -75,53 +72,13 @@ export function EventDifferentialFlamegraph(props: EventDifferentialFlamegraphPr }); }, [isValid, fingerprint, breakpoint]); - const transactions = useTransactionsDelta(); - const [transaction, setTransaction] = useState< - EventsResultsDataRow<string> | undefined - >(undefined); - - if (transaction === undefined) { - const firstTransaction = transactions?.data?.data?.[0]; - if (firstTransaction) { - setTransaction(firstTransaction); - } - } - const {before, after} = useDifferentialFlamegraphQuery({ projectID: parseInt(props.event.projectID, 10), breakpoint, environments: selection.selection.environments, fingerprint: props.event.occurrence?.evidenceData?.fingerprint, - transaction: (transaction?.transaction as string) ?? '', }); - const onNextTransactionClick = useMemo(() => { - if (!transaction) { - return undefined; - } - const idx = transactions?.data?.data?.indexOf?.(transaction) ?? -1; - if (idx === -1 || idx === (transactions?.data?.data?.length ?? 0) - 1) { - return undefined; - } - - return () => { - setTransaction(transactions?.data?.data?.[idx + 1] ?? transaction); - }; - }, [transaction, transactions?.data?.data]); - - const onPreviousTransactionClick = useMemo(() => { - if (!transaction) { - return undefined; - } - const idx = transactions?.data?.data?.indexOf?.(transaction) ?? -1; - if (idx === -1 || idx === 0) { - return undefined; - } - return () => { - setTransaction(transactions?.data?.data?.[idx - 1] ?? 
transaction); - }; - }, [transaction, transactions?.data?.data]); - return ( <Fragment> <FlamegraphThemeProvider> @@ -135,9 +92,6 @@ export function EventDifferentialFlamegraph(props: EventDifferentialFlamegraphPr > <EventDifferentialFlamegraphView project={project} - onNextTransactionClick={onNextTransactionClick} - onPreviousTransactionClick={onPreviousTransactionClick} - transaction={transaction} before={before} after={after} /> @@ -158,10 +112,7 @@ function systemFrameOnly(frame: Frame): boolean { interface EventDifferentialFlamegraphViewProps { after: DifferentialFlamegraphQueryResult['before']; before: DifferentialFlamegraphQueryResult['after']; - onNextTransactionClick: (() => void) | undefined; - onPreviousTransactionClick: (() => void) | undefined; project: Project | undefined; - transaction: EventsResultsDataRow<string> | undefined; } function EventDifferentialFlamegraphView(props: EventDifferentialFlamegraphViewProps) { const organization = useOrganization(); @@ -196,27 +147,21 @@ function EventDifferentialFlamegraphView(props: EventDifferentialFlamegraphViewP if (!frame.profileIds?.length) { return ''; } - const profileId = frame.profileIds[0]; - - if (typeof profileId !== 'undefined') { - return ( - generateProfileRouteFromProfileReference({ - orgSlug: organization.slug, - projectSlug: props.project.slug, - reference: - typeof profileId === 'string' - ? profileId - : 'profiler_id' in profileId - ? profileId.profiler_id - : profileId.profile_id, - framePackage: frame.frame.package, - frameName: frame.frame.name, - }) ?? '' - ); + const profile = frame.profileIds?.[0]; + + if (!defined(profile)) { + return ''; } - // Regression issues do not work with continuous profiles - return ''; + return ( + generateProfileRouteFromProfileReference({ + orgSlug: organization.slug, + projectSlug: props.project.slug, + reference: profile, + framePackage: frame.frame.package, + frameName: frame.frame.name, + }) ?? 
'' + ); }, [organization.slug, props.project] ); @@ -224,11 +169,6 @@ function EventDifferentialFlamegraphView(props: EventDifferentialFlamegraphViewP return ( <FlamegraphContainer> <StyledPanel> - <DifferentialFlamegraphTransactionToolbar - transaction={props.transaction} - onNextTransactionClick={props.onNextTransactionClick} - onPreviousTransactionClick={props.onPreviousTransactionClick} - /> <DifferentialFlamegraphToolbar frameFilter={frameFilterSetting} onFrameFilterChange={setFrameFilterSetting} @@ -310,85 +250,6 @@ function EventDifferentialFlamegraphView(props: EventDifferentialFlamegraphViewP ); } -const numberFormatter = Intl.NumberFormat(undefined, { - maximumFractionDigits: 2, -}); - -interface DifferentialFlamegraphTransactionToolbarProps { - onNextTransactionClick: (() => void) | undefined; - onPreviousTransactionClick: (() => void) | undefined; - transaction: EventsResultsDataRow<string> | undefined; -} -function DifferentialFlamegraphTransactionToolbar( - props: DifferentialFlamegraphTransactionToolbarProps -) { - const [before, after] = useMemo(() => { - if (!props.transaction) { - return [0, 0]; - } - - const keys = Object.keys(props.transaction); - - let beforePercentile = 0; - let afterPercentile = 0; - - for (const key of keys) { - if (key.startsWith('percentile_after')) { - afterPercentile = props.transaction[key] as number; - } - if (key.startsWith('percentile_before')) { - beforePercentile = props.transaction[key] as number; - } - } - - return [beforePercentile, afterPercentile]; - }, [props.transaction]); - - return ( - <DifferentialFlamegraphTransactionToolbarContainer> - {props.transaction?.transaction ? ( - <DifferentialFlamegraphTransactionName> - {props.transaction.transaction} - </DifferentialFlamegraphTransactionName> - ) : ( - <Placeholder height="20px" width="66%" /> - )} - - {props.transaction ? ( - <span> - <PerformanceDuration nanoseconds={before} abbreviation /> - <DifferentialFlamegraphRegressionChange> - {after === 0 || before === 0 - ? 
'' - : '+' + numberFormatter.format(relativeChange(after, before) * 100) + '%'} - </DifferentialFlamegraphRegressionChange> - </span> - ) : ( - <Fragment> - <Placeholder height="20px" width="60px" /> - <Placeholder height="20px" width="60px" /> - </Fragment> - )} - <ButtonBar merged> - <DifferentialFlamegraphPaginationButton - icon={<IconChevron direction="left" />} - aria-label={t('Previous Transaction')} - size="xs" - disabled={!props.onPreviousTransactionClick} - onClick={props.onPreviousTransactionClick} - /> - <DifferentialFlamegraphPaginationButton - icon={<IconChevron direction="right" />} - aria-label={t('Next Transaction')} - size="xs" - disabled={!props.onNextTransactionClick} - onClick={props.onNextTransactionClick} - /> - </ButtonBar> - </DifferentialFlamegraphTransactionToolbarContainer> - ); -} - interface PaginationReducerState { page: number; pageCount: number; @@ -762,26 +623,6 @@ const DifferentialFlamegraphPaginationButton = styled(Button)` padding-left: ${space(0.75)}; padding-right: ${space(0.75)}; `; -const DifferentialFlamegraphTransactionName = styled('div')` - font-weight: ${p => p.theme.fontWeightBold}; - flex: 1; - overflow: hidden; - text-overflow: ellipsis; -`; - -const DifferentialFlamegraphRegressionChange = styled('span')` - margin-left: ${space(1)}; - color: ${p => p.theme.red300}; -`; - -const DifferentialFlamegraphTransactionToolbarContainer = styled('div')` - display: flex; - justify-content: space-between; - align-items: center; - padding: ${space(1)}; - gap: ${space(1)}; - border-bottom: 1px solid ${p => p.theme.border}; -`; const ErrorMessageContainer = styled('div')` position: absolute; diff --git a/static/app/components/events/eventStatisticalDetector/eventFunctionComparisonList.tsx b/static/app/components/events/eventStatisticalDetector/eventFunctionComparisonList.tsx deleted file mode 100644 index 16c60bc563bb22..00000000000000 --- a/static/app/components/events/eventStatisticalDetector/eventFunctionComparisonList.tsx +++ /dev/null @@ -1,320 +0,0 @@ -import {Fragment, useEffect, useMemo} from 'react'; -import styled from '@emotion/styled'; -import * as Sentry from '@sentry/react'; - -import Link from 'sentry/components/links/link'; -import PerformanceDuration from 'sentry/components/performanceDuration'; -import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import type {Event} from 'sentry/types/event'; -import type {Group} from 'sentry/types/group'; -import type {Organization} from 'sentry/types/organization'; -import type {Project} from 'sentry/types/project'; -import {defined} from 'sentry/utils'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {Container, NumberContainer} from 'sentry/utils/discover/styles'; -import {getShortEventId} from 'sentry/utils/events'; -import {useProfileEvents} from 'sentry/utils/profiling/hooks/useProfileEvents'; -import {useProfileFunctions} from 'sentry/utils/profiling/hooks/useProfileFunctions'; -import {useRelativeDateTime} from 'sentry/utils/profiling/hooks/useRelativeDateTime'; -import {generateProfileFlamechartRouteWithQuery} from 'sentry/utils/profiling/routes'; -import useOrganization from 'sentry/utils/useOrganization'; -import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; -import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; - -interface EventFunctionComparisonListProps { - event: Event; - group: Group; - project: Project; -} - -export function EventFunctionComparisonList({ - event, - project, -}: 
EventFunctionComparisonListProps) { - const evidenceData = event.occurrence?.evidenceData; - const fingerprint = evidenceData?.fingerprint; - const breakpoint = evidenceData?.breakpoint; - const frameName = evidenceData?.function; - const framePackage = evidenceData?.package || evidenceData?.module; - - const isValid = - defined(fingerprint) && - defined(breakpoint) && - defined(frameName) && - defined(framePackage); - - useEffect(() => { - if (isValid) { - return; - } - - Sentry.withScope(scope => { - scope.setContext('evidence data fields', { - fingerprint, - breakpoint, - frameName, - framePackage, - }); - - Sentry.captureException( - new Error('Missing required evidence data on function regression issue.') - ); - }); - }, [isValid, fingerprint, breakpoint, frameName, framePackage]); - - if (!isValid) { - return null; - } - - return ( - <EventComparisonListInner - breakpoint={breakpoint} - fingerprint={fingerprint} - frameName={frameName} - framePackage={framePackage} - project={project} - /> - ); -} - -interface EventComparisonListInnerProps { - breakpoint: number; - fingerprint: number; - frameName: string; - framePackage: string; - project: Project; -} - -function EventComparisonListInner({ - breakpoint, - fingerprint, - frameName, - framePackage, - project, -}: EventComparisonListInnerProps) { - const organization = useOrganization(); - - const breakpointDateTime = new Date(breakpoint * 1000); - const datetime = useRelativeDateTime({ - anchor: breakpoint, - relativeDays: 1, - }); - const {start: beforeDateTime, end: afterDateTime} = datetime; - - const beforeProfilesQuery = useProfileFunctions({ - datetime: { - start: beforeDateTime, - end: breakpointDateTime, - utc: true, - period: null, - }, - fields: ['examples()'], - sort: { - key: 'examples()', - order: 'asc', - }, - query: `fingerprint:${fingerprint}`, - projects: [project.id], - limit: 1, - referrer: 'api.profiling.functions.regression.list', - }); - - const afterProfilesQuery = useProfileFunctions({ - datetime: { - start: breakpointDateTime, - end: afterDateTime, - utc: true, - period: null, - }, - fields: ['examples()'], - sort: { - key: 'examples()', - order: 'asc', - }, - query: `fingerprint:${fingerprint}`, - projects: [project.id], - limit: 1, - referrer: 'api.profiling.functions.regression.list', - }); - - const beforeProfileIds = - (beforeProfilesQuery.data?.data?.[0]?.['examples()'] as string[]) ?? []; - const afterProfileIds = - (afterProfilesQuery.data?.data?.[0]?.['examples()'] as string[]) ?? []; - - const profilesQuery = useProfileEvents({ - datetime, - fields: ['profile.id', 'transaction', 'transaction.duration'], - query: `profile.id:[${[...beforeProfileIds, ...afterProfileIds].join(', ')}]`, - sort: { - key: 'transaction.duration', - order: 'desc', - }, - projects: [project.id], - limit: beforeProfileIds.length + afterProfileIds.length, - enabled: beforeProfileIds.length > 0 && afterProfileIds.length > 0, - referrer: 'api.profiling.functions.regression.examples', - }); - - const beforeProfiles = useMemo(() => { - const profileIds = new Set( - (beforeProfilesQuery.data?.data?.[0]?.['examples()'] as string[]) ?? [] - ); - - return ( - (profilesQuery.data?.data?.filter(row => - profileIds.has(row['profile.id'] as string) - ) as ProfileItem[]) ?? [] - ); - }, [beforeProfilesQuery, profilesQuery]); - - const afterProfiles = useMemo(() => { - const profileIds = new Set( - (afterProfilesQuery.data?.data?.[0]?.['examples()'] as string[]) ?? 
[] - ); - - return ( - (profilesQuery.data?.data?.filter(row => - profileIds.has(row['profile.id'] as string) - ) as ProfileItem[]) ?? [] - ); - }, [afterProfilesQuery, profilesQuery]); - - const durationUnit = profilesQuery.data?.meta?.units?.['transaction.duration'] ?? ''; - - return ( - <InterimSection - type={SectionKey.REGRESSION_PROFILE_COMPARISON} - title={t('Profile Comparison')} - > - <Wrapper> - <div> - <Header>{t('Example Profiles Before')}</Header> - <EventList - frameName={frameName} - framePackage={framePackage} - organization={organization} - profiles={beforeProfiles} - project={project} - unit={durationUnit} - /> - </div> - <div> - <Header>{t('Example Profiles After')}</Header> - <EventList - frameName={frameName} - framePackage={framePackage} - organization={organization} - profiles={afterProfiles} - project={project} - unit={durationUnit} - /> - </div> - </Wrapper> - </InterimSection> - ); -} - -interface ProfileItem { - 'profile.id': string; - timestamp: string; - transaction: string; - 'transaction.duration': number; -} - -interface EventListProps { - frameName: string; - framePackage: string; - organization: Organization; - profiles: ProfileItem[]; - project: Project; - unit: string; -} - -function EventList({ - frameName, - framePackage, - organization, - profiles, - project, - unit, -}: EventListProps) { - return ( - <ListContainer> - <Container> - <strong>{t('Profile ID')}</strong> - </Container> - <Container> - <strong>{t('Transaction')}</strong> - </Container> - <NumberContainer> - <strong>{t('Duration')} </strong> - </NumberContainer> - {profiles.map(item => { - const target = generateProfileFlamechartRouteWithQuery({ - orgSlug: organization.slug, - projectSlug: project.slug, - profileId: item['profile.id'], - query: { - frameName, - framePackage, - }, - }); - - return ( - <Fragment key={item['profile.id']}> - <Container> - <Link - to={target} - onClick={() => { - trackAnalytics('profiling_views.go_to_flamegraph', { - organization, - source: 'profiling.issue.function_regression.list', - }); - }} - > - {getShortEventId(item['profile.id'])} - </Link> - </Container> - <Container>{item.transaction}</Container> - <NumberContainer> - {unit === 'millisecond' ? 
( - <PerformanceDuration - milliseconds={item['transaction.duration']} - abbreviation - /> - ) : ( - <PerformanceDuration - nanoseconds={item['transaction.duration']} - abbreviation - /> - )} - </NumberContainer> - </Fragment> - ); - })} - </ListContainer> - ); -} - -const Header = styled('h6')` - font-size: ${p => p.theme.fontSizeMedium}; - margin-bottom: ${space(1)}; -`; - -const Wrapper = styled('div')` - display: grid; - grid-template-columns: 1fr; - gap: ${space(1)}; - - @media (min-width: ${p => p.theme.breakpoints.medium}) { - grid-template-columns: 1fr 1fr; - } -`; - -const ListContainer = styled('div')` - display: grid; - grid-template-columns: minmax(75px, 1fr) auto minmax(75px, 1fr); - gap: ${space(1)}; -`; diff --git a/static/app/components/events/eventStatisticalDetector/eventRegressionTable.tsx b/static/app/components/events/eventStatisticalDetector/eventRegressionTable.tsx index 616361971beb2d..cdb0e326dd79e3 100644 --- a/static/app/components/events/eventStatisticalDetector/eventRegressionTable.tsx +++ b/static/app/components/events/eventStatisticalDetector/eventRegressionTable.tsx @@ -137,6 +137,10 @@ function durationRenderer(duration, {dataRow, option}) { } function changeRenderer(percentageChange) { + if (percentageChange === Infinity) { + return <ChangeContainer change={'neutral'} />; + } + return ( <ChangeContainer change={ diff --git a/static/app/components/events/eventStatisticalDetector/transactionsDeltaProvider.tsx b/static/app/components/events/eventStatisticalDetector/transactionsDeltaProvider.tsx deleted file mode 100644 index 521edd1f5b9a85..00000000000000 --- a/static/app/components/events/eventStatisticalDetector/transactionsDeltaProvider.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import type React from 'react'; -import {createContext, useContext} from 'react'; - -import {RELATIVE_DAYS_WINDOW} from 'sentry/components/events/eventStatisticalDetector/consts'; -import type {Event} from 'sentry/types/event'; -import type {Project} from 'sentry/types/project'; -import type {EventsResults} from 'sentry/utils/profiling/hooks/types'; -import {useProfileFunctions} from 'sentry/utils/profiling/hooks/useProfileFunctions'; -import {useRelativeDateTime} from 'sentry/utils/profiling/hooks/useRelativeDateTime'; -import type {UseApiQueryResult} from 'sentry/utils/queryClient'; -import type RequestError from 'sentry/utils/requestError/requestError'; - -const TransactionsDeltaProviderContext = createContext<UseApiQueryResult< - EventsResults<string>, - RequestError -> | null>(null); - -const TRANSACTIONS_LIMIT = 10; - -export function useTransactionsDelta(): UseApiQueryResult< - EventsResults<string>, - RequestError -> { - const ctx = useContext(TransactionsDeltaProviderContext); - if (!ctx) { - throw new Error( - 'useTransactionsDelta called outside of TransactionsDeltaProviderProvider' - ); - } - return ctx; -} - -interface TransactionsDeltaProviderProps { - children: React.ReactNode; - event: Event; - project: Project; -} - -export function TransactionsDeltaProvider(props: TransactionsDeltaProviderProps) { - const evidenceData = props.event.occurrence?.evidenceData; - const fingerprint = evidenceData?.fingerprint; - const breakpoint = evidenceData?.breakpoint; - - const datetime = useRelativeDateTime({ - anchor: breakpoint, - relativeDays: RELATIVE_DAYS_WINDOW, - }); - - const regressionScore = `regression_score(function.duration, 0.95, ${breakpoint})`; - const percentileBefore = `percentile_before(function.duration, 0.95, ${breakpoint})`; - const percentileAfter = 
`percentile_after(function.duration, 0.95, ${breakpoint})`; - const throughputBefore = `cpm_before(${breakpoint})`; - const throughputAfter = `cpm_after(${breakpoint})`; - - const transactionsDeltaQuery = useProfileFunctions({ - datetime, - fields: [ - 'transaction', - percentileBefore, - percentileAfter, - throughputBefore, - throughputAfter, - regressionScore, - ], - sort: { - key: regressionScore, - order: 'desc', - }, - query: `fingerprint:${fingerprint}`, - projects: [props.project.id], - limit: TRANSACTIONS_LIMIT, - referrer: 'api.profiling.functions.regression.transactions', - }); - - return ( - <TransactionsDeltaProviderContext.Provider value={transactionsDeltaQuery}> - {props.children} - </TransactionsDeltaProviderContext.Provider> - ); -} diff --git a/static/app/components/events/eventTags/eventTagsTree.spec.tsx b/static/app/components/events/eventTags/eventTagsTree.spec.tsx index 40f24bd15f60d6..65c65347f5b26d 100644 --- a/static/app/components/events/eventTags/eventTagsTree.spec.tsx +++ b/static/app/components/events/eventTags/eventTagsTree.spec.tsx @@ -22,9 +22,9 @@ describe('EventTagsTree', function () { {key: 'tree', value: 'maple'}, {key: 'tree.branch', value: 'jagged'}, {key: 'tree.branch.leaf', value: 'red'}, - {key: 'favourite.colour', value: 'teal'}, - {key: 'favourite.animal', value: 'dog'}, - {key: 'favourite.game', value: 'everdell'}, + {key: 'favorite.color', value: 'teal'}, + {key: 'favorite.animal', value: 'dog'}, + {key: 'favorite.game', value: 'everdell'}, {key: 'magic.is', value: 'real'}, {key: 'magic.is.probably.not', value: 'spells'}, {key: 'double..dot', value: 'works'}, @@ -36,13 +36,13 @@ describe('EventTagsTree', function () { 'app.version', 'tree.branch', 'tree.branch.leaf', - 'favourite.colour', - 'favourite.animal', - 'favourite.game', + 'favorite.color', + 'favorite.animal', + 'favorite.game', 'magic.is', 'magic.is.probably.not', ]; - const emptyBranchTags = ['favourite', 'magic', 'probably']; + const emptyBranchTags = ['favorite', 'magic', 'probably']; const treeBranchTags = [ 'app_start_time', 'app_name', @@ -50,7 +50,7 @@ describe('EventTagsTree', function () { 'tree', 'branch', 'leaf', - 'colour', + 'color', 'animal', 'game', 'is', diff --git a/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx b/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx index bfb0c51acb982c..817f37e932878f 100644 --- a/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx +++ b/static/app/components/events/featureFlags/eventFeatureFlagList.spec.tsx @@ -1,9 +1,11 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; import {TagsFixture} from 'sentry-fixture/tags'; import { render, screen, userEvent, + waitFor, waitForDrawerToHide, } from 'sentry-test/reactTestingLibrary'; @@ -12,7 +14,8 @@ import { EMPTY_STATE_SECTION_PROPS, MOCK_DATA_SECTION_PROPS, MOCK_FLAGS, - NO_FLAG_CONTEXT_SECTION_PROPS, + NO_FLAG_CONTEXT_SECTION_PROPS_CTA, + NO_FLAG_CONTEXT_SECTION_PROPS_NO_CTA, } from 'sentry/components/events/featureFlags/testUtils'; // Needed to mock useVirtualizer lists. 
@@ -30,6 +33,22 @@ jest.spyOn(window.Element.prototype, 'getBoundingClientRect').mockImplementation describe('EventFeatureFlagList', function () { beforeEach(function () { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/issues/1/events/', + body: [], + }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/flags/logs/', + body: {data: []}, + }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + body: {data: {}}, + }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + body: {data: {dismissed_ts: null}}, + }); MockApiClient.addMockResponse({ url: `/organizations/org-slug/issues/1/tags/`, body: TagsFixture(), @@ -208,8 +227,54 @@ describe('EventFeatureFlagList', function () { ).toBeInTheDocument(); }); - it('renders nothing if event.contexts.flags is not set', function () { - render(<EventFeatureFlagList {...NO_FLAG_CONTEXT_SECTION_PROPS} />); + it('renders cta if event.contexts.flags is not set and should show cta', async function () { + const org = OrganizationFixture({features: ['feature-flag-cta']}); + + render(<EventFeatureFlagList {...NO_FLAG_CONTEXT_SECTION_PROPS_CTA} />, { + organization: org, + }); + + const control = screen.queryByRole('button', {name: 'Sort Flags'}); + expect(control).not.toBeInTheDocument(); + const search = screen.queryByRole('button', {name: 'Open Feature Flag Search'}); + expect(search).not.toBeInTheDocument(); + expect( + screen.queryByRole('button', {name: 'Set Up Integration'}) + ).not.toBeInTheDocument(); + + // wait for the CTA to be rendered + expect(await screen.findByText('Set Up Feature Flags')).toBeInTheDocument(); + expect(screen.getByText('Feature Flags')).toBeInTheDocument(); + }); + + it('renders nothing if event.contexts.flags is not set and should not show cta - wrong platform', async function () { + const org = OrganizationFixture({features: ['feature-flag-cta']}); + + render(<EventFeatureFlagList {...NO_FLAG_CONTEXT_SECTION_PROPS_NO_CTA} />, { + organization: org, + }); + + const control = screen.queryByRole('button', {name: 'Sort Flags'}); + expect(control).not.toBeInTheDocument(); + const search = screen.queryByRole('button', {name: 'Open Feature Flag Search'}); + expect(search).not.toBeInTheDocument(); + expect( + screen.queryByRole('button', {name: 'Set Up Integration'}) + ).not.toBeInTheDocument(); + + // CTA should not appear + await waitFor(() => { + expect(screen.queryByText('Set Up Feature Flags')).not.toBeInTheDocument(); + }); + expect(screen.queryByText('Feature Flags')).not.toBeInTheDocument(); + }); + + it('renders nothing if event.contexts.flags is not set and should not show cta - no feature flag', async function () { + const org = OrganizationFixture({features: ['fake-feature-flag']}); + + render(<EventFeatureFlagList {...NO_FLAG_CONTEXT_SECTION_PROPS_CTA} />, { + organization: org, + }); const control = screen.queryByRole('button', {name: 'Sort Flags'}); expect(control).not.toBeInTheDocument(); @@ -218,6 +283,11 @@ describe('EventFeatureFlagList', function () { expect( screen.queryByRole('button', {name: 'Set Up Integration'}) ).not.toBeInTheDocument(); + + // CTA should not appear + await waitFor(() => { + expect(screen.queryByText('Set Up Feature Flags')).not.toBeInTheDocument(); + }); expect(screen.queryByText('Feature Flags')).not.toBeInTheDocument(); }); }); diff --git a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx index 
3b8b5ef9575776..550a4d2b3e8ca7 100644 --- a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx +++ b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx @@ -1,20 +1,17 @@ import {Fragment, useCallback, useEffect, useMemo, useRef, useState} from 'react'; import styled from '@emotion/styled'; -import {openModal} from 'sentry/actionCreators/modal'; import {Button} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import EmptyStateWarning from 'sentry/components/emptyStateWarning'; -import ErrorBoundary from 'sentry/components/errorBoundary'; import { CardContainer, FeatureFlagDrawer, } from 'sentry/components/events/featureFlags/featureFlagDrawer'; +import FeatureFlagInlineCTA from 'sentry/components/events/featureFlags/featureFlagInlineCTA'; import FeatureFlagSort from 'sentry/components/events/featureFlags/featureFlagSort'; -import { - modalCss, - SetupIntegrationModal, -} from 'sentry/components/events/featureFlags/setupIntegrationModal'; +import {useFeatureFlagOnboarding} from 'sentry/components/events/featureFlags/useFeatureFlagOnboarding'; +import useIssueEvents from 'sentry/components/events/featureFlags/useIssueEvents'; import { FlagControlOptions, OrderBy, @@ -23,6 +20,7 @@ import { } from 'sentry/components/events/featureFlags/utils'; import useDrawer from 'sentry/components/globalDrawer'; import KeyValueData from 'sentry/components/keyValueData'; +import {featureFlagOnboardingPlatforms} from 'sentry/data/platformCategories'; import {IconMegaphone, IconSearch} from 'sentry/icons'; import {t} from 'sentry/locale'; import type {Event, FeatureFlag} from 'sentry/types/event'; @@ -32,10 +30,10 @@ import {trackAnalytics} from 'sentry/utils/analytics'; import {useFeedbackForm} from 'sentry/utils/useFeedbackForm'; import useOrganization from 'sentry/utils/useOrganization'; import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; +import {useIssueDetailsEventView} from 'sentry/views/issueDetails/streamline/hooks/useIssueDetailsDiscoverQuery'; +import {useOrganizationFlagLog} from 'sentry/views/issueDetails/streamline/hooks/useOrganizationFlagLog'; +import useSuspectFlags from 'sentry/views/issueDetails/streamline/hooks/useSuspectFlags'; import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; -import {useIssueDetailsEventView} from 'sentry/views/issueDetails/streamline/useIssueDetailsDiscoverQuery'; -import {useOrganizationFlagLog} from 'sentry/views/issueDetails/streamline/useOrganizationFlagLog'; -import useSuspectFlags from 'sentry/views/issueDetails/streamline/useSuspectFlags'; export function EventFeatureFlagList({ event, @@ -81,6 +79,14 @@ export function EventFeatureFlagList({ }, }); + const { + data: relatedEvents, + isPending: isRelatedEventsPending, + isError: isRelatedEventsError, + } = useIssueEvents({issueId: group.id}); + + const {activateSidebarSkipConfigure} = useFeatureFlagOnboarding(); + const { suspectFlags, isError: isSuspectError, @@ -92,33 +98,43 @@ export function EventFeatureFlagList({ event, }); - const hasFlagContext = !!event.contexts.flags; - const hasFlags = Boolean(hasFlagContext && event?.contexts?.flags?.values.length); - - function handleSetupButtonClick() { - trackAnalytics('flags.setup_modal_opened', {organization}); - openModal(modalProps => <SetupIntegrationModal {...modalProps} />, { - modalCss, - }); - } - const suspectFlagNames: Set<string> = useMemo(() => { return isSuspectError || isSuspectPending ? 
new Set() : new Set(suspectFlags.map(f => f.flag)); }, [isSuspectError, isSuspectPending, suspectFlags]); - const hydratedFlags = useMemo(() => { - // Transform the flags array into something readable by the key-value component - // Reverse the flags to show newest at the top by default - const rawFlags: FeatureFlag[] = event.contexts?.flags?.values.toReversed() ?? []; + const hasFlagContext = Boolean(event.contexts?.flags?.values); + const anyEventHasContext = + isRelatedEventsPending || isRelatedEventsError + ? false + : relatedEvents.filter(e => Boolean(e.contexts?.flags?.values)).length > 0; - // Filter out ill-formatted flags, which come from SDK developer error or user-provided contexts. - const flags = rawFlags.filter( - f => f && typeof f === 'object' && 'flag' in f && 'result' in f + const eventFlags: Required<FeatureFlag>[] = useMemo(() => { + // At runtime there's no type guarantees on the event flags. So we have to + // explicitly validate against SDK developer error or user-provided contexts. + const rawFlags = event.contexts?.flags?.values ?? []; + return rawFlags.filter( + (f): f is Required<FeatureFlag> => + f && + typeof f === 'object' && + typeof f.flag === 'string' && + typeof f.result === 'boolean' ); + }, [event]); + + const hasFlags = hasFlagContext && eventFlags.length > 0; + + const showCTA = + !hasFlagContext && + !anyEventHasContext && + featureFlagOnboardingPlatforms.includes(project.platform ?? 'other') && + organization.features.includes('feature-flag-cta'); - return flags.map(f => { + const hydratedFlags = useMemo(() => { + // Transform the flags array into something readable by the key-value component. + // Reverse the flags to show newest at the top by default. + return eventFlags.toReversed().map(f => { return { item: { key: f.flag, @@ -135,7 +151,7 @@ export function EventFeatureFlagList({ isSuspectFlag: suspectFlagNames.has(f.flag), }; }); - }, [event, suspectFlagNames]); + }, [suspectFlagNames, eventFlags]); const onViewAllFlags = useCallback( (focusControl?: FlagControlOptions) => { @@ -181,7 +197,10 @@ export function EventFeatureFlagList({ } }, [hasFlags, hydratedFlags.length, organization]); - // TODO: for LD users, show a CTA in this section instead + if (showCTA) { + return <FeatureFlagInlineCTA projectId={event.projectID} />; + } + // if contexts.flags is not set, hide the section if (!hasFlagContext) { return null; @@ -190,45 +209,45 @@ export function EventFeatureFlagList({ const actions = ( <ButtonBar gap={1}> {feedbackButton} - {hasFlagContext && ( - <Fragment> - <Button - aria-label={t('Set Up Integration')} - size="xs" - onClick={handleSetupButtonClick} - > - {t('Set Up Integration')} - </Button> - {hasFlags && ( - <Fragment> - <Button - size="xs" - aria-label={t('View All')} - ref={viewAllButtonRef} - title={t('View All Flags')} - onClick={() => { - isDrawerOpen ? 
closeDrawer() : onViewAllFlags(); - }} - > - {t('View All')} - </Button> - <Button - aria-label={t('Open Feature Flag Search')} - icon={<IconSearch size="xs" />} - size="xs" - title={t('Open Search')} - onClick={() => onViewAllFlags(FlagControlOptions.SEARCH)} - /> - <FeatureFlagSort - orderBy={orderBy} - sortBy={sortBy} - setSortBy={setSortBy} - setOrderBy={setOrderBy} - /> - </Fragment> - )} - </Fragment> - )} + <Fragment> + <Button + aria-label={t('Set Up Integration')} + size="xs" + onClick={mouseEvent => { + activateSidebarSkipConfigure(mouseEvent, project.id); + }} + > + {t('Set Up Integration')} + </Button> + {hasFlags && ( + <Fragment> + <Button + size="xs" + aria-label={t('View All')} + ref={viewAllButtonRef} + title={t('View All Flags')} + onClick={() => { + isDrawerOpen ? closeDrawer() : onViewAllFlags(); + }} + > + {t('View All')} + </Button> + <Button + aria-label={t('Open Feature Flag Search')} + icon={<IconSearch size="xs" />} + size="xs" + title={t('Open Search')} + onClick={() => onViewAllFlags(FlagControlOptions.SEARCH)} + /> + <FeatureFlagSort + orderBy={orderBy} + sortBy={sortBy} + setSortBy={setSortBy} + setOrderBy={setOrderBy} + /> + </Fragment> + )} + </Fragment> </ButtonBar> ); @@ -241,28 +260,26 @@ export function EventFeatureFlagList({ } return ( - <ErrorBoundary mini message={t('There was a problem loading feature flags.')}> - <InterimSection - help={t( - "The last 100 flags evaluated in the user's session leading up to this event." - )} - isHelpHoverable - title={t('Feature Flags')} - type={SectionKey.FEATURE_FLAGS} - actions={actions} - > - {hasFlags ? ( - <CardContainer numCols={columnTwo.length ? 2 : 1}> - <KeyValueData.Card expandLeft contentItems={columnOne} /> - <KeyValueData.Card expandLeft contentItems={columnTwo} /> - </CardContainer> - ) : ( - <StyledEmptyStateWarning withIcon> - {t('No feature flags were found for this event')} - </StyledEmptyStateWarning> - )} - </InterimSection> - </ErrorBoundary> + <InterimSection + help={t( + "The last 100 flags evaluated in the user's session leading up to this event." + )} + isHelpHoverable + title={t('Feature Flags')} + type={SectionKey.FEATURE_FLAGS} + actions={actions} + > + {hasFlags ? ( + <CardContainer numCols={columnTwo.length ? 
2 : 1}> + <KeyValueData.Card expandLeft contentItems={columnOne} /> + <KeyValueData.Card expandLeft contentItems={columnTwo} /> + </CardContainer> + ) : ( + <StyledEmptyStateWarning withIcon> + {t('No feature flags were found for this event')} + </StyledEmptyStateWarning> + )} + </InterimSection> ); } diff --git a/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx b/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx index b1c9ef98b4970c..d7f38d6317839c 100644 --- a/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx +++ b/static/app/components/events/featureFlags/featureFlagDrawer.spec.tsx @@ -30,6 +30,14 @@ async function renderFlagDrawer() { describe('FeatureFlagDrawer', function () { beforeEach(function () { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/issues/1/events/', + body: [], + }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/flags/logs/', + body: {data: []}, + }); MockApiClient.addMockResponse({ url: `/organizations/org-slug/issues/1/tags/`, body: TagsFixture(), diff --git a/static/app/components/events/featureFlags/featureFlagInlineCTA.spec.tsx b/static/app/components/events/featureFlags/featureFlagInlineCTA.spec.tsx new file mode 100644 index 00000000000000..c509ede0025661 --- /dev/null +++ b/static/app/components/events/featureFlags/featureFlagInlineCTA.spec.tsx @@ -0,0 +1,93 @@ +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; + +import FeatureFlagInlineCTA from 'sentry/components/events/featureFlags/featureFlagInlineCTA'; + +describe('featureFlagInlineCTA', () => { + beforeEach(() => { + MockApiClient.clearMockResponses(); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + body: {data: {dismissed_ts: null}}, + }); + }); + + it('shows an onboarding banner that may be dismissed', async () => { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + body: {data: {}}, + }); + const dismissMock = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + method: 'PUT', + }); + + render(<FeatureFlagInlineCTA projectId="123" />); + expect(await screen.findByText('Set Up Feature Flags')).toBeInTheDocument(); + + // Open the snooze or dismiss dropdown + await userEvent.click(screen.getByTestId('icon-close')); + expect(screen.getByText('Dismiss')).toBeInTheDocument(); + expect(screen.getByText('Snooze')).toBeInTheDocument(); + + // Click dismiss + await userEvent.click(screen.getByRole('menuitemradio', {name: 'Dismiss'})); + expect(dismissMock).toHaveBeenCalledWith( + '/organizations/org-slug/prompts-activity/', + expect.objectContaining({ + data: expect.objectContaining({ + feature: 'issue_feature_flags_inline_onboarding', + status: 'dismissed', + }), + }) + ); + expect(screen.queryByText('Set Up Feature Flags')).not.toBeInTheDocument(); + }); + + it('shows an onboarding banner that may be snoozed', async () => { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + body: {data: {}}, + }); + const snoozeMock = MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + method: 'PUT', + }); + + render(<FeatureFlagInlineCTA projectId="123" />); + expect(await screen.findByText('Set Up Feature Flags')).toBeInTheDocument(); + + // Open the snooze or dismiss dropdown + await userEvent.click(screen.getByTestId('icon-close')); + expect(screen.getByText('Dismiss')).toBeInTheDocument(); + 
expect(screen.getByText('Snooze')).toBeInTheDocument(); + + // Click snooze + await userEvent.click(screen.getByRole('menuitemradio', {name: 'Snooze'})); + expect(snoozeMock).toHaveBeenCalledWith( + '/organizations/org-slug/prompts-activity/', + expect.objectContaining({ + data: expect.objectContaining({ + feature: 'issue_feature_flags_inline_onboarding', + status: 'snoozed', + }), + }) + ); + expect(screen.queryByText('Set Up Feature Flags')).not.toBeInTheDocument(); + }); + + it('does not render if already dismissed', () => { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/prompts-activity/', + body: { + data: { + feature: 'issue_feature_flags_inline_onboarding', + status: 'dismissed', + dismissed_ts: 3, + }, + }, + }); + + render(<FeatureFlagInlineCTA projectId="123" />); + expect(screen.queryByText('Set Up Feature Flags')).not.toBeInTheDocument(); + }); +}); diff --git a/static/app/components/events/featureFlags/featureFlagInlineCTA.tsx b/static/app/components/events/featureFlags/featureFlagInlineCTA.tsx new file mode 100644 index 00000000000000..3613830bb7cb37 --- /dev/null +++ b/static/app/components/events/featureFlags/featureFlagInlineCTA.tsx @@ -0,0 +1,167 @@ +import styled from '@emotion/styled'; + +import {usePrompt} from 'sentry/actionCreators/prompts'; +import {Button, LinkButton} from 'sentry/components/button'; +import ButtonBar from 'sentry/components/buttonBar'; +import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import {useFeatureFlagOnboarding} from 'sentry/components/events/featureFlags/useFeatureFlagOnboarding'; +import {IconClose, IconMegaphone} from 'sentry/icons'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {trackAnalytics} from 'sentry/utils/analytics'; +import {useFeedbackForm} from 'sentry/utils/useFeedbackForm'; +import useOrganization from 'sentry/utils/useOrganization'; +import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; +import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; + +export default function FeatureFlagInlineCTA({projectId}: {projectId: string}) { + const organization = useOrganization(); + const {activateSidebar} = useFeatureFlagOnboarding(); + + function handleSetupButtonClick(e) { + trackAnalytics('flags.setup_modal_opened', {organization}); + trackAnalytics('flags.cta_setup_button_clicked', {organization}); + activateSidebar(e); + } + + const {isLoading, isError, isPromptDismissed, dismissPrompt, snoozePrompt} = usePrompt({ + feature: 'issue_feature_flags_inline_onboarding', + organization, + projectId, + daysToSnooze: 7, + }); + + const openForm = useFeedbackForm(); + const feedbackButton = openForm ? ( + <Button + aria-label={t('Give feedback on the feature flag section')} + icon={<IconMegaphone />} + size={'xs'} + onClick={() => + openForm({ + messagePlaceholder: t('How can we make feature flags work better for you?'), + tags: { + ['feedback.source']: 'issue_details_feature_flags', + ['feedback.owner']: 'replay', + }, + }) + } + > + {t('Give Feedback')} + </Button> + ) : null; + + if (isLoading || isError || isPromptDismissed) { + return null; + } + + const actions = <ButtonBar gap={1}>{feedbackButton}</ButtonBar>; + + return ( + <InterimSection + help={t( + "The last 100 flags evaluated in the user's session leading up to this event." 
+ )} + isHelpHoverable + title={t('Feature Flags')} + type={SectionKey.FEATURE_FLAGS} + actions={actions} + > + <BannerWrapper> + <div> + <BannerTitle>{t('Set Up Feature Flags')}</BannerTitle> + <BannerDescription> + {t( + 'Want to know which feature flags were associated with this error? Set up your feature flag integration.' + )} + </BannerDescription> + <ActionButton> + <Button onClick={handleSetupButtonClick} priority="primary"> + {t('Set Up Now')} + </Button> + <LinkButton + priority="default" + href="https://docs.sentry.io/product/explore/feature-flags/" + external + > + {t('Read More')} + </LinkButton> + </ActionButton> + </div> + <CloseDropdownMenu + position="bottom-end" + triggerProps={{ + showChevron: false, + borderless: true, + icon: <IconClose color="subText" />, + }} + size="xs" + items={[ + { + key: 'dismiss', + label: t('Dismiss'), + onAction: () => { + dismissPrompt(); + trackAnalytics('flags.cta_dismissed', { + organization, + type: 'dismiss', + }); + }, + }, + { + key: 'snooze', + label: t('Snooze'), + onAction: () => { + snoozePrompt(); + trackAnalytics('flags.cta_dismissed', { + organization, + type: 'snooze', + }); + }, + }, + ]} + /> + </BannerWrapper> + </InterimSection> + ); +} + +const BannerTitle = styled('div')` + font-size: ${p => p.theme.fontSizeExtraLarge}; + margin-bottom: ${space(1)}; + font-weight: ${p => p.theme.fontWeightBold}; +`; + +const BannerDescription = styled('div')` + margin-bottom: ${space(1.5)}; + max-width: 340px; +`; + +const CloseDropdownMenu = styled(DropdownMenu)` + position: absolute; + display: block; + top: ${space(1)}; + right: ${space(1)}; + color: ${p => p.theme.white}; + cursor: pointer; + z-index: 1; +`; + +const ActionButton = styled('div')` + display: flex; + gap: ${space(1)}; +`; + +const BannerWrapper = styled('div')` + position: relative; + border: 1px solid ${p => p.theme.border}; + border-radius: ${p => p.theme.borderRadius}; + padding: ${space(2)}; + margin: ${space(1)} 0; + background: linear-gradient( + 90deg, + ${p => p.theme.backgroundSecondary}00 0%, + ${p => p.theme.backgroundSecondary}FF 70%, + ${p => p.theme.backgroundSecondary}FF 100% + ); +`; diff --git a/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx b/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx new file mode 100644 index 00000000000000..2ec92f0865c5e7 --- /dev/null +++ b/static/app/components/events/featureFlags/featureFlagOnboardingLayout.tsx @@ -0,0 +1,139 @@ +import {useMemo, useState} from 'react'; +import styled from '@emotion/styled'; + +import Alert from 'sentry/components/alert'; +import {Button} from 'sentry/components/button'; +import {Flex} from 'sentry/components/container/flex'; +import OnboardingIntegrationSection from 'sentry/components/events/featureFlags/onboardingIntegrationSection'; +import {AuthTokenGeneratorProvider} from 'sentry/components/onboarding/gettingStartedDoc/authTokenGenerator'; +import type {OnboardingLayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/onboardingLayout'; +import {Step} from 'sentry/components/onboarding/gettingStartedDoc/step'; +import type {DocsParams} from 'sentry/components/onboarding/gettingStartedDoc/types'; +import {useSourcePackageRegistries} from 'sentry/components/onboarding/gettingStartedDoc/useSourcePackageRegistries'; +import {useUrlPlatformOptions} from 'sentry/components/onboarding/platformOptionsControl'; +import {t} from 'sentry/locale'; +import ConfigStore from 'sentry/stores/configStore'; +import {useLegacyStore} from 
'sentry/stores/useLegacyStore'; +import {space} from 'sentry/styles/space'; +import useApi from 'sentry/utils/useApi'; +import useOrganization from 'sentry/utils/useOrganization'; + +interface FeatureFlagOnboardingLayoutProps extends OnboardingLayoutProps { + integration?: string; + provider?: string; + skipConfig?: boolean; +} + +export function FeatureFlagOnboardingLayout({ + docsConfig, + dsn, + platformKey, + projectId, + projectSlug, + projectKeyId, + configType = 'onboarding', + integration = '', + provider = '', + skipConfig, +}: FeatureFlagOnboardingLayoutProps) { + const api = useApi(); + const organization = useOrganization(); + const {isPending: isLoadingRegistry, data: registryData} = + useSourcePackageRegistries(organization); + const selectedOptions = useUrlPlatformOptions(docsConfig.platformOptions); + const {isSelfHosted, urlPrefix} = useLegacyStore(ConfigStore); + const [skipSteps, setSkipSteps] = useState(skipConfig); + + const {steps} = useMemo(() => { + const doc = docsConfig[configType] ?? docsConfig.onboarding; + + const docParams: DocsParams<any> = { + api, + projectKeyId, + dsn, + organization, + platformKey, + projectId, + projectSlug, + isFeedbackSelected: false, + isPerformanceSelected: false, + isProfilingSelected: false, + isReplaySelected: false, + sourcePackageRegistries: { + isLoading: isLoadingRegistry, + data: registryData, + }, + platformOptions: selectedOptions, + isSelfHosted, + urlPrefix, + featureFlagOptions: { + integration, + }, + }; + + return { + steps: [...doc.install(docParams), ...doc.configure(docParams)], + }; + }, [ + docsConfig, + dsn, + isLoadingRegistry, + organization, + platformKey, + projectId, + projectSlug, + registryData, + selectedOptions, + configType, + urlPrefix, + isSelfHosted, + api, + projectKeyId, + integration, + ]); + + return ( + <AuthTokenGeneratorProvider projectSlug={projectSlug}> + <Wrapper> + {!skipConfig ? null : ( + <Alert type="info" showIcon> + <Flex gap={space(3)}> + {t('Feature flag integration detected. Please follow the remaining steps.')} + <Button onClick={() => setSkipSteps(!skipSteps)}> + {skipSteps ? t('Show Full Guide') : t('Hide Full Guide')} + </Button> + </Flex> + </Alert> + )} + {!skipSteps && ( + <Steps> + {steps.map(step => ( + <Step key={step.title ?? 
step.type} {...step} /> + ))} + </Steps> + )} + <OnboardingIntegrationSection provider={provider} integration={integration} /> + </Wrapper> + </AuthTokenGeneratorProvider> + ); +} + +const Steps = styled('div')` + display: flex; + flex-direction: column; + gap: 1.5rem; +`; + +const Wrapper = styled('div')` + h4 { + margin-bottom: 0.5em; + } + && { + p { + margin-bottom: 0; + } + h5 { + margin-bottom: 0; + } + } +`; diff --git a/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx b/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx new file mode 100644 index 00000000000000..ea3a6dbd5551ad --- /dev/null +++ b/static/app/components/events/featureFlags/featureFlagOnboardingSidebar.tsx @@ -0,0 +1,411 @@ +import type {ReactNode} from 'react'; +import {Fragment, useMemo, useState} from 'react'; +import styled from '@emotion/styled'; + +import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right.svg'; + +import {LinkButton} from 'sentry/components/button'; +import {CompactSelect} from 'sentry/components/compactSelect'; +import {FeatureFlagOnboardingLayout} from 'sentry/components/events/featureFlags/featureFlagOnboardingLayout'; +import {FLAG_HASH_SKIP_CONFIG} from 'sentry/components/events/featureFlags/useFeatureFlagOnboarding'; +import { + IntegrationOptions, + ProviderOptions, +} from 'sentry/components/events/featureFlags/utils'; +import RadioGroup from 'sentry/components/forms/controls/radioGroup'; +import IdBadge from 'sentry/components/idBadge'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; +import {useLoadGettingStarted} from 'sentry/components/onboarding/gettingStartedDoc/utils/useLoadGettingStarted'; +import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; +import type {CommonSidebarProps} from 'sentry/components/sidebar/types'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import TextOverflow from 'sentry/components/textOverflow'; +import {featureFlagOnboardingPlatforms} from 'sentry/data/platformCategories'; +import platforms, {otherPlatform} from 'sentry/data/platforms'; +import {t, tct} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {SelectValue} from 'sentry/types/core'; +import type {Project} from 'sentry/types/project'; +import useOrganization from 'sentry/utils/useOrganization'; +import useUrlParams from 'sentry/utils/useUrlParams'; + +function FeatureFlagOnboardingSidebar(props: CommonSidebarProps) { + const {currentPanel, collapsed, hidePanel, orientation} = props; + const organization = useOrganization(); + + const isActive = currentPanel === SidebarPanelKey.FEATURE_FLAG_ONBOARDING; + const hasProjectAccess = organization.access.includes('project:read'); + + const { + hasDocs, + projects, + allProjects, + currentProject, + setCurrentProject, + supportedProjects, + unsupportedProjects, + } = useCurrentProjectState({ + currentPanel, + targetPanel: SidebarPanelKey.FEATURE_FLAG_ONBOARDING, + onboardingPlatforms: featureFlagOnboardingPlatforms, + allPlatforms: featureFlagOnboardingPlatforms, + }); + + const projectSelectOptions = useMemo(() => { + const supportedProjectItems: SelectValue<string>[] = supportedProjects.map( + project => { + return { + value: project.id, + textValue: project.id, + label: ( + <StyledIdBadge project={project} avatarSize={16} hideOverflow disableLink /> + ), + }; + } + ); + + const 
unsupportedProjectItems: SelectValue<string>[] = unsupportedProjects.map( + project => { + return { + value: project.id, + textValue: project.id, + label: ( + <StyledIdBadge project={project} avatarSize={16} hideOverflow disableLink /> + ), + disabled: true, + }; + } + ); + return [ + { + label: t('Supported'), + options: supportedProjectItems, + }, + { + label: t('Unsupported'), + options: unsupportedProjectItems, + }, + ]; + }, [supportedProjects, unsupportedProjects]); + + const selectedProject = currentProject ?? projects[0] ?? allProjects[0]; + if (!isActive || !hasProjectAccess || !selectedProject) { + return null; + } + + return ( + <TaskSidebarPanel + orientation={orientation} + collapsed={collapsed} + hidePanel={hidePanel} + > + <TopRightBackgroundImage src={HighlightTopRightPattern} /> + <TaskList> + <Heading>{t('Debug Issues with Feature Flag Context')}</Heading> + <HeaderActions> + <div + onClick={e => { + // we need to stop bubbling the CompactSelect click event + // failing to do so will cause the sidebar panel to close + // the event.target will be unmounted by the time the panel listener + // receives the event and assume the click was outside the panel + e.stopPropagation(); + }} + > + <CompactSelect + triggerLabel={ + currentProject ? ( + <StyledIdBadge + project={currentProject} + avatarSize={16} + hideOverflow + disableLink + /> + ) : ( + t('Select a project') + ) + } + value={currentProject?.id} + onChange={opt => + setCurrentProject(allProjects.find(p => p.id === opt.value)) + } + triggerProps={{'aria-label': currentProject?.slug}} + options={projectSelectOptions} + position="bottom-end" + /> + </div> + </HeaderActions> + <OnboardingContent currentProject={selectedProject} hasDocs={hasDocs} /> + </TaskList> + </TaskSidebarPanel> + ); +} + +function OnboardingContent({ + currentProject, + hasDocs, +}: { + currentProject: Project; + hasDocs: boolean; +}) { + const organization = useOrganization(); + + // useMemo is needed to remember the original hash + // in case window.location.hash disappears + const ORIGINAL_HASH = useMemo(() => { + return window.location.hash; + }, []); + const skipConfig = ORIGINAL_HASH === FLAG_HASH_SKIP_CONFIG; + const openFeatureProviders = [ProviderOptions.LAUNCHDARKLY]; + const sdkProviders = [ProviderOptions.LAUNCHDARKLY]; + + // First dropdown: OpenFeature providers + const openFeatureProviderOptions = openFeatureProviders.map(provider => { + return { + value: provider, + textValue: provider, + label: <TextOverflow>{provider}</TextOverflow>, + }; + }); + + const [openFeatureProvider, setOpenFeatureProvider] = useState<{ + value: string; + label?: ReactNode; + textValue?: string; + }>(openFeatureProviderOptions[0]); + + // Second dropdown: other SDK providers + const sdkProviderOptions = sdkProviders.map(provider => { + return { + value: provider, + textValue: provider, + label: <TextOverflow>{provider}</TextOverflow>, + }; + }); + + const [sdkProvider, setsdkProvider] = useState<{ + value: string; + label?: ReactNode; + textValue?: string; + }>(sdkProviderOptions[0]); + + const defaultTab: string = 'openFeature'; + const {getParamValue: setupMode, setParamValue: setSetupMode} = useUrlParams( + 'mode', + defaultTab + ); + + const currentPlatform = currentProject.platform + ? platforms.find(p => p.id === currentProject.platform) ?? 
otherPlatform + : otherPlatform; + + const { + docs, + dsn, + isLoading: isProjKeysLoading, + projectKeyId, + } = useLoadGettingStarted({ + platform: currentPlatform, + projSlug: currentProject.slug, + orgSlug: organization.slug, + productType: 'featureFlags', + }); + + const radioButtons = ( + <Header> + <StyledRadioGroup + label="mode" + choices={[ + [ + 'openFeature', + <PlatformSelect key="platform-select"> + {tct('I use the OpenFeature SDK using a provider from [providerSelect]', { + providerSelect: ( + <CompactSelect + triggerLabel={openFeatureProvider.label} + value={openFeatureProvider.value} + onChange={setOpenFeatureProvider} + options={openFeatureProviderOptions} + position="bottom-end" + key={openFeatureProvider.textValue} + disabled={setupMode() === 'other'} + /> + ), + })} + </PlatformSelect>, + ], + [ + 'other', + <PlatformSelect key="platform-select"> + {tct('I use an SDK from [providerSelect]', { + providerSelect: ( + <CompactSelect + triggerLabel={sdkProvider.label} + value={sdkProvider.value} + onChange={setsdkProvider} + options={sdkProviderOptions} + position="bottom-end" + key={sdkProvider.textValue} + disabled={setupMode() === 'openFeature'} + /> + ), + })} + </PlatformSelect>, + ], + ]} + value={setupMode()} + onChange={value => { + setSetupMode(value); + window.location.hash = ORIGINAL_HASH; + }} + /> + </Header> + ); + + if (isProjKeysLoading) { + return ( + <Fragment> + {radioButtons} + <LoadingIndicator /> + </Fragment> + ); + } + + const doesNotSupportFeatureFlags = currentProject.platform + ? !featureFlagOnboardingPlatforms.includes(currentProject.platform) + : true; + + if (doesNotSupportFeatureFlags) { + return ( + <Fragment> + <div> + {tct( + 'Feature Flags isn’t available for your [platform] project. It is currently only available for Python and JavaScript projects.', + {platform: currentPlatform?.name || currentProject.slug} + )} + </div> + <div> + <LinkButton + size="sm" + href="https://docs.sentry.io/product/explore/feature-flags/" + external + > + {t('Go to Sentry Documentation')} + </LinkButton> + </div> + </Fragment> + ); + } + + // No platform, docs import failed, no DSN, or the platform doesn't have onboarding yet + if (!currentPlatform || !docs || !dsn || !hasDocs || !projectKeyId) { + return ( + <Fragment> + <div> + {tct( + 'Fiddlesticks. This checklist isn’t available for your [project] project yet, but for now, go to Sentry docs for installation details.', + {project: currentProject.slug} + )} + </div> + <div> + <LinkButton + size="sm" + href="https://docs.sentry.io/platforms/python/feature-flags/" + external + > + {t('Read Docs')} + </LinkButton> + </div> + </Fragment> + ); + } + + return ( + <Fragment> + {radioButtons} + <FeatureFlagOnboardingLayout + skipConfig={skipConfig} + docsConfig={docs} + dsn={dsn} + projectKeyId={projectKeyId} + activeProductSelection={[]} + platformKey={currentPlatform.id} + projectId={currentProject.id} + projectSlug={currentProject.slug} + integration={ + // either OpenFeature or the SDK selected from the second dropdown + setupMode() === 'openFeature' + ? IntegrationOptions.OPENFEATURE + : sdkProvider.value + } + provider={ + // dropdown value (from either dropdown) + setupMode() === 'openFeature' ? 
openFeatureProvider.value : sdkProvider.value + } + configType="featureFlagOnboarding" + /> + </Fragment> + ); +} + +const TaskSidebarPanel = styled(SidebarPanel)` + width: 600px; + max-width: 100%; +`; + +const TopRightBackgroundImage = styled('img')` + position: absolute; + top: 0; + right: 0; + width: 60%; + user-select: none; +`; + +const TaskList = styled('div')` + display: grid; + grid-auto-flow: row; + grid-template-columns: 100%; + gap: ${space(1)}; + margin: 50px ${space(4)} ${space(4)} ${space(4)}; +`; + +const Heading = styled('div')` + display: flex; + color: ${p => p.theme.activeText}; + font-size: ${p => p.theme.fontSizeExtraSmall}; + text-transform: uppercase; + font-weight: ${p => p.theme.fontWeightBold}; + line-height: 1; + margin-top: ${space(3)}; +`; + +const StyledIdBadge = styled(IdBadge)` + overflow: hidden; + white-space: nowrap; + flex-shrink: 1; +`; + +const HeaderActions = styled('div')` + display: flex; + flex-direction: row; + justify-content: space-between; + gap: ${space(3)}; +`; + +const PlatformSelect = styled('div')` + display: flex; + gap: ${space(1)}; + align-items: center; + flex-wrap: wrap; +`; + +const StyledRadioGroup = styled(RadioGroup)` + padding: ${space(1)} 0; +`; + +const Header = styled('div')` + padding: ${space(1)} 0; +`; + +export default FeatureFlagOnboardingSidebar; diff --git a/static/app/components/events/featureFlags/onboardingIntegrationSection.tsx b/static/app/components/events/featureFlags/onboardingIntegrationSection.tsx new file mode 100644 index 00000000000000..aa67e54383bce2 --- /dev/null +++ b/static/app/components/events/featureFlags/onboardingIntegrationSection.tsx @@ -0,0 +1,184 @@ +import {Fragment, useState} from 'react'; +import styled from '@emotion/styled'; + +import { + addErrorMessage, + addLoadingMessage, + addSuccessMessage, +} from 'sentry/actionCreators/indicator'; +import {hasEveryAccess} from 'sentry/components/acl/access'; +import Alert from 'sentry/components/alert'; +import {Button} from 'sentry/components/button'; +import {PROVIDER_OPTION_TO_URLS} from 'sentry/components/events/featureFlags/utils'; +import Input from 'sentry/components/input'; +import ExternalLink from 'sentry/components/links/externalLink'; +import TextCopyInput from 'sentry/components/textCopyInput'; +import {Tooltip} from 'sentry/components/tooltip'; +import {IconCheckmark, IconWarning} from 'sentry/icons'; +import {t, tct} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {handleXhrErrorResponse} from 'sentry/utils/handleXhrErrorResponse'; +import {useMutation, useQueryClient} from 'sentry/utils/queryClient'; +import type RequestError from 'sentry/utils/requestError/requestError'; +import useApi from 'sentry/utils/useApi'; +import useOrganization from 'sentry/utils/useOrganization'; +import {makeFetchSecretQueryKey} from 'sentry/views/settings/featureFlags'; +import type { + CreateSecretQueryVariables, + CreateSecretResponse, +} from 'sentry/views/settings/featureFlags/newProviderForm'; + +export default function OnboardingIntegrationSection({ + provider, + integration, +}: { + integration: string; + provider: string; +}) { + const api = useApi(); + const queryClient = useQueryClient(); + const organization = useOrganization(); + + const {mutate: submitSecret, isPending} = useMutation< + CreateSecretResponse, + RequestError, + CreateSecretQueryVariables + >({ + mutationFn: ({secret}) => { + addLoadingMessage(); + return api.requestPromise( + `/organizations/${organization.slug}/flags/signing-secrets/`, + { + 
method: 'POST', + data: { + provider: provider.toLowerCase(), + secret, + }, + } + ); + }, + + onSuccess: () => { + addSuccessMessage(t('Added provider and secret.')); + setSecretSaved(true); + queryClient.invalidateQueries({ + queryKey: makeFetchSecretQueryKey({orgSlug: organization.slug}), + }); + }, + onError: error => { + const message = t('Failed to add provider or secret.'); + setSecretSaved(false); + handleXhrErrorResponse(message, error); + addErrorMessage(message); + }, + }); + + const [secretSaved, setSecretSaved] = useState(false); + const [secret, setSecret] = useState(''); + const [storedProvider, setStoredProvider] = useState(provider); + const [storedIntegration, setStoredIntegration] = useState(integration); + + if (provider !== storedProvider || integration !== storedIntegration) { + setStoredProvider(provider); + setStoredIntegration(integration); + setSecret(''); + setSecretSaved(false); + } + + const canRead = hasEveryAccess(['org:read'], {organization}); + const canWrite = hasEveryAccess(['org:write'], {organization}); + const canAdmin = hasEveryAccess(['org:admin'], {organization}); + const hasAccess = canRead || canWrite || canAdmin; + + return ( + <Fragment> + <h4 style={{marginTop: space(4)}}>{t('Integrate Feature Flag Service')}</h4> + <IntegrationSection> + <SubSection> + <div> + {tct( + "Create a webhook integration with your [link:feature flag service]. When you do so, you'll need to enter a URL, which you can find below.", + {link: <ExternalLink href={PROVIDER_OPTION_TO_URLS[provider]} />} + )} + </div> + <InputTitle>{t('Webhook URL')}</InputTitle> + <TextCopyInput + style={{padding: '20px'}} + aria-label={t('Webhook URL')} + size="sm" + > + {`https://sentry.io/api/0/organizations/${organization.slug}/flags/hooks/provider/${provider.toLowerCase()}/`} + </TextCopyInput> + </SubSection> + <SubSection> + <div> + {t( + "During the process of creating a webhook integration, you'll be given the option to sign the webhook. This is an auto-generated secret code that Sentry requires to verify requests from your feature flag service. Paste the secret below." + )} + </div> + <InputTitle>{t('Secret')}</InputTitle> + <InputArea> + <Input + maxLength={32} + minLength={32} + required + value={secret} + type="text" + placeholder={t('Secret')} + onChange={e => setSecret(e.target.value)} + /> + <Tooltip + title={t('You must be an organization member to add a secret.')} + disabled={hasAccess} + > + <Button + priority="default" + onClick={() => submitSecret({provider, secret})} + disabled={secret.length < 32 || secret === '' || !hasAccess || isPending} + > + {t('Save Secret')} + </Button> + </Tooltip> + </InputArea> + {secretSaved ? ( + <StyledAlert showIcon type="success" icon={<IconCheckmark />}> + {t('Secret verified.')} + </StyledAlert> + ) : secret ? 
( + <StyledAlert showIcon type="warning" icon={<IconWarning />}> + {t('Make sure the secret is 32 characters long.')} + </StyledAlert> + ) : null} + </SubSection> + </IntegrationSection> + </Fragment> + ); +} + +const InputTitle = styled('div')` + font-weight: bold; +`; + +const InputArea = styled('div')` + display: flex; + flex-direction: row; + gap: ${space(1)}; + align-items: center; +`; + +const IntegrationSection = styled('div')` + display: flex; + flex-direction: column; + gap: ${space(3)}; + margin: ${space(3)} 0; +`; + +const SubSection = styled('div')` + display: flex; + gap: ${space(1)}; + flex-direction: column; +`; + +const StyledAlert = styled(Alert)` + margin: ${space(1.5)} 0 0 0; +`; diff --git a/static/app/components/events/featureFlags/setupIntegrationModal.tsx b/static/app/components/events/featureFlags/setupIntegrationModal.tsx deleted file mode 100644 index aa66ca627d75e2..00000000000000 --- a/static/app/components/events/featureFlags/setupIntegrationModal.tsx +++ /dev/null @@ -1,215 +0,0 @@ -import {Fragment, useCallback, useState} from 'react'; -import {css} from '@emotion/react'; -import styled from '@emotion/styled'; - -import type {ModalRenderProps} from 'sentry/actionCreators/modal'; -import Alert from 'sentry/components/alert'; -import {Button, LinkButton} from 'sentry/components/button'; -import ButtonBar from 'sentry/components/buttonBar'; -import SelectField from 'sentry/components/forms/fields/selectField'; -import type {Data} from 'sentry/components/forms/types'; -import TextCopyInput from 'sentry/components/textCopyInput'; -import {IconWarning} from 'sentry/icons'; -import {t} from 'sentry/locale'; -import OrganizationStore from 'sentry/stores/organizationStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import {space} from 'sentry/styles/space'; -import {defined} from 'sentry/utils'; -import useApi from 'sentry/utils/useApi'; - -export type ChildrenProps<T> = { - Body: (props: {children: React.ReactNode}) => ReturnType<ModalRenderProps['Body']>; - Footer: () => ReturnType<ModalRenderProps['Footer']>; - Header: (props: {children: React.ReactNode}) => ReturnType<ModalRenderProps['Header']>; - state: T; -}; - -interface State { - provider: string; - url: string | undefined; -} - -function useGenerateAuthToken({ - state, - orgSlug, -}: { - orgSlug: string | undefined; - state: State; -}) { - const api = useApi(); - const date = new Date().toISOString(); - - const createToken = async () => - await api.requestPromise(`/organizations/${orgSlug}/org-auth-tokens/`, { - method: 'POST', - data: { - name: `${state.provider} Token ${date}`, - }, - }); - - return {createToken}; -} - -export function SetupIntegrationModal<T extends Data>({ - Header, - Body, - Footer, - closeModal, -}: ModalRenderProps) { - const [state, setState] = useState<State>({ - provider: 'LaunchDarkly', - url: undefined, - }); - const {organization} = useLegacyStore(OrganizationStore); - const {createToken} = useGenerateAuthToken({state, orgSlug: organization?.slug}); - - const handleDone = useCallback(() => { - closeModal(); - }, [closeModal]); - - const ModalHeader = useCallback( - ({children: headerChildren}: {children: React.ReactNode}) => { - return ( - <Header closeButton> - <h3>{headerChildren}</h3> - </Header> - ); - }, - [Header] - ); - - const ModalFooter = useCallback(() => { - return ( - <Footer> - <StyledButtonBar gap={1}> - <LinkButton - priority="default" - href="https://docs.sentry.io/product/issues/issue-details/#feature-flags" - external - > - {t('Read 
Docs')} - </LinkButton> - <Button - priority="primary" - title={!defined(state.provider) && t('Required fields must be filled out.')} - onClick={handleDone} - disabled={!defined(state.provider)} - > - {t('Done')} - </Button> - </StyledButtonBar> - </Footer> - ); - }, [Footer, handleDone, state]); - - const ModalBody = useCallback( - ({children: bodyChildren}: Parameters<ChildrenProps<T>['Body']>[0]) => { - return <Body>{bodyChildren}</Body>; - }, - [Body] - ); - - const onGenerateURL = useCallback(async () => { - const newToken = await createToken(); - const encodedToken = encodeURI(newToken.token); - const provider = state.provider.toLowerCase(); - - setState(prevState => { - return { - ...prevState, - url: `https://sentry.io/api/0/organizations/${organization?.slug}/flags/hooks/provider/${provider}/token/${encodedToken}/`, - }; - }); - }, [createToken, organization, state.provider]); - - const providers = ['LaunchDarkly']; - - return ( - <Fragment> - <ModalHeader>{t('Set Up Feature Flag Integration')}</ModalHeader> - <ModalBody> - <SelectContainer> - <SelectField - label={t('Feature Flag Services')} - name="provider" - inline={false} - options={providers.map(integration => ({ - value: integration, - label: integration, - }))} - placeholder={t('Select a feature flag service')} - value={state.provider} - onChange={value => setState({...state, provider: value})} - flexibleControlStateSize - stacked - required - /> - <WebhookButton - priority="default" - title={!defined(state.provider) && t('You must select a provider first.')} - onClick={onGenerateURL} - disabled={!defined(state.provider) || defined(state.url)} - > - {t('Create Webhook URL')} - </WebhookButton> - </SelectContainer> - <WebhookContainer> - {t('Webhook URL')} - <TextCopyInput - style={{padding: '20px'}} - disabled={!defined(state.url)} - placeholder={t('No webhook URL created yet')} - aria-label={t('Webhook URL')} - size="sm" - > - {state.url ?? ''} - </TextCopyInput> - <InfoContainer> - {t( - 'The final step is to create a Webhook integration within your feature flag service by utilizing the Webhook URL provided in the field above.' 
- )} - <Alert showIcon type="warning" icon={<IconWarning />}> - {t('You won’t be able to access this URL once this modal is closed.')} - </Alert> - </InfoContainer> - </WebhookContainer> - </ModalBody> - <ModalFooter /> - </Fragment> - ); -} - -export const modalCss = css` - width: 100%; - max-width: 680px; -`; - -const StyledButtonBar = styled(ButtonBar)` - display: flex; - width: 100%; - justify-content: space-between; -`; - -const SelectContainer = styled('div')` - display: grid; - grid-template-columns: 1fr max-content; - align-items: center; - gap: ${space(1)}; -`; - -const WebhookButton = styled(Button)` - margin-top: ${space(1)}; -`; - -const WebhookContainer = styled('div')` - display: flex; - flex-direction: column; - gap: ${space(1)}; -`; - -const InfoContainer = styled('div')` - display: flex; - flex-direction: column; - gap: ${space(2)}; - margin-top: ${space(1)}; -`; diff --git a/static/app/components/events/featureFlags/testUtils.tsx b/static/app/components/events/featureFlags/testUtils.tsx index e4de5c67748a87..832cd223918135 100644 --- a/static/app/components/events/featureFlags/testUtils.tsx +++ b/static/app/components/events/featureFlags/testUtils.tsx @@ -4,7 +4,7 @@ import {ProjectFixture} from 'sentry-fixture/project'; import type {FeatureFlag} from 'sentry/types/event'; -export const MOCK_FLAGS: FeatureFlag[] = [ +export const MOCK_FLAGS: Required<FeatureFlag>[] = [ { flag: 'mobile-replay-ui', result: false, @@ -41,11 +41,22 @@ export const EMPTY_STATE_SECTION_PROPS = { group: GroupFixture(), }; -export const NO_FLAG_CONTEXT_SECTION_PROPS = { +export const NO_FLAG_CONTEXT_SECTION_PROPS_NO_CTA = { event: EventFixture({ id: 'abc123def456ghi789jkl', contexts: {other: {}}, + platform: 'unity', }), - project: ProjectFixture(), - group: GroupFixture(), + project: ProjectFixture({platform: 'unity'}), + group: GroupFixture({platform: 'unity'}), +}; + +export const NO_FLAG_CONTEXT_SECTION_PROPS_CTA = { + event: EventFixture({ + id: 'abc123def456ghi789jkl', + contexts: {other: {}}, + platform: 'javascript', + }), + project: ProjectFixture({platform: 'javascript'}), + group: GroupFixture({platform: 'javascript'}), }; diff --git a/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx b/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx new file mode 100644 index 00000000000000..abfe3af57313bf --- /dev/null +++ b/static/app/components/events/featureFlags/useFeatureFlagOnboarding.tsx @@ -0,0 +1,53 @@ +import {useCallback, useEffect} from 'react'; + +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import SidebarPanelStore from 'sentry/stores/sidebarPanelStore'; +import {trackAnalytics} from 'sentry/utils/analytics'; +import {useLocation} from 'sentry/utils/useLocation'; +import {useNavigate} from 'sentry/utils/useNavigate'; +import useOrganization from 'sentry/utils/useOrganization'; + +const FLAG_HASH = '#flag-sidequest'; +export const FLAG_HASH_SKIP_CONFIG = '#flag-sidequest-skip'; + +export function useFeatureFlagOnboarding() { + const location = useLocation(); + const organization = useOrganization(); + const navigate = useNavigate(); + + useEffect(() => { + if (location.hash === FLAG_HASH || location.hash === FLAG_HASH_SKIP_CONFIG) { + SidebarPanelStore.activatePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING); + trackAnalytics('flags.view-setup-sidebar', { + organization, + }); + } + }, [location.hash, organization]); + + const activateSidebar = useCallback((event: React.MouseEvent) => { + event.preventDefault(); + 
window.location.hash = FLAG_HASH; + SidebarPanelStore.activatePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING); + }, []); + + // if we detect that event.contexts.flags is set, use this hook instead + // to skip the configure step + const activateSidebarSkipConfigure = useCallback( + (event: React.MouseEvent, projectId: string) => { + event.preventDefault(); + navigate( + { + pathname: location.pathname, + // Adding the projectId will help pick the correct project in onboarding + query: {...location.query, project: projectId}, + hash: FLAG_HASH_SKIP_CONFIG, + }, + {replace: true} + ); + SidebarPanelStore.activatePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING); + }, + [navigate, location.pathname, location.query] + ); + + return {activateSidebar, activateSidebarSkipConfigure}; +} diff --git a/static/app/components/events/featureFlags/useIssueEvents.tsx b/static/app/components/events/featureFlags/useIssueEvents.tsx new file mode 100644 index 00000000000000..11c7fd95a66f3a --- /dev/null +++ b/static/app/components/events/featureFlags/useIssueEvents.tsx @@ -0,0 +1,21 @@ +import type {Event} from '@sentry/types'; + +import {useApiQuery} from 'sentry/utils/queryClient'; +import useOrganization from 'sentry/utils/useOrganization'; + +export default function useIssueEvents({issueId}: {issueId: string}) { + const organization = useOrganization(); + return useApiQuery<Event[]>( + [ + `/organizations/${organization.slug}/issues/${issueId}/events/`, + { + query: { + statsPeriod: '14d', + limit: 20, + full: true, + }, + }, + ], + {staleTime: 0} + ); +} diff --git a/static/app/components/events/featureFlags/utils.tsx b/static/app/components/events/featureFlags/utils.tsx index a560b54d3d3609..59b1814ecc91fc 100644 --- a/static/app/components/events/featureFlags/utils.tsx +++ b/static/app/components/events/featureFlags/utils.tsx @@ -112,3 +112,17 @@ export const sortedFlags = ({ return flags; } }; + +export enum ProviderOptions { + LAUNCHDARKLY = 'LaunchDarkly', +} + +export enum IntegrationOptions { + LAUNCHDARKLY = 'LaunchDarkly', + OPENFEATURE = 'OpenFeature', +} + +export const PROVIDER_OPTION_TO_URLS: Record<ProviderOptions, string> = { + [ProviderOptions.LAUNCHDARKLY]: + 'https://app.launchdarkly.com/settings/integrations/webhooks/new?q=Webhooks', +}; diff --git a/static/app/components/events/groupingInfo/groupingInfo.tsx b/static/app/components/events/groupingInfo/groupingInfo.tsx index a99e19affb6754..a78b096c85e201 100644 --- a/static/app/components/events/groupingInfo/groupingInfo.tsx +++ b/static/app/components/events/groupingInfo/groupingInfo.tsx @@ -79,7 +79,7 @@ export default function GroupingInfo({ return ( <Fragment> - {!hasStreamlinedUI && ( + {hasStreamlinedUI && ( <GroupInfoSummary event={event} group={group} projectSlug={projectSlug} /> )} <ConfigHeader> diff --git a/static/app/components/events/interfaces/breadcrumbs/index.tsx b/static/app/components/events/interfaces/breadcrumbs/index.tsx index e5f978275e3d60..b50eb5bcc00221 100644 --- a/static/app/components/events/interfaces/breadcrumbs/index.tsx +++ b/static/app/components/events/interfaces/breadcrumbs/index.tsx @@ -346,6 +346,10 @@ export const SearchAndSortWrapper = styled('div')` @media (max-width: ${p => p.theme.breakpoints.small}) { grid-template-columns: 1fr; } + + @container breadcrumbs (width < 640px) { + display: none; + } `; const LevelWrap = styled('span')` diff --git a/static/app/components/events/interfaces/debugMeta/index.tsx b/static/app/components/events/interfaces/debugMeta/index.tsx index 
d5ddc67a72c41f..9ee105aa9b2231 100644 --- a/static/app/components/events/interfaces/debugMeta/index.tsx +++ b/static/app/components/events/interfaces/debugMeta/index.tsx @@ -370,12 +370,14 @@ export function DebugMeta({data, projectSlug, groupId, event}: DebugMetaProps) { /> ); + const isJSPlatform = event.platform?.includes('javascript'); + return ( <InterimSection type={SectionKey.DEBUGMETA} - title={t('Images Loaded')} + title={isJSPlatform ? t('Source Maps Loaded') : t('Images Loaded')} help={t( - 'A list of dynamic libraries or shared objects loaded into process memory at the time of the crash. Images contribute application code that is referenced in stack traces.' + 'A list of dynamic libraries, shared objects or source maps loaded into process memory at the time of the crash. Images contribute application code that is referenced in stack traces.' )} actions={actions} initialCollapse @@ -383,7 +385,7 @@ export function DebugMeta({data, projectSlug, groupId, event}: DebugMetaProps) { {isOpen || hasStreamlinedUI ? ( <Fragment> <StyledSearchBarAction - placeholder={t('Search images loaded')} + placeholder={isJSPlatform ? t('Search source maps') : t('Search images')} onChange={value => DebugMetaStore.updateFilter(value)} query={searchTerm} filterOptions={showFilters ? filterOptions : undefined} diff --git a/static/app/components/events/interfaces/frame/contexts.spec.tsx b/static/app/components/events/interfaces/frame/contexts.spec.tsx deleted file mode 100644 index 2856687f03329a..00000000000000 --- a/static/app/components/events/interfaces/frame/contexts.spec.tsx +++ /dev/null @@ -1,137 +0,0 @@ -import {EventFixture} from 'sentry-fixture/event'; - -import {render, screen} from 'sentry-test/reactTestingLibrary'; - -import {DeviceEventContext} from 'sentry/components/events/contexts/device'; -import {commonDisplayResolutions} from 'sentry/components/events/contexts/device/utils'; -import {UserEventContext} from 'sentry/components/events/contexts/user'; -import {FILTER_MASK} from 'sentry/constants'; -import type {DeviceContext} from 'sentry/types/event'; - -describe('User', function () { - it("displays filtered values but doesn't use them for avatar", function () { - const {rerender} = render( - <UserEventContext - data={{ - id: '26', - name: FILTER_MASK, - email: '', - username: '', - ip_address: '', - data: {}, - }} - event={EventFixture()} - /> - ); - - expect(screen.getByTestId('user-context-name-value')).toHaveTextContent(FILTER_MASK); - expect(screen.getByText('?')).toBeInTheDocument(); - - rerender( - <UserEventContext - data={{ - id: '26', - name: '', - email: FILTER_MASK, - username: '', - ip_address: '', - data: {}, - }} - event={EventFixture()} - /> - ); - - expect(screen.getByTestId('user-context-email-value')).toHaveTextContent(FILTER_MASK); - expect(screen.getByText('?')).toBeInTheDocument(); - - rerender( - <UserEventContext - data={{ - id: '26', - name: '', - email: '', - username: FILTER_MASK, - ip_address: '', - data: {}, - }} - event={EventFixture()} - /> - ); - - expect(screen.getByTestId('user-context-username-value')).toHaveTextContent( - FILTER_MASK - ); - expect(screen.getByText('?')).toBeInTheDocument(); - }); -}); - -describe('Device', function () { - const device: DeviceContext = { - type: 'device', - name: 'Device Name', - screen_resolution: '3840x2160', - screen_width_pixels: 3840, - screen_height_pixels: 2160, - device_type: 'desktop', - }; - - describe('getInferredData', function () { - it('renders', function () { - render(<DeviceEventContext data={device} 
event={EventFixture()} />); - }); - - it('renders screen_resolution inferred from screen_width_pixels and screen_height_pixels', function () { - render( - <DeviceEventContext - data={{...device, screen_resolution: undefined}} - event={EventFixture()} - /> - ); - - expect( - screen.getByTestId('device-context-screen_width_pixels-value') - ).toHaveTextContent(String(device.screen_width_pixels)); - - expect( - screen.getByTestId('device-context-screen_height_pixels-value') - ).toHaveTextContent(String(device.screen_height_pixels)); - - expect( - screen.getByTestId('device-context-screen_resolution-value') - ).toHaveTextContent( - `${device.screen_resolution} (${ - commonDisplayResolutions[String(device.screen_resolution)] - })` - ); - }); - - it('renders screen_width_pixels and screen_height_pixels inferred from screen_resolution', function () { - render( - <DeviceEventContext - data={{ - ...device, - screen_width_pixels: undefined, - screen_height_pixels: undefined, - }} - event={EventFixture()} - /> - ); - - expect( - screen.getByTestId('device-context-screen_width_pixels-value') - ).toHaveTextContent(String(device.screen_width_pixels)); - - expect( - screen.getByTestId('device-context-screen_height_pixels-value') - ).toHaveTextContent(String(device.screen_height_pixels)); - - expect( - screen.getByTestId('device-context-screen_resolution-value') - ).toHaveTextContent( - `${device.screen_resolution} (${ - commonDisplayResolutions[String(device.screen_resolution)] - })` - ); - }); - }); -}); diff --git a/static/app/components/events/interfaces/frame/deprecatedLine.tsx b/static/app/components/events/interfaces/frame/deprecatedLine.tsx index 05194fdd4dc3a2..df32df23de9a06 100644 --- a/static/app/components/events/interfaces/frame/deprecatedLine.tsx +++ b/static/app/components/events/interfaces/frame/deprecatedLine.tsx @@ -290,7 +290,7 @@ export class DeprecatedLine extends Component<Props, State> { frame_count: hiddenFrameCount, is_frame_expanded: isShowFramesToggleExpanded, }} - size="xs" + size="zero" borderless onClick={e => { this.props.onShowFramesToggle?.(e); @@ -568,6 +568,7 @@ const StyledLi = styled('li')` const ToggleButton = styled(Button)` color: ${p => p.theme.subText}; + font-size: ${p => p.theme.fontSizeSmall}; font-style: italic; font-weight: ${p => p.theme.fontWeightNormal}; padding: ${space(0.25)} ${space(0.5)}; diff --git a/static/app/components/events/interfaces/nativeFrame.tsx b/static/app/components/events/interfaces/nativeFrame.tsx index 79727934bdbda6..0f598426f97037 100644 --- a/static/app/components/events/interfaces/nativeFrame.tsx +++ b/static/app/components/events/interfaces/nativeFrame.tsx @@ -387,7 +387,7 @@ function NativeFrame({ frame_count: hiddenFrameCount, is_frame_expanded: isShowFramesToggleExpanded, }} - size="xs" + size="zero" borderless onClick={e => { onShowFramesToggle?.(e); @@ -568,6 +568,7 @@ const SymbolicatorIcon = styled('div')` const ShowHideButton = styled(Button)` color: ${p => p.theme.subText}; + font-size: ${p => p.theme.fontSizeSmall}; font-style: italic; font-weight: ${p => p.theme.fontWeightNormal}; padding: ${space(0.25)} ${space(0.5)}; diff --git a/static/app/components/events/interfaces/noStackTraceMessage.tsx b/static/app/components/events/interfaces/noStackTraceMessage.tsx index cd3b236ee5c347..0cfbf1c3a2794e 100644 --- a/static/app/components/events/interfaces/noStackTraceMessage.tsx +++ b/static/app/components/events/interfaces/noStackTraceMessage.tsx @@ -6,7 +6,7 @@ type Props = { }; function NoStackTraceMessage({message}: Props) 
{ - return <Alert type="error">{message ?? t('No or unknown stacktrace')}</Alert>; + return <Alert type="muted">{message ?? t('No stacktrace found.')}</Alert>; } export default NoStackTraceMessage; diff --git a/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx b/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx index b11d431282e3da..a01c8f1e53f503 100644 --- a/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx +++ b/static/app/components/events/interfaces/performance/eventTraceView.spec.tsx @@ -5,7 +5,9 @@ import {initializeData} from 'sentry-test/performance/initializePerformanceData' import {render, screen} from 'sentry-test/reactTestingLibrary'; import {EntryType} from 'sentry/types/event'; +import {IssueCategory, IssueTitle} from 'sentry/types/group'; import type {TraceEventResponse} from 'sentry/views/issueDetails/traceTimeline/useTraceTimelineEvents'; +import {makeTraceError} from 'sentry/views/performance/newTraceDetails/traceModels/traceTreeTestUtils'; import {EventTraceView} from './eventTraceView'; @@ -29,6 +31,7 @@ describe('EventTraceView', () => { trace_id: traceId, }, }, + eventID: 'issue-5', }); const issuePlatformBody: TraceEventResponse = { data: [], @@ -51,16 +54,18 @@ describe('EventTraceView', () => { performance_issues: 1, projects: 1, transactions: 1, - transaction_child_count_map: [{'transaction.id': '1', count: 1}], + transaction_child_count_map: new Array(20) + .fill(0) + .map((_, i) => [{'transaction.id': i.toString(), count: 1}]), }, }); MockApiClient.addMockResponse({ url: `/organizations/${organization.slug}/events-trace/${traceId}/`, body: { - transactions: [ + transactions: new Array(20).fill(0).map((_, i) => [ { project_slug: project.slug, - event_id: '1', + event_id: i.toString(), children: [], sdk_name: '', start_timestamp: 0, @@ -69,10 +74,10 @@ describe('EventTraceView', () => { 'transaction.op': '', 'transaction.status': '', performance_issues: [], - errors: [], + errors: i === 5 ? 
[makeTraceError({event_id: 'issue-5'})] : [], }, - ], - orphan_errors: [], + ]), + orphan_errors: [makeTraceError()], }, }); MockApiClient.addMockResponse({ @@ -92,7 +97,38 @@ describe('EventTraceView', () => { render(<EventTraceView group={group} event={event} organization={organization} />); expect(await screen.findByText('Trace')).toBeInTheDocument(); - expect(await screen.findByText('transaction')).toBeInTheDocument(); + expect( + await screen.findByText('MaybeEncodingError: Error sending result') + ).toBeInTheDocument(); + }); + + it('still renders trace link for performance issues', async () => { + const perfGroup = GroupFixture({issueCategory: IssueCategory.PERFORMANCE}); + const perfEvent = EventFixture({ + occurrence: { + type: 1001, + issueTitle: IssueTitle.PERFORMANCE_SLOW_DB_QUERY, + }, + entries: [ + { + data: [], + type: EntryType.SPANS, + }, + ], + contexts: { + trace: { + trace_id: traceId, + }, + }, + }); + + render( + <EventTraceView group={perfGroup} event={perfEvent} organization={organization} /> + ); + expect(await screen.findByText('Trace')).toBeInTheDocument(); + expect( + await screen.findByRole('link', {name: 'View Full Trace'}) + ).toBeInTheDocument(); }); it('does not render the trace preview if it has no transactions', async () => { diff --git a/static/app/components/events/interfaces/performance/eventTraceView.tsx b/static/app/components/events/interfaces/performance/eventTraceView.tsx index 6b8266734716d6..f47817c661bb32 100644 --- a/static/app/components/events/interfaces/performance/eventTraceView.tsx +++ b/static/app/components/events/interfaces/performance/eventTraceView.tsx @@ -1,28 +1,32 @@ -import {Fragment, useMemo} from 'react'; +import {useMemo} from 'react'; import styled from '@emotion/styled'; -import ErrorBoundary from 'sentry/components/errorBoundary'; -import {ALL_ACCESS_PROJECTS} from 'sentry/constants/pageFilters'; +import {LinkButton} from 'sentry/components/button'; +import {generateTraceTarget} from 'sentry/components/quickTrace/utils'; +import {IconOpen} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Event} from 'sentry/types/event'; import {type Group, IssueCategory} from 'sentry/types/group'; import type {Organization} from 'sentry/types/organization'; -import EventView from 'sentry/utils/discover/eventView'; import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; import {SectionKey} from 'sentry/views/issueDetails/streamline/context'; import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection'; import {TraceDataSection} from 'sentry/views/issueDetails/traceDataSection'; -import {TraceViewWaterfall} from 'sentry/views/performance/newTraceDetails'; +import {IssuesTraceWaterfall} from 'sentry/views/performance/newTraceDetails/issuesTraceWaterfall'; +import {useIssuesTraceTree} from 'sentry/views/performance/newTraceDetails/traceApi/useIssuesTraceTree'; import {useTrace} from 'sentry/views/performance/newTraceDetails/traceApi/useTrace'; import {useTraceMeta} from 'sentry/views/performance/newTraceDetails/traceApi/useTraceMeta'; import {useTraceRootEvent} from 'sentry/views/performance/newTraceDetails/traceApi/useTraceRootEvent'; -import {useTraceTree} from 'sentry/views/performance/newTraceDetails/traceApi/useTraceTree'; +import {TraceViewSources} from 'sentry/views/performance/newTraceDetails/traceHeader/breadcrumbs'; import { loadTraceViewPreferences, type TracePreferencesState, } from 
'sentry/views/performance/newTraceDetails/traceState/tracePreferences'; import {TraceStateProvider} from 'sentry/views/performance/newTraceDetails/traceState/traceStateProvider'; +import {useTraceEventView} from 'sentry/views/performance/newTraceDetails/useTraceEventView'; +import {useTraceQueryParams} from 'sentry/views/performance/newTraceDetails/useTraceQueryParams'; const DEFAULT_ISSUE_DETAILS_TRACE_VIEW_PREFERENCES: TracePreferencesState = { drawer: { @@ -48,25 +52,26 @@ const DEFAULT_ISSUE_DETAILS_TRACE_VIEW_PREFERENCES: TracePreferencesState = { interface EventTraceViewInnerProps { event: Event; organization: Organization; + traceId: string; } -function EventTraceViewInner({event, organization}: EventTraceViewInnerProps) { - // Assuming profile exists, should be checked in the parent component - const traceId = event.contexts.trace!.trace_id!; - const location = useLocation(); +function EventTraceViewInner({event, organization, traceId}: EventTraceViewInnerProps) { + const timestamp = new Date(event.dateReceived).getTime() / 1e3; const trace = useTrace({ - traceSlug: traceId ? traceId : undefined, + timestamp, + traceSlug: traceId, limit: 10000, }); - const meta = useTraceMeta([{traceSlug: traceId, timestamp: undefined}]); - const tree = useTraceTree({trace, meta, replay: null}); + const params = useTraceQueryParams({ + timestamp, + }); + const meta = useTraceMeta([{traceSlug: traceId, timestamp}]); + const tree = useIssuesTraceTree({trace, meta, replay: null}); - const hasNoTransactions = meta.data?.transactions === 0; - const shouldLoadTraceRoot = !trace.isPending && trace.data && !hasNoTransactions; + const shouldLoadTraceRoot = !trace.isPending && trace.data; const rootEvent = useTraceRootEvent(shouldLoadTraceRoot ? trace.data! : null); - const preferences = useMemo( () => loadTraceViewPreferences('issue-details-trace-view-preferences') || @@ -74,64 +79,88 @@ function EventTraceViewInner({event, organization}: EventTraceViewInnerProps) { [] ); - const traceEventView = useMemo(() => { - const statsPeriod = location.query.statsPeriod as string | undefined; - // Not currently expecting start/end timestamps to be applied to this view - - return EventView.fromSavedQuery({ - id: undefined, - name: `Events with Trace ID ${traceId}`, - fields: ['title', 'event.type', 'project', 'timestamp'], - orderby: '-timestamp', - query: `trace:${traceId}`, - projects: [ALL_ACCESS_PROJECTS], - version: 2, - range: statsPeriod, - }); - }, [location.query.statsPeriod, traceId]); - - const scrollToNode = useMemo(() => { - const firstTransactionEventId = trace.data?.transactions[0]?.event_id; - return {eventId: firstTransactionEventId}; - }, [trace.data]); - - if (trace.isPending || rootEvent.isPending || !rootEvent.data || hasNoTransactions) { + const traceEventView = useTraceEventView(traceId, params); + + if (!traceId) { return null; } return ( - <Fragment> - <TraceStateProvider - initialPreferences={preferences} - preferencesStorageKey="issue-details-view-preferences" - > - <TraceViewWaterfallWrapper> - <TraceViewWaterfall - tree={tree} - trace={trace} - replay={null} - rootEvent={rootEvent} - traceSlug={undefined} - organization={organization} - traceEventView={traceEventView} - meta={meta} - source="issues" - scrollToNode={scrollToNode} - isEmbedded - /> - </TraceViewWaterfallWrapper> - </TraceStateProvider> - </Fragment> + <TraceStateProvider + initialPreferences={preferences} + preferencesStorageKey="issue-details-view-preferences" + > + <IssuesTraceContainer> + <IssuesTraceWaterfall + 
tree={tree} + trace={trace} + traceSlug={traceId} + rootEvent={rootEvent} + organization={organization} + traceEventView={traceEventView} + meta={meta} + source="issues" + replay={null} + event={event} + /> + <IssuesTraceOverlay event={event} /> + </IssuesTraceContainer> + </TraceStateProvider> ); } -interface EventTraceViewProps extends EventTraceViewInnerProps { +function IssuesTraceOverlay({event}: {event: Event}) { + const location = useLocation(); + const organization = useOrganization(); + + const traceTarget = generateTraceTarget( + event, + organization, + { + ...location, + query: { + ...location.query, + groupId: event.groupID, + }, + }, + TraceViewSources.ISSUE_DETAILS + ); + + return ( + <IssuesTraceOverlayContainer> + <LinkButton + size="sm" + icon={<IconOpen />} + aria-label={t('Open Trace')} + to={traceTarget} + /> + </IssuesTraceOverlayContainer> + ); +} + +const IssuesTraceContainer = styled('div')` + position: relative; +`; + +const IssuesTraceOverlayContainer = styled('div')` + position: absolute; + inset: 0; + z-index: 10; + + a { + position: absolute; + top: ${space(1)}; + right: ${space(1)}; + } +`; + +interface EventTraceViewProps extends Omit<EventTraceViewInnerProps, 'traceId'> { group: Group; } export function EventTraceView({group, event, organization}: EventTraceViewProps) { - // Check trace id exists - if (!event || !event.contexts.trace?.trace_id) { + const traceId = event.contexts.trace?.trace_id; + if (!traceId) { return null; } @@ -139,37 +168,22 @@ export function EventTraceView({group, event, organization}: EventTraceViewProps const hasIssueDetailsTrace = organization.features.includes( 'issue-details-always-show-trace' ); - const hasTracePreviewFeature = hasProfilingFeature && hasIssueDetailsTrace; - - // Only display this for error or default events since performance events are handled elsewhere - if (group.issueCategory === IssueCategory.PERFORMANCE) { - return null; - } + const hasTracePreviewFeature = + hasProfilingFeature && + hasIssueDetailsTrace && + // Only display this for error or default events since performance events are handled elsewhere + group.issueCategory !== IssueCategory.PERFORMANCE; return ( - <ErrorBoundary mini> - <InterimSection type={SectionKey.TRACE} title={t('Trace')}> - <TraceContentWrapper> - <div> - <TraceDataSection event={event} /> - </div> - {hasTracePreviewFeature && ( - <EventTraceViewInner event={event} organization={organization} /> - )} - </TraceContentWrapper> - </InterimSection> - </ErrorBoundary> + <InterimSection type={SectionKey.TRACE} title={t('Trace')}> + <TraceDataSection event={event} /> + {hasTracePreviewFeature && ( + <EventTraceViewInner + event={event} + organization={organization} + traceId={traceId} + /> + )} + </InterimSection> ); } - -const TraceContentWrapper = styled('div')` - display: flex; - flex-direction: column; - gap: ${space(1)}; -`; - -const TraceViewWaterfallWrapper = styled('div')` - display: flex; - flex-direction: column; - height: 500px; -`; diff --git a/static/app/components/events/interfaces/request/index.spec.tsx b/static/app/components/events/interfaces/request/index.spec.tsx index df27c7b78df44f..2ee61131f256e3 100644 --- a/static/app/components/events/interfaces/request/index.spec.tsx +++ b/static/app/components/events/interfaces/request/index.spec.tsx @@ -1,16 +1,22 @@ import {DataScrubbingRelayPiiConfigFixture} from 'sentry-fixture/dataScrubbingRelayPiiConfig'; import {EventFixture} from 'sentry-fixture/event'; +import {UserFixture} from 'sentry-fixture/user'; import {render, 
screen, userEvent} from 'sentry-test/reactTestingLibrary'; import {textWithMarkupMatcher} from 'sentry-test/utils'; import {Request} from 'sentry/components/events/interfaces/request'; +import ConfigStore from 'sentry/stores/configStore'; import type {EntryRequest} from 'sentry/types/event'; import {EntryType} from 'sentry/types/event'; jest.unmock('prismjs'); describe('Request entry', function () { + beforeEach(() => { + ConfigStore.set('user', UserFixture()); + }); + it('display redacted data', async function () { const event = EventFixture({ entries: [ @@ -327,6 +333,38 @@ describe('Request entry', function () { ).not.toThrow(); }); + it('should remove any non-tuple values from array', function () { + const user = UserFixture(); + user.options.prefersIssueDetailsStreamlinedUI = true; + ConfigStore.set('user', user); + + const data: EntryRequest['data'] = { + apiTarget: null, + query: 'a%AFc', + data: '', + headers: [['foo', 'bar'], null], + cookies: [], + env: {}, + method: 'POST', + url: '/Home/PostIndex', + }; + const event = EventFixture({ + entries: [ + { + type: EntryType.REQUEST, + data, + }, + ], + }); + expect(() => + render(<Request event={event} data={event.entries[0].data} />, { + organization: { + relayPiiConfig: JSON.stringify(DataScrubbingRelayPiiConfigFixture()), + }, + }) + ).not.toThrow(); + }); + it("should not cause an invariant violation if data.data isn't a string", function () { const data: EntryRequest['data'] = { apiTarget: null, diff --git a/static/app/components/events/interfaces/request/index.tsx b/static/app/components/events/interfaces/request/index.tsx index 16be046eaa16aa..c1417f34507a58 100644 --- a/static/app/components/events/interfaces/request/index.tsx +++ b/static/app/components/events/interfaces/request/index.tsx @@ -251,10 +251,13 @@ function RequestDataCard({ const contentItems: KeyValueDataContentProps[] = []; if (Array.isArray(data) && data.length > 0) { - data.forEach(([key, value], i: number) => { - const valueMeta = meta?.[i] ? meta[i]?.[1] : undefined; - contentItems.push({item: {key, subject: key, value}, meta: valueMeta}); - }); + data + // Remove any non-tuple values + .filter(x => Array.isArray(x)) + .forEach(([key, value], i: number) => { + const valueMeta = meta?.[i] ? 
meta[i]?.[1] : undefined; + contentItems.push({item: {key, subject: key, value}, meta: valueMeta}); + }); } else if (typeof data === 'object') { // Spread to flatten if it's a proxy Object.entries({...data}).forEach(([key, value]) => { diff --git a/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx b/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx index 92f7cec076a058..b4c560a86af6be 100644 --- a/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx +++ b/static/app/components/events/interfaces/spans/spanDescendantGroupBar.tsx @@ -3,7 +3,7 @@ import countBy from 'lodash/countBy'; import type {SpanBarType} from 'sentry/components/performance/waterfall/constants'; import { - getSpanBarColours, + getSpanBarColors, ROW_HEIGHT, } from 'sentry/components/performance/waterfall/constants'; import {DurationPill, RowRectangle} from 'sentry/components/performance/waterfall/rowBar'; @@ -150,7 +150,7 @@ export function SpanDescendantGroupBar(props: SpanDescendantGroupBarProps) { return ( <RowRectangle style={{ - backgroundColor: getSpanBarColours(spanBarType, theme).primary, + backgroundColor: getSpanBarColors(spanBarType, theme).primary, left: `min(${toPercent(bounds.left || 0)}, calc(100% - 1px))`, width: toPercent(bounds.width || 0), }} diff --git a/static/app/components/events/interfaces/spans/spanDetail.spec.tsx b/static/app/components/events/interfaces/spans/spanDetail.spec.tsx index f3f03e2cdc85eb..be5a4df59b5b89 100644 --- a/static/app/components/events/interfaces/spans/spanDetail.spec.tsx +++ b/static/app/components/events/interfaces/spans/spanDetail.spec.tsx @@ -6,7 +6,6 @@ import {SpanFixture} from 'sentry-fixture/span'; import {render, screen} from 'sentry-test/reactTestingLibrary'; import SpanDetail from 'sentry/components/events/interfaces/spans/spanDetail'; -import {TransactionProfileIdProvider} from 'sentry/components/profiling/transactionProfileIdProvider'; import type {EventTransaction} from 'sentry/types/event'; describe('SpanDetail', function () { @@ -36,33 +35,20 @@ describe('SpanDetail', function () { description: 'SELECT * FROM users;', }); - beforeEach(function () { - MockApiClient.addMockResponse({ - url: `/organizations/${organization.slug}/events/`, - method: 'GET', - }); - }); - function renderSpanDetail(props: Partial<React.ComponentProps<typeof SpanDetail>>) { return ( - <TransactionProfileIdProvider - projectId={project.id} - transactionId={event.id} - timestamp={event.dateReceived} - > - <SpanDetail - organization={organization} - event={event} - resetCellMeasureCache={jest.fn()} - scrollToHash={jest.fn()} - isRoot={false} - relatedErrors={[]} - trace={trace} - childTransactions={[]} - span={span} - {...props} - /> - </TransactionProfileIdProvider> + <SpanDetail + organization={organization} + event={event} + resetCellMeasureCache={jest.fn()} + scrollToHash={jest.fn()} + isRoot={false} + relatedErrors={[]} + trace={trace} + childTransactions={[]} + span={span} + {...props} + /> ); } @@ -164,7 +150,7 @@ describe('SpanDetail', function () { ).toBeInTheDocument(); expect(screen.getByRole('button', {name: 'View Query Summary'})).toHaveAttribute( 'href', - '/organizations/org-slug/insights/database/spans/span/a7ebd21614897/?project=2' + '/organizations/org-slug/insights/backend/database/spans/span/a7ebd21614897/?project=2' ); }); }); diff --git a/static/app/components/events/interfaces/spans/spanDetail.tsx b/static/app/components/events/interfaces/spans/spanDetail.tsx index 5b6c419541ce61..7630e53cb5b904 100644 
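Aside (not part of the patch): the RequestDataCard change above adds a `.filter(x => Array.isArray(x))` guard before destructuring header entries. A minimal standalone sketch of why that guard matters is below; the entry values are illustrative, not taken from the patch.

```ts
// Hypothetical header list mirroring the regression test above: one valid
// tuple plus a stray `null` entry, which payloads can contain in practice.
type HeaderEntry = [string, string] | null;

const headers: HeaderEntry[] = [['foo', 'bar'], null];

// Without the Array.isArray guard, destructuring the `null` entry in the
// callback throws "TypeError: null is not iterable" and crashes the card.
const items = headers
  .filter((entry): entry is [string, string] => Array.isArray(entry))
  .map(([key, value]) => ({key, subject: key, value}));

console.log(items); // [{ key: 'foo', subject: 'foo', value: 'bar' }]
```

The same idea is exercised by the new "should remove any non-tuple values from array" spec earlier in this diff, which feeds `[['foo', 'bar'], null]` through the component and asserts it renders without throwing.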
--- a/static/app/components/events/interfaces/spans/spanDetail.tsx +++ b/static/app/components/events/interfaces/spans/spanDetail.tsx @@ -22,7 +22,6 @@ import { } from 'sentry/components/performance/waterfall/rowDetails'; import Pill from 'sentry/components/pill'; import Pills from 'sentry/components/pills'; -import {useTransactionProfileId} from 'sentry/components/profiling/transactionProfileIdProvider'; import {TransactionToProfileButton} from 'sentry/components/profiling/transactionToProfileButton'; import { generateIssueEventTarget, @@ -106,7 +105,7 @@ type Props = { function SpanDetail(props: Props) { const [errorsOpened, setErrorsOpened] = useState(false); const location = useLocation(); - const profileId = useTransactionProfileId(); + const profileId = props.event.contexts.profile?.profile_id; const {projects} = useProjects(); const project = projects.find(p => p.id === props.event.projectID); diff --git a/static/app/components/events/interfaces/spans/spanGroupBar.tsx b/static/app/components/events/interfaces/spans/spanGroupBar.tsx index ccca4f7afbfe51..c16ad4ee760fe9 100644 --- a/static/app/components/events/interfaces/spans/spanGroupBar.tsx +++ b/static/app/components/events/interfaces/spans/spanGroupBar.tsx @@ -10,7 +10,7 @@ import { } from 'sentry/components/events/interfaces/spans/spanFrequencyBox'; import type {SpanBarType} from 'sentry/components/performance/waterfall/constants'; import { - getSpanBarColours, + getSpanBarColors, ROW_HEIGHT, } from 'sentry/components/performance/waterfall/constants'; import { @@ -315,7 +315,7 @@ export function SpanGroupBar(props: Props) { }} > <SpanGroupRowTitleContent - color={getSpanBarColours(spanBarType, theme).primary} + color={getSpanBarColors(spanBarType, theme).primary} > {props.renderGroupSpansTitle()} </SpanGroupRowTitleContent> diff --git a/static/app/components/events/interfaces/spans/spanProfileDetails.tsx b/static/app/components/events/interfaces/spans/spanProfileDetails.tsx index b7d1fda8e4ecb7..22f63b9582b4ca 100644 --- a/static/app/components/events/interfaces/spans/spanProfileDetails.tsx +++ b/static/app/components/events/interfaces/spans/spanProfileDetails.tsx @@ -10,8 +10,7 @@ import QuestionTooltip from 'sentry/components/questionTooltip'; import {IconChevron, IconProfiling} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {EventTransaction, Frame} from 'sentry/types/event'; -import {EntryType} from 'sentry/types/event'; +import {EntryType, type EventTransaction, type Frame} from 'sentry/types/event'; import type {PlatformKey} from 'sentry/types/project'; import {StackView} from 'sentry/types/stacktrace'; import {defined} from 'sentry/utils'; @@ -38,15 +37,7 @@ interface SpanProfileDetailsProps { onNoProfileFound?: () => void; } -export function SpanProfileDetails({ - event, - span, - onNoProfileFound, -}: SpanProfileDetailsProps) { - const organization = useOrganization(); - const {projects} = useProjects(); - const project = projects.find(p => p.id === event.projectID); - +export function useSpanProfileDetails(event, span) { const profileGroup = useProfileGroup(); const processedEvent = useMemo(() => { @@ -140,6 +131,43 @@ export function SpanProfileDetails({ }; }, [index, maxNodes, event, nodes]); + return { + processedEvent, + profileGroup, + profile, + nodes, + index, + setIndex, + totalWeight, + maxNodes, + frames, + hasPrevious, + hasNext, + }; +} + +export function SpanProfileDetails({ + event, + span, + onNoProfileFound, +}: 
SpanProfileDetailsProps) { + const organization = useOrganization(); + const {projects} = useProjects(); + const project = projects.find(p => p.id === event.projectID); + const { + processedEvent, + profileGroup, + profile, + nodes, + index, + setIndex, + maxNodes, + hasNext, + hasPrevious, + totalWeight, + frames, + } = useSpanProfileDetails(event, span); + const spanTarget = project && profileGroup && diff --git a/static/app/components/events/interfaces/spans/spanRectangle.tsx b/static/app/components/events/interfaces/spans/spanRectangle.tsx index a482f4b7a8de1f..b38aac9dd36e1a 100644 --- a/static/app/components/events/interfaces/spans/spanRectangle.tsx +++ b/static/app/components/events/interfaces/spans/spanRectangle.tsx @@ -1,7 +1,7 @@ import {useTheme} from '@emotion/react'; import type {SpanBarType} from 'sentry/components/performance/waterfall/constants'; -import {getSpanBarColours} from 'sentry/components/performance/waterfall/constants'; +import {getSpanBarColors} from 'sentry/components/performance/waterfall/constants'; import {RowRectangle} from 'sentry/components/performance/waterfall/rowBar'; import toPercent from 'sentry/utils/number/toPercent'; @@ -20,7 +20,7 @@ export default function SpanRectangle({ return ( <RowRectangle style={{ - backgroundColor: getSpanBarColours(spanBarType, theme).primary, + backgroundColor: getSpanBarColors(spanBarType, theme).primary, left: `min(${toPercent(bounds.left || 0)}, calc(100% - 1px))`, width: toPercent(bounds.width || 0), }} diff --git a/static/app/components/events/interfaces/spans/spanSummaryButton.tsx b/static/app/components/events/interfaces/spans/spanSummaryButton.tsx index fd231382221f9d..12750e292fc7bf 100644 --- a/static/app/components/events/interfaces/spans/spanSummaryButton.tsx +++ b/static/app/components/events/interfaces/spans/spanSummaryButton.tsx @@ -23,6 +23,7 @@ interface Props { function SpanSummaryButton(props: Props) { const location = useLocation(); const resourceBaseUrl = useModuleURL(ModuleName.RESOURCE); + const queryBaseUrl = useModuleURL(ModuleName.DB); const {event, organization, span} = props; @@ -41,7 +42,7 @@ function SpanSummaryButton(props: Props) { <LinkButton size="xs" to={querySummaryRouteWithQuery({ - orgSlug: organization.slug, + base: queryBaseUrl, query: location.query, group: sentryTags.group, projectID: event.projectID, diff --git a/static/app/components/events/interfaces/spans/traceView.spec.tsx b/static/app/components/events/interfaces/spans/traceView.spec.tsx index 3d0ac813a423a4..8a916e488f39b3 100644 --- a/static/app/components/events/interfaces/spans/traceView.spec.tsx +++ b/static/app/components/events/interfaces/spans/traceView.spec.tsx @@ -16,7 +16,6 @@ import * as AnchorLinkManager from 'sentry/components/events/interfaces/spans/sp import TraceView from 'sentry/components/events/interfaces/spans/traceView'; import {spanTargetHash} from 'sentry/components/events/interfaces/spans/utils'; import WaterfallModel from 'sentry/components/events/interfaces/spans/waterfallModel'; -import {TransactionProfileIdProvider} from 'sentry/components/profiling/transactionProfileIdProvider'; import ProjectsStore from 'sentry/stores/projectsStore'; import {QuickTraceContext} from 'sentry/utils/performance/quickTrace/quickTraceContext'; import QuickTraceQuery from 'sentry/utils/performance/quickTrace/quickTraceQuery'; @@ -534,11 +533,9 @@ describe('TraceView', () => { const waterfallModel = new WaterfallModel(builder.getEventFixture()); render( - <TransactionProfileIdProvider transactionId={undefined} 
timestamp={undefined}> - <AnchorLinkManager.Provider> - <TraceView organization={data.organization} waterfallModel={waterfallModel} /> - </AnchorLinkManager.Provider> - </TransactionProfileIdProvider> + <AnchorLinkManager.Provider> + <TraceView organization={data.organization} waterfallModel={waterfallModel} /> + </AnchorLinkManager.Provider> ); expect(await screen.findByText(/0000000000000003/i)).toBeInTheDocument(); @@ -562,11 +559,9 @@ describe('TraceView', () => { const waterfallModel = new WaterfallModel(builder.getEventFixture()); render( - <TransactionProfileIdProvider transactionId={undefined} timestamp={undefined}> - <AnchorLinkManager.Provider> - <TraceView organization={data.organization} waterfallModel={waterfallModel} /> - </AnchorLinkManager.Provider> - </TransactionProfileIdProvider> + <AnchorLinkManager.Provider> + <TraceView organization={data.organization} waterfallModel={waterfallModel} /> + </AnchorLinkManager.Provider> ); expect(await screen.findByText(/0000000000000003/i)).toBeInTheDocument(); diff --git a/static/app/components/events/interfaces/spans/types.tsx b/static/app/components/events/interfaces/spans/types.tsx index 7a2b916262ee33..ccd8afd617f095 100644 --- a/static/app/components/events/interfaces/spans/types.tsx +++ b/static/app/components/events/interfaces/spans/types.tsx @@ -56,6 +56,7 @@ export type RawSpanType = { op?: string; origin?: string; parent_span_id?: string; + project_slug?: string; same_process_as_parent?: boolean; sentry_tags?: Record<string, string>; 'span.averageResults'?: { diff --git a/static/app/components/events/interfaces/threads.spec.tsx b/static/app/components/events/interfaces/threads.spec.tsx index 70c756ac8b5d2e..3ff3b1fce7e1a0 100644 --- a/static/app/components/events/interfaces/threads.spec.tsx +++ b/static/app/components/events/interfaces/threads.spec.tsx @@ -1267,6 +1267,18 @@ describe('Threads', function () { within(threadSelector).getByText('ViewController.causeCrash'); }); + it('can navigate to next/previous thread', async function () { + render(<Threads {...props} />, {organization}); + const threadSelector = await screen.findByTestId('thread-selector'); + expect(threadSelector).toHaveTextContent('Thread #0'); + await userEvent.click(await screen.findByRole('button', {name: 'Next Thread'})); + expect(threadSelector).toHaveTextContent('Thread #1'); + await userEvent.click( + await screen.findByRole('button', {name: 'Previous Thread'}) + ); + expect(threadSelector).toHaveTextContent('Thread #0'); + }); + it('renders raw stack trace', async function () { MockApiClient.addMockResponse({ url: `/projects/${organization.slug}/${project.slug}/events/${event.id}/apple-crash-report?minified=false`, diff --git a/static/app/components/events/interfaces/threads.tsx b/static/app/components/events/interfaces/threads.tsx index 90be7c3fec25b0..40582aecafd6e8 100644 --- a/static/app/components/events/interfaces/threads.tsx +++ b/static/app/components/events/interfaces/threads.tsx @@ -1,6 +1,8 @@ -import {Fragment, useEffect, useState} from 'react'; +import {Fragment, useEffect, useMemo, useState} from 'react'; import styled from '@emotion/styled'; +import {Button} from 'sentry/components/button'; +import ButtonBar from 'sentry/components/buttonBar'; import {CommitRow} from 'sentry/components/commitRow'; import {Flex} from 'sentry/components/container/flex'; import ErrorBoundary from 'sentry/components/errorBoundary'; @@ -15,7 +17,14 @@ import Pill from 'sentry/components/pill'; import Pills from 'sentry/components/pills'; import 
QuestionTooltip from 'sentry/components/questionTooltip'; import TextOverflow from 'sentry/components/textOverflow'; -import {IconClock, IconInfo, IconLock, IconPlay, IconTimer} from 'sentry/icons'; +import { + IconChevron, + IconClock, + IconInfo, + IconLock, + IconPlay, + IconTimer, +} from 'sentry/icons'; import {t, tn} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Event, Thread} from 'sentry/types/event'; @@ -101,7 +110,11 @@ const useActiveThreadState = ( }; export function Threads({data, event, projectSlug, groupingCurrentLevel, group}: Props) { - const threads = data.values ?? []; + // Sort threads by crashed first + const threads = useMemo( + () => (data.values ?? []).toSorted((a, b) => Number(b.crashed) - Number(a.crashed)), + [data.values] + ); const hasStreamlinedUI = useHasStreamlinedUI(); const [activeThread, setActiveThread] = useActiveThreadState(event, threads); @@ -109,7 +122,10 @@ export function Threads({data, event, projectSlug, groupingCurrentLevel, group}: const hasMoreThanOneThread = threads.length > 1; - const exception = getThreadException(event, activeThread); + const exception = useMemo( + () => getThreadException(event, activeThread), + [event, activeThread] + ); const entryIndex = exception ? event.entries.findIndex(entry => entry.type === EntryType.EXCEPTION) @@ -226,6 +242,18 @@ export function Threads({data, event, projectSlug, groupingCurrentLevel, group}: const {id: activeThreadId, name: activeThreadName} = activeThread ?? {}; const hideThreadTags = activeThreadId === undefined || !activeThreadName; + function handleChangeThread(direction: 'previous' | 'next') { + const currentIndex = threads.findIndex(thread => thread.id === activeThreadId); + let nextIndex = direction === 'previous' ? 
currentIndex - 1 : currentIndex + 1; + if (nextIndex < 0) { + nextIndex = threads.length - 1; + } else if (nextIndex >= threads.length) { + nextIndex = 0; + } + + setActiveThread(threads[nextIndex]); + } + const threadComponent = ( <Fragment> {hasMoreThanOneThread && ( @@ -235,6 +263,28 @@ export function Threads({data, event, projectSlug, groupingCurrentLevel, group}: <ThreadHeading>{t('Threads')}</ThreadHeading> {activeThread && ( <Wrapper> + <ButtonBar merged> + <Button + title={t('Previous Thread')} + tooltipProps={{delay: 1000}} + icon={<IconChevron direction="left" />} + aria-label={t('Previous Thread')} + size="xs" + onClick={() => { + handleChangeThread('previous'); + }} + /> + <Button + title={t('Next Thread')} + tooltipProps={{delay: 1000}} + icon={<IconChevron direction="right" />} + aria-label={t('Next Thread')} + size="xs" + onClick={() => { + handleChangeThread('next'); + }} + /> + </ButtonBar> <ThreadSelector threads={threads} activeThread={activeThread} @@ -276,7 +326,7 @@ export function Threads({data, event, projectSlug, groupingCurrentLevel, group}: </Fragment> )} <TraceEventDataSection - type={EntryType.THREADS} + type={SectionKey.THREADS} projectSlug={projectSlug} eventId={event.id} recentFirst={isStacktraceNewestFirst()} @@ -398,6 +448,8 @@ const LockReason = styled(TextOverflow)` `; const Wrapper = styled('div')` + display: flex; + gap: ${space(1)}; align-items: center; flex-wrap: wrap; flex-grow: 1; diff --git a/static/app/components/events/interfaces/threads/threadSelector/filterThreadInfo.tsx b/static/app/components/events/interfaces/threads/threadSelector/filterThreadInfo.tsx index 188a0f10095fbb..42354fc1a8cea1 100644 --- a/static/app/components/events/interfaces/threads/threadSelector/filterThreadInfo.tsx +++ b/static/app/components/events/interfaces/threads/threadSelector/filterThreadInfo.tsx @@ -8,7 +8,6 @@ import type {StacktraceType} from 'sentry/types/stacktrace'; import getRelevantFrame from './getRelevantFrame'; import getThreadException from './getThreadException'; import getThreadStacktrace from './getThreadStacktrace'; -import trimFilename from './trimFilename'; type ThreadInfo = { crashedInfo?: EntryData; @@ -17,6 +16,11 @@ type ThreadInfo = { state?: ThreadStates; }; +function trimFilename(filename: string) { + const pieces = filename.split(/\//g); + return pieces[pieces.length - 1]; +} + function filterThreadInfo( event: Event, thread: Thread, diff --git a/static/app/components/events/interfaces/threads/threadSelector/header.tsx b/static/app/components/events/interfaces/threads/threadSelector/header.tsx deleted file mode 100644 index e12de4884b87cc..00000000000000 --- a/static/app/components/events/interfaces/threads/threadSelector/header.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import {t} from 'sentry/locale'; - -import {Grid, GridCell} from './styles'; - -type Props = { - hasThreadStates: boolean; -}; - -function Header({hasThreadStates}: Props) { - return ( - <Grid hasThreadStates={hasThreadStates}> - <GridCell /> - <GridCell>{t('Id')}</GridCell> - <GridCell>{t('Name')}</GridCell> - <GridCell>{t('Label')}</GridCell> - {hasThreadStates && <GridCell>{t('State')}</GridCell>} - </Grid> - ); -} - -export default Header; diff --git a/static/app/components/events/interfaces/threads/threadSelector/index.tsx b/static/app/components/events/interfaces/threads/threadSelector/index.tsx index ad60590945069c..2c7871acdacf7a 100644 --- a/static/app/components/events/interfaces/threads/threadSelector/index.tsx +++ 
b/static/app/components/events/interfaces/threads/threadSelector/index.tsx @@ -1,172 +1,149 @@ -import {ClassNames} from '@emotion/react'; +import {useMemo} from 'react'; import styled from '@emotion/styled'; -import partition from 'lodash/partition'; -import DropdownAutoComplete from 'sentry/components/dropdownAutoComplete'; -import DropdownButton from 'sentry/components/dropdownButton'; -import {getMappedThreadState} from 'sentry/components/events/interfaces/threads/threadSelector/threadStates'; +import {CompactSelect} from 'sentry/components/compactSelect'; import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; import type {Event, ExceptionType, Frame, Thread} from 'sentry/types/event'; import {defined} from 'sentry/utils'; import {trackAnalytics} from 'sentry/utils/analytics'; -import theme from 'sentry/utils/theme'; import useOrganization from 'sentry/utils/useOrganization'; import filterThreadInfo from './filterThreadInfo'; -import Header from './header'; import Option from './option'; -import SelectedOption from './selectedOption'; +import {ThreadSelectorGrid, ThreadSelectorGridCell} from './styles'; +import {getMappedThreadState} from './threadStates'; type Props = { activeThread: Thread; event: Event; - threads: Array<Thread>; - exception?: Required<ExceptionType>; - fullWidth?: boolean; - onChange?: (thread: Thread) => void; + exception: Required<ExceptionType> | undefined; + onChange: (thread: Thread) => void; + /** + * Expects threads to be sorted by crashed first + */ + threads: Thread[]; }; -const DROPDOWN_MAX_HEIGHT = 400; +function Header({hasThreadStates}: {hasThreadStates: boolean}) { + return ( + <StyledGrid hasThreadStates={hasThreadStates}> + <ThreadSelectorGridCell /> + <ThreadSelectorGridCell>{t('ID')}</ThreadSelectorGridCell> + <ThreadSelectorGridCell>{t('Name')}</ThreadSelectorGridCell> + <ThreadSelectorGridCell>{t('Label')}</ThreadSelectorGridCell> + {hasThreadStates && <ThreadSelectorGridCell>{t('State')}</ThreadSelectorGridCell>} + </StyledGrid> + ); +} + +function getThreadLabel( + details: ReturnType<typeof filterThreadInfo>, + name: string | null | undefined +) { + if (name?.length) { + return name; + } -function ThreadSelector({ - threads, - event, - exception, - activeThread, - onChange, - fullWidth = false, -}: Props) { + return details?.label || `<${t('unknown')}>`; +} + +function ThreadSelector({threads, event, exception, activeThread, onChange}: Props) { const organization = useOrganization({allowNull: true}); + const hasThreadStates = threads.some(thread => defined(getMappedThreadState(thread.state)) ); - const getDropDownItem = (thread: Thread) => { - const {label, filename, crashedInfo, state} = filterThreadInfo( - event, - thread, - exception - ); - const threadInfo = {label, filename, state}; - return { - value: `#${thread.id}: ${thread.name} ${label} ${filename}`, - threadInfo, - thread, - label: ( - <Option - id={thread.id} - details={threadInfo} - name={thread.name} - crashed={thread.crashed} - crashedInfo={crashedInfo} - hasThreadStates={hasThreadStates} - /> - ), - }; - }; - - const getItems = () => { - const [crashed, notCrashed] = partition(threads, thread => !!thread?.crashed); - return [...crashed, ...notCrashed].map(getDropDownItem); - }; - - const handleChange = (thread: Thread) => { - if (onChange) { - onChange(thread); - } - }; - - const items = getItems(); + const items = useMemo(() => { + return threads.map((thread: Thread) => { + const threadInfo = filterThreadInfo(event, thread, exception); + return { + value: 
thread.id, + textValue: `#${thread.id}: ${thread.name} ${threadInfo.label} ${threadInfo.filename}`, + label: ( + <Option + thread={thread} + details={threadInfo} + crashedInfo={threadInfo.crashedInfo} + hasThreadStates={hasThreadStates} + /> + ), + }; + }); + }, [threads, event, exception, hasThreadStates]); return ( - <ClassNames> - {({css}) => ( - <StyledDropdownAutoComplete - detached - data-test-id="thread-selector" - items={items} - onOpen={() => { - trackAnalytics('stack_trace.threads.thread_selector_opened', { - organization, - platform: event.platform, - num_threads: items.length, - }); - }} - onSelect={item => { - const selectedThread: Thread = item.thread; - - trackAnalytics('stack_trace.threads.thread_selected', { - organization, - platform: event.platform, - thread_index: items.findIndex( - ({thread}) => thread.id === selectedThread.id - ), - num_threads: items.length, - is_crashed_thread: selectedThread.crashed, - is_current_thread: selectedThread.current, - thread_state: selectedThread.state ?? '', - has_stacktrace: defined(selectedThread.stacktrace), - num_in_app_frames: - selectedThread.stacktrace?.frames?.filter((frame: Frame) => frame.inApp) - .length ?? 0, - }); - handleChange(item.thread); - }} - maxHeight={DROPDOWN_MAX_HEIGHT} - searchPlaceholder={t('Filter Threads')} - emptyMessage={t('You have no threads')} - noResultsMessage={t('No threads found')} - menuHeader={<Header hasThreadStates={hasThreadStates} />} - rootClassName={ - fullWidth - ? css` - width: 100%; - ` - : undefined - } - closeOnSelect - emptyHidesInput - > - {({isOpen, selectedItem}) => ( - <StyledDropdownButton isOpen={isOpen} size="xs"> - {selectedItem ? ( - <SelectedOption - id={selectedItem.thread.id} - name={selectedItem.thread.name} - details={selectedItem.threadInfo} - /> - ) : ( - <SelectedOption - id={activeThread.id} - name={activeThread.name} - details={filterThreadInfo(event, activeThread, exception)} - /> - )} - </StyledDropdownButton> - )} - </StyledDropdownAutoComplete> - )} - </ClassNames> + <CompactSelect + data-test-id="thread-selector" + searchable + searchPlaceholder={t('Filter threads')} + onOpenChange={() => { + trackAnalytics('stack_trace.threads.thread_selector_opened', { + organization, + platform: event.platform, + num_threads: items.length, + }); + }} + value={activeThread.id} + options={items} + menuWidth={450} + triggerProps={{size: 'xs'}} + triggerLabel={ + <ThreadName> + {t('Thread #%s: ', activeThread.id)} + <ActiveThreadName> + {getThreadLabel( + filterThreadInfo(event, activeThread, exception), + activeThread.name + )} + </ActiveThreadName> + </ThreadName> + } + menuBody={<Header hasThreadStates={hasThreadStates} />} + onChange={selected => { + const threadIndex = threads.findIndex(th => th.id === selected.value); + const thread = threads[threadIndex]; + if (thread) { + trackAnalytics('stack_trace.threads.thread_selected', { + organization, + platform: event.platform, + thread_index: threadIndex, + num_threads: items.length, + is_crashed_thread: thread.crashed, + is_current_thread: thread.current, + thread_state: thread.state ?? '', + has_stacktrace: defined(thread.stacktrace), + num_in_app_frames: + thread.stacktrace?.frames?.filter((frame: Frame) => frame.inApp).length ?? 
+ 0, + }); + onChange(thread); + } + }} + /> ); } export default ThreadSelector; -const StyledDropdownAutoComplete = styled(DropdownAutoComplete)` - min-width: 300px; - @media (min-width: ${theme.breakpoints.small}) { - width: 500px; - } - @media (max-width: ${p => p.theme.breakpoints.large}) { - top: calc(100% - 2px); - } +const ThreadName = styled('div')` + display: flex; + gap: ${space(0.5)}; + font-weight: ${p => p.theme.fontWeightBold}; `; -const StyledDropdownButton = styled(DropdownButton)` - > *:first-child { - justify-content: space-between; - width: 100%; - } - width: 100%; - min-width: 150px; +const ActiveThreadName = styled('span')` + font-weight: ${p => p.theme.fontWeightNormal}; + max-width: 200px; + ${p => p.theme.overflowEllipsis}; +`; + +const StyledGrid = styled(ThreadSelectorGrid)` + padding-left: 40px; + padding-right: 40px; + color: ${p => p.theme.subText}; + font-weight: ${p => p.theme.fontWeightBold}; + border-bottom: 1px solid ${p => p.theme.border}; + margin-bottom: 2px; `; diff --git a/static/app/components/events/interfaces/threads/threadSelector/option.tsx b/static/app/components/events/interfaces/threads/threadSelector/option.tsx index 65ae4aa9f68708..36d8cb3974b02a 100644 --- a/static/app/components/events/interfaces/threads/threadSelector/option.tsx +++ b/static/app/components/events/interfaces/threads/threadSelector/option.tsx @@ -1,22 +1,20 @@ import styled from '@emotion/styled'; -import type {ThreadStates} from 'sentry/components/events/interfaces/threads/threadSelector/threadStates'; import TextOverflow from 'sentry/components/textOverflow'; import {Tooltip} from 'sentry/components/tooltip'; import {IconFire} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; +import type {Thread} from 'sentry/types/event'; import type {EntryData} from 'sentry/types/group'; -import type {ColorOrAlias} from 'sentry/utils/theme'; -import {Grid, GridCell} from './styles'; +import {ThreadSelectorGrid} from './styles'; +import type {ThreadStates} from './threadStates'; type Props = { + crashedInfo: EntryData | undefined; details: ThreadInfo; hasThreadStates: boolean; - id: number; - crashed?: boolean; - crashedInfo?: EntryData; - name?: string | null; + thread: Thread; }; type ThreadInfo = { @@ -25,14 +23,14 @@ type ThreadInfo = { state?: ThreadStates; }; -function Option({id, details, name, crashed, crashedInfo, hasThreadStates}: Props) { +function Option({thread, crashedInfo, details, hasThreadStates}: Props) { const label = details.label ?? `<${t('unknown')}>`; - const optionName = name || `<${t('unknown')}>`; + const optionName = thread.name || `<${t('unknown')}>`; return ( - <Grid hasThreadStates={hasThreadStates}> - <GridCell> - {crashed && ( + <ThreadSelectorGrid hasThreadStates={hasThreadStates}> + <div> + {thread.crashed && ( <InnerCell isCentered> {crashedInfo ? 
( <Tooltip @@ -50,51 +48,42 @@ function Option({id, details, name, crashed, crashedInfo, hasThreadStates}: Prop )} </InnerCell> )} - </GridCell> - <GridCell> + </div> + <InnerCell> + <Tooltip title={`#${thread.id}`} position="top"> + <TextOverflow>{`#${thread.id}`}</TextOverflow> + </Tooltip> + </InnerCell> + <InnerCell isBold> + <Tooltip title={optionName} position="top"> + <TextOverflow>{optionName}</TextOverflow> + </Tooltip> + </InnerCell> + <InnerCell> + <Tooltip title={label} position="top"> + <TextOverflow>{label}</TextOverflow> + </Tooltip> + </InnerCell> + {hasThreadStates && ( <InnerCell> - <Tooltip title={`#${id}`} position="top"> - <TextOverflow>{`#${id}`}</TextOverflow> - </Tooltip> - </InnerCell> - </GridCell> - <GridCell> - <InnerCell isBold> - <Tooltip title={optionName} position="top"> - <TextOverflow>{optionName}</TextOverflow> + <Tooltip title={details.state} position="top"> + <TextOverflow>{details.state}</TextOverflow> </Tooltip> </InnerCell> - </GridCell> - <GridCell> - <InnerCell color="linkColor"> - <Tooltip title={label} position="top"> - <TextOverflow>{label}</TextOverflow> - </Tooltip> - </InnerCell> - </GridCell> - {hasThreadStates && ( - <GridCell> - <InnerCell> - <Tooltip title={details.state} position="top"> - <TextOverflow>{details.state}</TextOverflow> - </Tooltip> - </InnerCell> - </GridCell> )} - </Grid> + </ThreadSelectorGrid> ); } export default Option; const InnerCell = styled('div')<{ - color?: ColorOrAlias; isBold?: boolean; isCentered?: boolean; }>` + ${p => p.theme.overflowEllipsis} display: flex; align-items: center; justify-content: ${p => (p.isCentered ? 'center' : 'flex-start')}; font-weight: ${p => (p.isBold ? 600 : 400)}; - ${p => p.color && `color: ${p.theme[p.color]}`} `; diff --git a/static/app/components/events/interfaces/threads/threadSelector/selectedOption.tsx b/static/app/components/events/interfaces/threads/threadSelector/selectedOption.tsx deleted file mode 100644 index 53426c473ae89e..00000000000000 --- a/static/app/components/events/interfaces/threads/threadSelector/selectedOption.tsx +++ /dev/null @@ -1,51 +0,0 @@ -import styled from '@emotion/styled'; - -import type {ThreadStates} from 'sentry/components/events/interfaces/threads/threadSelector/threadStates'; -import TextOverflow from 'sentry/components/textOverflow'; -import {t, tct} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; - -type Props = { - details: ThreadInfo; - id: number; - name?: string | null; -}; - -type ThreadInfo = { - filename?: string; - label?: string; - state?: ThreadStates; -}; - -function getThreadLabel(details: ThreadInfo, name?: string | null) { - if (name?.length) { - return name; - } - return details?.label || `<${t('unknown')}>`; -} - -function SelectedOption({id, name, details}: Props) { - return ( - <Wrapper> - <ThreadId>{tct('Thread #[id]:', {id})}</ThreadId> - <Label>{getThreadLabel(details, name)}</Label> - </Wrapper> - ); -} - -export default SelectedOption; - -const Wrapper = styled('div')` - grid-template-columns: auto 1fr; - display: grid; -`; - -const ThreadId = styled(TextOverflow)` - padding-right: ${space(1)}; - max-width: 100%; - text-align: left; -`; - -const Label = styled(ThreadId)` - font-weight: ${p => p.theme.fontWeightNormal}; -`; diff --git a/static/app/components/events/interfaces/threads/threadSelector/styles.tsx b/static/app/components/events/interfaces/threads/threadSelector/styles.tsx index 8a453ea0c1d43c..dca4e5d5bdd1f5 100644 --- 
a/static/app/components/events/interfaces/threads/threadSelector/styles.tsx +++ b/static/app/components/events/interfaces/threads/threadSelector/styles.tsx @@ -2,16 +2,15 @@ import styled from '@emotion/styled'; import {space} from 'sentry/styles/space'; -const Grid = styled('div')<{hasThreadStates: boolean}>` +export const ThreadSelectorGrid = styled('div')<{hasThreadStates: boolean}>` font-size: ${p => p.theme.fontSizeSmall}; display: grid; - gap: ${space(1)}; + gap: ${space(0.5)}; align-items: center; - grid-template-columns: 16px repeat(${p => (p.hasThreadStates ? '3' : '2')}, 1fr) 1fr; + grid-template-columns: 16px 0.5fr repeat(${p => (p.hasThreadStates ? '2' : '1')}, 1fr) 1fr; + min-height: 18px; `; -const GridCell = styled('div')` +export const ThreadSelectorGridCell = styled('div')` ${p => p.theme.overflowEllipsis}; `; - -export {Grid, GridCell}; diff --git a/static/app/components/events/interfaces/threads/threadSelector/trimFilename.tsx b/static/app/components/events/interfaces/threads/threadSelector/trimFilename.tsx deleted file mode 100644 index 746741dae578a1..00000000000000 --- a/static/app/components/events/interfaces/threads/threadSelector/trimFilename.tsx +++ /dev/null @@ -1,6 +0,0 @@ -function trimFilename(filename: string) { - const pieces = filename.split(/\//g); - return pieces[pieces.length - 1]; -} - -export default trimFilename; diff --git a/static/app/components/events/interfaces/utils.tsx b/static/app/components/events/interfaces/utils.tsx index da8de23bbbd49f..5020068be07e2b 100644 --- a/static/app/components/events/interfaces/utils.tsx +++ b/static/app/components/events/interfaces/utils.tsx @@ -125,22 +125,22 @@ export function getCurlCommand(data: EntryRequest['data']) { result += ' \\\n -X ' + data.method; } - data.headers = data.headers?.filter(defined); + const headers = + data.headers + ?.filter(defined) + // sort headers + .sort(function (a, b) { + return a[0] === b[0] ? 0 : a[0] < b[0] ? -1 : 1; + }) ?? []; // TODO(benvinegar): just gzip? what about deflate? - const compressed = data.headers?.find( + const compressed = headers?.find( h => h[0] === 'Accept-Encoding' && h[1].includes('gzip') ); if (compressed) { result += ' \\\n --compressed'; } - // sort headers - const headers = - data.headers?.sort(function (a, b) { - return a[0] === b[0] ? 0 : a[0] < b[0] ? -1 : 1; - }) ?? 
[]; - for (const header of headers) { result += ' \\\n -H "' + header[0] + ': ' + escapeBashString(header[1] + '') + '"'; } @@ -172,7 +172,9 @@ export function getCurlCommand(data: EntryRequest['data']) { return result; } -export function stringifyQueryList(query: string | [key: string, value: string][]) { +export function stringifyQueryList( + query: string | Array<[key: string, value: string] | null> +) { if (typeof query === 'string') { return query; } diff --git a/static/app/components/events/meta/metaProxy.tsx b/static/app/components/events/meta/metaProxy.tsx index 1637ffd039cd2e..1581658d9b9509 100644 --- a/static/app/components/events/meta/metaProxy.tsx +++ b/static/app/components/events/meta/metaProxy.tsx @@ -1,5 +1,4 @@ import isEmpty from 'lodash/isEmpty'; -import memoize from 'lodash/memoize'; import type {Meta} from 'sentry/types/group'; @@ -64,7 +63,7 @@ export class MetaProxy { } } -export const withMeta = memoize(function withMeta<T>(event: T): T { +export function withMeta<T>(event: T): T { if (!event) { return event; } @@ -80,7 +79,7 @@ export const withMeta = memoize(function withMeta<T>(event: T): T { // // https://github.com/microsoft/TypeScript/issues/20846 return new Proxy(event, new MetaProxy((event as any)._meta)) as T; -}); +} export function getMeta<T extends {}>( obj: T | undefined, diff --git a/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx b/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx index dbe4818d223422..d896c90b390333 100644 --- a/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackAssignedTo.tsx @@ -72,7 +72,7 @@ export default function FeedbackAssignedTo({ }} owners={owners} group={feedbackIssue} - alignMenu="left" + alignMenu="right" > {({isOpen, getActorProps}) => ( <Button size="xs" aria-label={t('Assigned dropdown')} {...getActorProps({})}> diff --git a/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx b/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx index 67d348ac9949c6..6f4abca146d595 100644 --- a/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx +++ b/static/app/components/feedback/feedbackItem/feedbackItemUsername.tsx @@ -11,6 +11,7 @@ import {space} from 'sentry/styles/space'; import type {FeedbackIssue} from 'sentry/utils/feedback/types'; import {selectText} from 'sentry/utils/selectText'; import useCopyToClipboard from 'sentry/utils/useCopyToClipboard'; +import useOrganization from 'sentry/utils/useOrganization'; interface Props { feedbackIssue: FeedbackIssue; @@ -22,6 +23,7 @@ export default function FeedbackItemUsername({className, feedbackIssue, style}: const name = feedbackIssue.metadata.name; const email = feedbackIssue.metadata.contact_email; + const organization = useOrganization(); const nameOrEmail = name || email; const isSameNameAndEmail = name === email; @@ -52,6 +54,13 @@ export default function FeedbackItemUsername({className, feedbackIssue, style}: return <strong>{t('Anonymous User')}</strong>; } + const mailToHref = `mailto:${email}?subject=${encodeURIComponent(`Following up from ${organization.name}`)}&body=${encodeURIComponent( + feedbackIssue.metadata.message + .split('\n') + .map(s => `> ${s}`) + .join('\n') + )}`; + return ( <Flex align="center" gap={space(1)} className={className} style={style}> <Tooltip title={t('Click to copy')} containerDisplayMode="flex"> @@ -79,7 +88,7 @@ export default function FeedbackItemUsername({className, feedbackIssue, 
style}: {email ? ( <Tooltip title={t(`Email %s`, user)} containerDisplayMode="flex"> <LinkButton - href={`mailto:${email}`} + href={mailToHref} external icon={<IconMail color="gray300" />} aria-label={t(`Email %s`, user)} diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index cffec03850be74..921adbf3424630 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -246,15 +246,12 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { /> ), })} - {jsFrameworkDocs?.platformOptions && - tct('with [optionSelect]', { - optionSelect: ( - <PlatformOptionDropdown - platformOptions={jsFrameworkDocs?.platformOptions} - disabled={setupMode() === 'jsLoader'} - /> - ), - })} + {jsFrameworkDocs?.platformOptions && ( + <PlatformOptionDropdown + platformOptions={jsFrameworkDocs?.platformOptions} + disabled={setupMode() === 'jsLoader'} + /> + )} </PlatformSelect> ) : ( t('I use NPM or Yarn') diff --git a/static/app/components/feedback/list/useHasLinkedIssues.tsx b/static/app/components/feedback/list/useHasLinkedIssues.tsx index b829cdca9848dc..aab310f37c71af 100644 --- a/static/app/components/feedback/list/useHasLinkedIssues.tsx +++ b/static/app/components/feedback/list/useHasLinkedIssues.tsx @@ -1,5 +1,6 @@ import type {ExternalIssueComponent} from 'sentry/components/group/externalIssuesList/types'; import useIssueTrackingFilter from 'sentry/components/group/externalIssuesList/useIssueTrackingFilter'; +import {sentryAppComponentIsDisabled} from 'sentry/components/sentryAppComponentIcon'; import SentryAppInstallationStore from 'sentry/stores/sentryAppInstallationsStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; import type {Event} from 'sentry/types/event'; @@ -23,7 +24,8 @@ export default function useExternalIssueData({group, event, project}: Props) { const renderSentryAppIssues = (): ExternalIssueComponent[] => { return components .map<ExternalIssueComponent | null>(component => { - const {sentryApp, error: disabled} = component; + const {sentryApp} = component; + const disabled = sentryAppComponentIsDisabled(component); const installation = sentryAppInstallations.find( i => i.app.uuid === sentryApp.uuid ); diff --git a/static/app/components/forms/fieldGroup/index.tsx b/static/app/components/forms/fieldGroup/index.tsx index 23cdfa471d1a7c..566f21f4fd8a14 100644 --- a/static/app/components/forms/fieldGroup/index.tsx +++ b/static/app/components/forms/fieldGroup/index.tsx @@ -96,7 +96,12 @@ function FieldGroup({ </span> {helpElement && showHelpInTooltip && ( <FieldQuestion> - <QuestionTooltip position="top" size="sm" title={helpElement} /> + <QuestionTooltip + position="top" + size="sm" + {...(showHelpInTooltip !== true ? showHelpInTooltip : {})} + title={helpElement} + /> </FieldQuestion> )} </FieldLabel> diff --git a/static/app/components/forms/fieldGroup/types.tsx b/static/app/components/forms/fieldGroup/types.tsx index dd15c4f454dba9..bb5078ec281d23 100644 --- a/static/app/components/forms/fieldGroup/types.tsx +++ b/static/app/components/forms/fieldGroup/types.tsx @@ -1,3 +1,5 @@ +import type {TooltipProps} from 'sentry/components/tooltip'; + /** * Props that control UI elements that are part of a Form Group */ @@ -91,9 +93,10 @@ export interface FieldGroupProps { */ required?: boolean; /** - * Displays the help element in the tooltip + * Displays the help element in the tooltip. 
Tooltip props may be passed to + * customize the help tooltip. */ - showHelpInTooltip?: boolean; + showHelpInTooltip?: boolean | Omit<TooltipProps, 'title'>; /** * When stacking forms the bottom border is hidden and padding is adjusted * for form elements to be stacked on each other. diff --git a/static/app/components/globalEventProcessingAlert.tsx b/static/app/components/globalEventProcessingAlert.tsx index 3565284f6ed921..f3e569a5c96de7 100644 --- a/static/app/components/globalEventProcessingAlert.tsx +++ b/static/app/components/globalEventProcessingAlert.tsx @@ -15,7 +15,7 @@ type Props = { // This alert makes the user aware that one or more projects have been selected for the Low Priority Queue function GlobalEventProcessingAlert({className, projects}: Props) { const projectsInTheLowPriorityQueue = projects.filter( - project => project.eventProcessing.symbolicationDegraded + project => project?.eventProcessing?.symbolicationDegraded ); if (!projectsInTheLowPriorityQueue.length) { diff --git a/static/app/components/gridEditable/index.tsx b/static/app/components/gridEditable/index.tsx index e301366dd19a62..e28ef18e1e8f55 100644 --- a/static/app/components/gridEditable/index.tsx +++ b/static/app/components/gridEditable/index.tsx @@ -94,6 +94,7 @@ type GridEditableProps<DataRow, ColumnKey> = { ) => React.ReactNode[]; }; 'aria-label'?: string; + bodyStyle?: React.CSSProperties; emptyMessage?: React.ReactNode; error?: unknown | null; /** @@ -450,6 +451,7 @@ class GridEditable< scrollable, height, 'aria-label': ariaLabel, + bodyStyle, } = this.props; const showHeader = title || headerButtons; return ( @@ -463,7 +465,7 @@ class GridEditable< )} </Header> )} - <Body> + <Body style={bodyStyle}> <Grid aria-label={ariaLabel} data-test-id="grid-editable" diff --git a/static/app/components/group/assigneeSelector.tsx b/static/app/components/group/assigneeSelector.tsx index 8f192826ffa5b7..90b6f6a65da19c 100644 --- a/static/app/components/group/assigneeSelector.tsx +++ b/static/app/components/group/assigneeSelector.tsx @@ -20,6 +20,7 @@ interface AssigneeSelectorProps { assigneeLoading: boolean; group: Group; handleAssigneeChange: (assignedActor: AssignableEntity | null) => void; + additionalMenuFooterItems?: React.ReactNode; memberList?: User[]; owners?: Omit<SuggestedAssignee, 'assignee'>[]; } @@ -76,6 +77,7 @@ export function AssigneeSelector({ assigneeLoading, handleAssigneeChange, owners, + additionalMenuFooterItems, }: AssigneeSelectorProps) { return ( <AssigneeSelectorDropdown @@ -107,6 +109,7 @@ export function AssigneeSelector({ /> </StyledDropdownButton> )} + additionalMenuFooterItems={additionalMenuFooterItems} /> ); } diff --git a/static/app/components/group/externalIssuesList/hooks/useSentryAppExternalIssues.tsx b/static/app/components/group/externalIssuesList/hooks/useSentryAppExternalIssues.tsx index 42b54637032883..c16d5e4e8a112c 100644 --- a/static/app/components/group/externalIssuesList/hooks/useSentryAppExternalIssues.tsx +++ b/static/app/components/group/externalIssuesList/hooks/useSentryAppExternalIssues.tsx @@ -51,16 +51,21 @@ export function useSentryAppExternalIssues({ const externalIssue = externalIssues.find( i => i.serviceType === component.sentryApp.slug ); - const displayName = component.sentryApp.name; + const appDisplayName = component.sentryApp.name; const displayIcon = ( <SentryAppComponentIcon sentryAppComponent={component} size={14} /> ); if (externalIssue) { result.linkedIssues.push({ key: externalIssue.id, - displayName: `${displayName} Issue`, + displayName: 
externalIssue.displayName, url: externalIssue.webUrl, - title: externalIssue.displayName, + // Some display names look like PROJ#1234 + // Others look like ClickUp: Title + // Add the integration name if it's not already included + title: externalIssue.displayName.includes(appDisplayName) + ? externalIssue.displayName + : `${appDisplayName}: ${externalIssue.displayName}`, displayIcon, onUnlink: () => { deleteExternalIssue(api, group.id, externalIssue.id) @@ -76,10 +81,10 @@ export function useSentryAppExternalIssues({ } else { result.integrations.push({ key: component.sentryApp.slug, - displayName, + displayName: appDisplayName, displayIcon, disabled: Boolean(component.error), - disabledText: t('Unable to connect to %s', displayName), + disabledText: t('Unable to connect to %s', appDisplayName), actions: [ { id: component.sentryApp.slug, diff --git a/static/app/components/group/externalIssuesList/useExternalIssueData.tsx b/static/app/components/group/externalIssuesList/useExternalIssueData.tsx index 47104210634fec..767c96371e5cab 100644 --- a/static/app/components/group/externalIssuesList/useExternalIssueData.tsx +++ b/static/app/components/group/externalIssuesList/useExternalIssueData.tsx @@ -2,6 +2,7 @@ import type {ExternalIssueComponent} from 'sentry/components/group/externalIssue import {useExternalIssues} from 'sentry/components/group/externalIssuesList/useExternalIssues'; import useFetchIntegrations from 'sentry/components/group/externalIssuesList/useFetchIntegrations'; import useIssueTrackingFilter from 'sentry/components/group/externalIssuesList/useIssueTrackingFilter'; +import {sentryAppComponentIsDisabled} from 'sentry/components/sentryAppComponentIcon'; import SentryAppInstallationStore from 'sentry/stores/sentryAppInstallationsStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; import type {Event} from 'sentry/types/event'; @@ -71,7 +72,8 @@ export default function useExternalIssueData({group, event, project}: Props) { const renderSentryAppIssues = (): ExternalIssueComponent[] => { return components .map<ExternalIssueComponent | null>(component => { - const {sentryApp, error: disabled} = component; + const {sentryApp} = component; + const disabled = sentryAppComponentIsDisabled(component); const installation = sentryAppInstallations.find( i => i.app.uuid === sentryApp.uuid ); diff --git a/static/app/components/group/groupSummary.spec.tsx b/static/app/components/group/groupSummary.spec.tsx index 7ce89a68ba8c6f..010a073c42dda1 100644 --- a/static/app/components/group/groupSummary.spec.tsx +++ b/static/app/components/group/groupSummary.spec.tsx @@ -1,101 +1,19 @@ -import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import {render, screen} from 'sentry-test/reactTestingLibrary'; -import { - GroupSummary, - makeGroupSummaryQueryKey, -} from 'sentry/components/group/groupSummary'; -import {IssueCategory} from 'sentry/types/group'; +import {GroupSummary} from 'sentry/components/group/groupSummary'; describe('GroupSummary', function () { - beforeEach(() => { - MockApiClient.clearMockResponses(); - }); - - it('renders the collapsed group summary', async function () { - const groupId = '1'; - const organizationSlug = 'org-slug'; - - MockApiClient.addMockResponse({ - url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0], - method: 'POST', - body: { - groupId, - whatsWrong: 'Test whats wrong', - trace: 'Test trace', - possibleCause: 'Test possible cause', - headline: 'Test headline', - }, - }); - - MockApiClient.addMockResponse({ - 
url: `/issues/${groupId}/autofix/setup/`, - body: { - genAIConsent: {ok: true}, - integration: {ok: true}, - githubWriteIntegration: { - ok: true, - repos: [ - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'sentry', - external_id: '123', - }, - ], - }, - }, - }); - - render(<GroupSummary groupId={groupId} groupCategory={IssueCategory.ERROR} />); - - // Verify the summary loads and renders the collapsed view with TL;DR prefix - expect(await screen.findByText('TL;DR: Test headline')).toBeInTheDocument(); - expect( - screen.getByText('Details: Test whats wrong Test trace Test possible cause') - ).toBeInTheDocument(); - }); - - it('expands the summary when clicked', async function () { - const groupId = '1'; - const organizationSlug = 'org-slug'; - - MockApiClient.addMockResponse({ - url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0], - method: 'POST', - body: { - groupId, - whatsWrong: 'Test whats wrong', - trace: 'Test trace', - possibleCause: 'Test possible cause', - headline: 'Test headline', - }, - }); - - MockApiClient.addMockResponse({ - url: `/issues/${groupId}/autofix/setup/`, - body: { - genAIConsent: {ok: true}, - integration: {ok: true}, - githubWriteIntegration: { - ok: true, - repos: [ - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'sentry', - external_id: '123', - }, - ], - }, - }, - }); - - render(<GroupSummary groupId={groupId} groupCategory={IssueCategory.ERROR} />); - expect(await screen.findByText('TL;DR: Test headline')).toBeInTheDocument(); + const mockSummaryData = { + groupId: '1', + whatsWrong: 'Test whats wrong', + trace: 'Test trace', + possibleCause: 'Test possible cause', + headline: 'Test headline', + }; - await userEvent.click(screen.getByText('TL;DR: Test headline')); + it('renders the summary with all sections', function () { + render(<GroupSummary data={mockSummaryData} isError={false} isPending={false} />); - // Verify expanded view shows the individual sections expect(screen.getByText("What's wrong")).toBeInTheDocument(); expect(screen.getByText('Test whats wrong')).toBeInTheDocument(); expect(screen.getByText('In the trace')).toBeInTheDocument(); @@ -104,93 +22,40 @@ describe('GroupSummary', function () { expect(screen.getByText('Test possible cause')).toBeInTheDocument(); }); - it('does not render the summary if no consent', async function () { - const groupId = '1'; - const organizationSlug = 'org-slug'; + it('shows loading state', function () { + render(<GroupSummary data={undefined} isError={false} isPending />); - MockApiClient.addMockResponse({ - url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0], - method: 'POST', - body: { - groupId, - whatsWrong: 'Test whats wrong', - trace: 'Test trace', - possibleCause: 'Test possible cause', - headline: 'Test headline', - }, - }); - - const setupCall = MockApiClient.addMockResponse({ - url: `/issues/${groupId}/autofix/setup/`, - body: { - genAIConsent: {ok: false}, - integration: {ok: true}, - githubWriteIntegration: { - ok: true, - repos: [ - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'sentry', - external_id: '123', - }, - ], - }, - }, - }); - - render(<GroupSummary groupId={groupId} groupCategory={IssueCategory.ERROR} />); + // Should show loading placeholders + expect(screen.getAllByTestId('loading-placeholder')).toHaveLength(2); + }); - await waitFor(() => { - expect(setupCall).toHaveBeenCalled(); - }); + it('shows error state', function () { + render(<GroupSummary data={undefined} isError isPending={false} />); - 
expect(screen.queryByText('TL;DR: Test headline')).not.toBeInTheDocument(); - expect( - screen.queryByText('Details: Test whats wrong Test trace Test possible cause') - ).not.toBeInTheDocument(); + expect(screen.getByText('Error loading summary')).toBeInTheDocument(); }); - it('does not render the summary if the issue is not in the error category', function () { - const groupId = '1'; - const organizationSlug = 'org-slug'; + it('hides cards with no content', function () { + const dataWithNulls = { + ...mockSummaryData, + trace: null, + }; - MockApiClient.addMockResponse({ - url: makeGroupSummaryQueryKey(organizationSlug, groupId)[0], - method: 'POST', - body: { - groupId, - whatsWrong: 'Test whats wrong', - trace: 'Test trace', - possibleCause: 'Test possible cause', - headline: 'Test headline', - }, - }); + render(<GroupSummary data={dataWithNulls} isError={false} isPending={false} />); - MockApiClient.addMockResponse({ - url: `/issues/${groupId}/autofix/setup/`, - body: { - genAIConsent: {ok: true}, - integration: {ok: true}, - githubWriteIntegration: { - ok: true, - repos: [ - { - provider: 'integrations:github', - owner: 'getsentry', - name: 'sentry', - external_id: '123', - }, - ], - }, - }, - }); + expect(screen.getByText("What's wrong")).toBeInTheDocument(); + expect(screen.getByText('Test whats wrong')).toBeInTheDocument(); + expect(screen.queryByText('In the trace')).not.toBeInTheDocument(); + expect(screen.getByText('Possible cause')).toBeInTheDocument(); + expect(screen.getByText('Test possible cause')).toBeInTheDocument(); + }); - render(<GroupSummary groupId={groupId} groupCategory={IssueCategory.PERFORMANCE} />); + it('renders in preview mode', function () { + render( + <GroupSummary data={mockSummaryData} isError={false} isPending={false} preview /> + ); - expect(screen.queryByText('TL;DR: Test headline')).not.toBeInTheDocument(); - expect( - screen.queryByText('Details: Test whats wrong Test trace Test possible cause') - ).not.toBeInTheDocument(); + expect(screen.getByText("What's wrong")).toBeInTheDocument(); + expect(screen.getByText('Test whats wrong')).toBeInTheDocument(); }); }); diff --git a/static/app/components/group/groupSummary.tsx b/static/app/components/group/groupSummary.tsx index e810542c322e09..bda98a71497b83 100644 --- a/static/app/components/group/groupSummary.tsx +++ b/static/app/components/group/groupSummary.tsx @@ -1,24 +1,16 @@ -import {Fragment, useState} from 'react'; import styled from '@emotion/styled'; -import FeatureBadge from 'sentry/components/badge/featureBadge'; -import {Button} from 'sentry/components/button'; -import {useAutofixSetup} from 'sentry/components/events/autofix/useAutofixSetup'; -import Panel from 'sentry/components/panels/panel'; import Placeholder from 'sentry/components/placeholder'; -import {IconChevron, IconFatal, IconFocus, IconMegaphone, IconSpan} from 'sentry/icons'; +import {IconFatal, IconFocus, IconSpan} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {IssueCategory} from 'sentry/types/group'; -import marked, {singleLineRenderer} from 'sentry/utils/marked'; +import type {Event} from 'sentry/types/event'; +import type {Group} from 'sentry/types/group'; +import type {Project} from 'sentry/types/project'; +import marked from 'sentry/utils/marked'; import {type ApiQueryKey, useApiQuery} from 'sentry/utils/queryClient'; -import {useFeedbackForm} from 'sentry/utils/useFeedbackForm'; import useOrganization from 'sentry/utils/useOrganization'; - -interface 
GroupSummaryProps { - groupCategory: IssueCategory; - groupId: string; -} +import {useAiConfig} from 'sentry/views/issueDetails/streamline/hooks/useAiConfig'; interface GroupSummaryData { groupId: string; @@ -28,14 +20,6 @@ interface GroupSummaryData { whatsWrong?: string | null; } -const isSummaryEnabled = ( - hasGenAIConsent: boolean, - hideAiFeatures: boolean, - groupCategory: IssueCategory -) => { - return hasGenAIConsent && !hideAiFeatures && groupCategory === IssueCategory.ERROR; -}; - export const makeGroupSummaryQueryKey = ( organizationSlug: string, groupId: string @@ -44,50 +28,39 @@ export const makeGroupSummaryQueryKey = ( {method: 'POST'}, ]; -export function useGroupSummary(groupId: string, groupCategory: IssueCategory) { +export function useGroupSummary( + group: Group, + event: Event | null | undefined, + project: Project +) { const organization = useOrganization(); - // We piggyback and use autofix's genai consent check for now. - const { - data: autofixSetupData, - isPending: isAutofixSetupLoading, - isError: isAutofixSetupError, - } = useAutofixSetup({groupId}); - const hasGenAIConsent = autofixSetupData?.genAIConsent.ok ?? false; - const hideAiFeatures = organization.hideAiFeatures; + const aiConfig = useAiConfig(group, event, project); const queryData = useApiQuery<GroupSummaryData>( - makeGroupSummaryQueryKey(organization.slug, groupId), + makeGroupSummaryQueryKey(organization.slug, group.id), { staleTime: Infinity, // Cache the result indefinitely as it's unlikely to change if it's already computed - enabled: isSummaryEnabled(hasGenAIConsent, hideAiFeatures, groupCategory), + enabled: aiConfig.hasSummary, } ); return { ...queryData, - isPending: isAutofixSetupLoading || queryData.isPending, - isError: queryData.isError || isAutofixSetupError, - hasGenAIConsent, + isPending: aiConfig.isAutofixSetupLoading || queryData.isPending, + isError: queryData.isError, }; } -function GroupSummaryFeatureBadge() { - return ( - <StyledFeatureBadge - type="experimental" - title={t( - 'This feature is experimental and may produce inaccurate results. Please share feedback to help us improve the experience.' - )} - /> - ); -} - -export function GroupSummaryBody({ +export function GroupSummary({ data, isError, + isPending, + preview = false, }: { data: GroupSummaryData | undefined; isError: boolean; + isPending: boolean; + preview?: boolean; }) { const insightCards = [ { @@ -95,247 +68,106 @@ export function GroupSummaryBody({ title: t("What's wrong"), insight: data?.whatsWrong, icon: <IconFatal size="sm" />, + showWhenLoading: true, }, { id: 'trace', title: t('In the trace'), insight: data?.trace, icon: <IconSpan size="sm" />, + showWhenLoading: false, }, { id: 'possible_cause', title: t('Possible cause'), insight: data?.possibleCause, icon: <IconFocus size="sm" />, + showWhenLoading: true, }, - ].filter(card => card.insight); + ]; return ( - <Body> + <div data-testid="group-summary"> {isError ? 
<div>{t('Error loading summary')}</div> : null} - {data && ( - <Content> - <InsightGrid> - {insightCards.map(card => ( + <Content> + <InsightGrid> + {insightCards.map(card => { + // Hide the card if we're not loading and there's no insight + // Also hide if we're loading and the card shouldn't show when loading + if ((!isPending && !card.insight) || (isPending && !card.showWhenLoading)) { + return null; + } + + return ( <InsightCard key={card.id}> - <CardTitle> + <CardTitle preview={preview}> <CardTitleIcon>{card.icon}</CardTitleIcon> <CardTitleText>{card.title}</CardTitleText> </CardTitle> - <CardContent - dangerouslySetInnerHTML={{ - __html: marked(card.insight ?? ''), - }} - /> + <CardContentContainer> + <CardLineDecorationWrapper> + <CardLineDecoration /> + </CardLineDecorationWrapper> + {isPending ? ( + <CardContent> + <Placeholder height="1.5rem" /> + </CardContent> + ) : ( + card.insight && ( + <CardContent + dangerouslySetInnerHTML={{ + __html: marked( + preview + ? card.insight.replace(/\*\*/g, '') ?? '' + : card.insight ?? '' + ), + }} + /> + ) + )} + </CardContentContainer> </InsightCard> - ))} - </InsightGrid> - </Content> - )} - </Body> - ); -} - -export function GroupSummary({groupId, groupCategory}: GroupSummaryProps) { - const {data, isPending, isError, hasGenAIConsent} = useGroupSummary( - groupId, - groupCategory - ); - - const organization = useOrganization(); - const [expanded, setExpanded] = useState(false); - const openForm = useFeedbackForm(); - - if (!isSummaryEnabled(hasGenAIConsent, organization.hideAiFeatures, groupCategory)) { - return null; - } - - return ( - <Wrapper> - <StyledTitleRow onClick={() => setExpanded(!data ? false : !expanded)}> - <CollapsedRow> - <IconContainer> - <IconFocus /> - </IconContainer> - {isPending && <Placeholder height="19px" width="95%" />} - {isError ? <div>{t('Error loading summary')}</div> : null} - {data && !expanded && ( - <Fragment> - <HeadlinePreview - dangerouslySetInnerHTML={{ - __html: singleLineRenderer(`TL;DR: ${data.headline ?? ''}`), - }} - /> - <SummaryPreview - dangerouslySetInnerHTML={{ - __html: singleLineRenderer( - `Details: ${[data.whatsWrong, data.trace, data.possibleCause] - .filter(Boolean) - .join(' ') - .replaceAll('\n', ' ') - .replaceAll('-', '')}` - ), - }} - /> - </Fragment> - )} - {data && expanded && ( - <HeadlineContent - dangerouslySetInnerHTML={{ - __html: singleLineRenderer(`TL;DR: ${data.headline ?? ''}`), - }} - /> - )} - </CollapsedRow> - <IconContainerRight> - <IconChevron direction={expanded ? 'up' : 'down'} /> - </IconContainerRight> - </StyledTitleRow> - {expanded && ( - <Fragment> - <GroupSummaryBody data={data} isError={isError} /> - {openForm && !isPending && ( - <ButtonContainer> - <Button - onClick={() => { - openForm({ - messagePlaceholder: t( - 'How can we make this issue summary more useful?' 
- ), - tags: { - ['feedback.source']: 'issue_details_ai_issue_summary', - ['feedback.owner']: 'ml-ai', - }, - }); - }} - size="xs" - icon={<IconMegaphone />} - > - Give Feedback - </Button> - <GroupSummaryFeatureBadge /> - </ButtonContainer> - )} - </Fragment> - )} - </Wrapper> + ); + })} + </InsightGrid> + </Content> + </div> ); } -const Body = styled('div')` - padding: 0 ${space(2)} ${space(0.5)} ${space(2)}; -`; - -const HeadlinePreview = styled('span')` - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - margin-right: ${space(0.5)}; - flex-shrink: 0; - max-width: 92%; -`; - -const Wrapper = styled(Panel)` - margin-bottom: ${space(1)}; - padding: ${space(0.5)}; -`; - -const StyledTitleRow = styled('div')` - display: flex; - align-items: flex-start; - justify-content: space-between; - padding: ${space(1)} ${space(1)} ${space(1)} ${space(1)}; - border-radius: ${p => p.theme.borderRadius}; - - &:hover { - cursor: pointer; - background: ${p => p.theme.backgroundSecondary}; - } -`; - -const CollapsedRow = styled('div')` - display: flex; - width: 100%; - align-items: flex-start; - overflow: hidden; -`; - -const StyledFeatureBadge = styled(FeatureBadge)``; - -const HeadlineContent = styled('span')` - overflow-wrap: break-word; - p { - margin: 0; - } - code { - word-break: break-all; - } - width: 100%; -`; - const Content = styled('div')` display: flex; flex-direction: column; gap: ${space(1)}; `; -const ButtonContainer = styled('div')` - align-items: center; - display: flex; - margin: ${space(1)} 0 ${space(1)} ${space(2)}; -`; - -const IconContainer = styled('div')` - flex-shrink: 0; - margin-right: ${space(1)}; - margin-top: ${space(0.25)}; - max-height: ${space(2)}; -`; - -const IconContainerRight = styled('div')` - flex-shrink: 0; - margin-left: ${space(1)}; - margin-top: ${space(0.25)}; - max-height: ${space(2)}; -`; - const InsightGrid = styled('div')` display: flex; flex-direction: column; gap: ${space(1)}; - margin-top: ${space(1)}; `; const InsightCard = styled('div')` display: flex; flex-direction: column; - padding: ${space(0.5)}; border-radius: ${p => p.theme.borderRadius}; background: ${p => p.theme.background}; width: 100%; min-height: 0; `; -const SummaryPreview = styled('span')` - white-space: nowrap; - overflow: hidden; - text-overflow: ellipsis; - flex-grow: 1; - color: ${p => p.theme.subText}; -`; - -const CardTitle = styled('div')` +const CardTitle = styled('div')<{preview?: boolean}>` display: flex; align-items: center; gap: ${space(1)}; color: ${p => p.theme.subText}; - font-weight: ${p => p.theme.fontWeightBold}; padding-bottom: ${space(0.5)}; `; const CardTitleText = styled('p')` margin: 0; font-size: ${p => p.theme.fontSizeMedium}; + font-weight: ${p => p.theme.fontWeightBold}; `; const CardTitleIcon = styled('div')` @@ -344,12 +176,30 @@ const CardTitleIcon = styled('div')` color: ${p => p.theme.subText}; `; +const CardContentContainer = styled('div')` + display: flex; + align-items: center; + gap: ${space(1)}; +`; + +const CardLineDecorationWrapper = styled('div')` + display: flex; + width: 14px; + align-self: stretch; + justify-content: center; + flex-shrink: 0; + padding: 0.275rem 0; +`; + +const CardLineDecoration = styled('div')` + width: 2px; + align-self: stretch; + background-color: ${p => p.theme.border}; +`; + const CardContent = styled('div')` overflow-wrap: break-word; word-break: break-word; - padding-left: ${space(2)}; - border-left: 3px solid ${p => p.theme.border}; - margin-left: ${space(0.5)}; p { margin: 0; white-space: pre-wrap; 
@@ -357,4 +207,5 @@ const CardContent = styled('div')`
   code {
     word-break: break-all;
   }
+  flex: 1;
 `;
diff --git a/static/app/components/hovercard.tsx b/static/app/components/hovercard.tsx
index ef82873eae8e6c..058d1161495658 100644
--- a/static/app/components/hovercard.tsx
+++ b/static/app/components/hovercard.tsx
@@ -177,6 +177,7 @@ const Header = styled('div')`
 const Body = styled('div')`
   padding: ${space(2)};
   min-height: 30px;
+  word-wrap: break-word;
 `;
 
 const Divider = styled('div')`
diff --git a/static/app/components/interactionStateLayer.stories.tsx b/static/app/components/interactionStateLayer.stories.tsx
new file mode 100644
index 00000000000000..8b892285079921
--- /dev/null
+++ b/static/app/components/interactionStateLayer.stories.tsx
@@ -0,0 +1,66 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import storyBook from 'sentry/stories/storyBook';
+import {space} from 'sentry/styles/space';
+
+import Panel from './panels/panel';
+import JSXNode from './stories/jsxNode';
+import SideBySide from './stories/sideBySide';
+import InteractionStateLayer from './interactionStateLayer';
+
+export default storyBook(InteractionStateLayer, story => {
+  story('Getting Started', () => {
+    return (
+      <Fragment>
+        <p>
+          <JSXNode name="InteractionStateLayer" /> is a visual component that adds a
+          visible hover and active state. It
+          uses opacity to manage the visual state, which is more mindful of contrast
+          requirements. Our own components (e.g., <code>Button</code>,{' '}
+          <code>Checkbox</code>, etc.) use <JSXNode name="InteractionStateLayer" /> under
+          the hood. Here is an example of a <JSXNode name="Panel" /> with an interaction
+          state, and one without:
+        </p>
+
+        <SideBySide>
+          <StyledPanel tabIndex={1}>
+            <InteractionStateLayer />
+            <Activity />
+          </StyledPanel>
+
+          <StyledPanel tabIndex={1}>
+            <Activity />
+          </StyledPanel>
+        </SideBySide>
+      </Fragment>
+    );
+  });
+
+  story('Manual State', () => {
+    return (
+      <p>
+        <JSXNode name="InteractionStateLayer" /> uses normal CSS selectors for hover and
+        focus to add the visual effect. If this is not suitable, you can manually set the{' '}
+        <code>isHovered</code> and <code>isPressed</code> props.
+      </p>
+    );
+  });
+});
+
+const StyledPanel = styled(Panel)`
+  padding: ${space(1)};
+  max-width: 300px;
+`;
+
+function Activity() {
+  return (
+    <Fragment>
+      <h3>Summary of Activity</h3>
+      <p>
+        In the last three days you drank 7 bottles of ketchup, ate 67 jars of mayo, and
+        smelled 0.5 tablespoons of mustard. Relish stats are not available.
+ </p> + </Fragment> + ); +} diff --git a/static/app/components/issueDiff/index.spec.tsx b/static/app/components/issueDiff/index.spec.tsx index 51aacbd7939ff9..ea0d60a0d7363f 100644 --- a/static/app/components/issueDiff/index.spec.tsx +++ b/static/app/components/issueDiff/index.spec.tsx @@ -15,7 +15,7 @@ describe('IssueDiff', function () { const entries123Base = Entries123Base(); const api = new MockApiClient(); const organization = OrganizationFixture(); - const project = ProjectFixture({features: ['similarity-embeddings']}); + const project = ProjectFixture(); beforeEach(function () { MockApiClient.addMockResponse({ @@ -92,6 +92,7 @@ describe('IssueDiff', function () { action: 'PUSH', key: 'default', }} + hasSimilarityEmbeddingsProjectFeature /> ); diff --git a/static/app/components/issueDiff/index.tsx b/static/app/components/issueDiff/index.tsx index 1bbb3dd50a1e64..859368cbbe45bd 100644 --- a/static/app/components/issueDiff/index.tsx +++ b/static/app/components/issueDiff/index.tsx @@ -31,6 +31,7 @@ type Props = { targetIssueId: string; baseEventId?: string; className?: string; + hasSimilarityEmbeddingsProjectFeature?: boolean; organization?: Organization; shouldBeGrouped?: string; targetEventId?: string; @@ -67,12 +68,12 @@ class IssueDiff extends Component<Props, State> { baseEventId, targetEventId, organization, - project, shouldBeGrouped, location, + hasSimilarityEmbeddingsProjectFeature, } = this.props; const hasSimilarityEmbeddingsFeature = - project.features.includes('similarity-embeddings') || + hasSimilarityEmbeddingsProjectFeature || location.query.similarityEmbeddings === '1'; // Fetch component and event data diff --git a/static/app/components/keyValueData/index.tsx b/static/app/components/keyValueData/index.tsx index 2f7f99c4720dd0..ab9bcdb45ea1c5 100644 --- a/static/app/components/keyValueData/index.tsx +++ b/static/app/components/keyValueData/index.tsx @@ -239,7 +239,7 @@ const ContentWrapper = styled('div')<{ expandLeft?: boolean; }>` display: grid; - grid-template-columns: ${p => (p.expandLeft ? '0.95fr 0.41fr' : 'subgrid')}; + grid-template-columns: ${p => (p.expandLeft ? '2fr 0.8fr' : 'subgrid')}; grid-column: span 2; column-gap: ${space(1.5)}; padding: ${space(0.25)} ${space(0.75)}; diff --git a/static/app/components/loadingTriangle.tsx b/static/app/components/loadingTriangle.tsx index 2477a5069f6e27..d1a4ab986f831a 100644 --- a/static/app/components/loadingTriangle.tsx +++ b/static/app/components/loadingTriangle.tsx @@ -3,15 +3,19 @@ import styled from '@emotion/styled'; import sentryLoader from 'sentry-images/sentry-loader.svg'; import {space} from 'sentry/styles/space'; +import {useUser} from 'sentry/utils/useUser'; type Props = { children?: React.ReactNode; }; function LoadingTriangle({children}: Props) { + const user = useUser(); return ( <LoadingTriangleWrapper data-test-id="loading-indicator"> - <CircleBackground> + <CircleBackground + className={user?.options.theme ? 
`theme-${user.options.theme}` : ''} + > <img src={sentryLoader} /> </CircleBackground> {children && <div>{children}</div>} @@ -37,8 +41,19 @@ const CircleBackground = styled('div')` display: flex; align-items: center; justify-content: center; - background: ${p => p.theme.surface300}; + background: #fff; border-radius: 50%; + + &.theme-dark { + filter: invert(100%); + opacity: 0.8; + } + &.theme-system { + @media (prefers-color-scheme: dark) { + filter: invert(100%); + opacity: 0.8; + } + } `; export default LoadingTriangle; diff --git a/static/app/components/metrics/metricSearchBar.spec.tsx b/static/app/components/metrics/metricSearchBar.spec.tsx index ec8b3083e27c63..023be4ebef3604 100644 --- a/static/app/components/metrics/metricSearchBar.spec.tsx +++ b/static/app/components/metrics/metricSearchBar.spec.tsx @@ -55,20 +55,5 @@ describe('metricSearchBar', function () { screen.getByText('Invalid key. "span.module" is not a supported search key.'); expect(onChange).not.toHaveBeenCalled(); }); - it('allows insights specific filters when using an insights mri', async function () { - render( - <MetricSearchBar onChange={onChange} mri="d:spans/exclusive_time@millisecond" /> - ); - await screen.findByPlaceholderText('Filter by tags'); - await userEvent.type( - screen.getByPlaceholderText('Filter by tags'), - 'span.module:db' - ); - await userEvent.keyboard('{enter}'); - expect( - screen.queryByText('Invalid key. "span.module" is not a supported search key.') - ).not.toBeInTheDocument(); - expect(onChange).toHaveBeenCalledWith('span.module:"db"'); - }); }); }); diff --git a/static/app/components/metrics/metricSearchBar.tsx b/static/app/components/metrics/metricSearchBar.tsx index f3d8b58a7375c2..b73d67a407aa78 100644 --- a/static/app/components/metrics/metricSearchBar.tsx +++ b/static/app/components/metrics/metricSearchBar.tsx @@ -12,13 +12,10 @@ import {SavedSearchType, type TagCollection} from 'sentry/types/group'; import type {MRI} from 'sentry/types/metrics'; import {hasMetricsNewInputs} from 'sentry/utils/metrics/features'; import {getUseCaseFromMRI} from 'sentry/utils/metrics/mri'; -import type {MetricTag} from 'sentry/utils/metrics/types'; import {useMetricsTags} from 'sentry/utils/metrics/useMetricsTags'; import useApi from 'sentry/utils/useApi'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; -import {INSIGHTS_METRICS} from 'sentry/views/alerts/rules/metric/utils/isInsightsMetricAlert'; -import {SpanMetricsField} from 'sentry/views/insights/types'; import {ensureQuotedTextFilters} from 'sentry/views/metrics/utils'; import {useSelectedProjects} from 'sentry/views/metrics/utils/useSelectedProjects'; @@ -34,23 +31,6 @@ export interface MetricSearchBarProps const EMPTY_ARRAY = []; const EMPTY_SET = new Set<never>(); -const INSIGHTS_ADDITIONAL_TAG_FILTERS: MetricTag[] = [ - { - key: 'has', - }, - { - key: SpanMetricsField.SPAN_MODULE, - }, - { - key: SpanMetricsField.FILE_EXTENSION, - }, - { - key: SpanMetricsField.SPAN_SYSTEM, - }, - { - key: SpanMetricsField.SPAN_GROUP, - }, -]; export function MetricSearchBar({ mri, @@ -80,18 +60,9 @@ export function MetricSearchBar({ blockedTags ); - const additionalTags: MetricTag[] = useMemo( - () => - // Insights metrics allow the `has` filter. - // `span.module` is a discover field alias that does not appear in the metrics meta endpoint. - INSIGHTS_METRICS.includes(mri as string) ? 
INSIGHTS_ADDITIONAL_TAG_FILTERS : [], - [mri] - ); - const supportedTags: TagCollection = useMemo( - () => - [...tags, ...additionalTags].reduce((acc, tag) => ({...acc, [tag.key]: tag}), {}), - [tags, additionalTags] + () => tags.reduce((acc, tag) => ({...acc, [tag.key]: tag}), {}), + [tags] ); const searchConfig = useMemo( diff --git a/static/app/components/modals/diffModal.spec.tsx b/static/app/components/modals/diffModal.spec.tsx index c763244ee561e1..26417688ea4799 100644 --- a/static/app/components/modals/diffModal.spec.tsx +++ b/static/app/components/modals/diffModal.spec.tsx @@ -28,6 +28,10 @@ describe('DiffModal', function () { url: '/projects/123/project-slug/events/789/', body: [], }); + MockApiClient.addMockResponse({ + url: `/projects/org-slug/project-slug/`, + body: {features: []}, + }); const styledWrapper = styled(c => c.children); diff --git a/static/app/components/modals/diffModal.tsx b/static/app/components/modals/diffModal.tsx index e472aea915c71d..816f033f22777e 100644 --- a/static/app/components/modals/diffModal.tsx +++ b/static/app/components/modals/diffModal.tsx @@ -2,16 +2,32 @@ import {css} from '@emotion/react'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; import IssueDiff from 'sentry/components/issueDiff'; +import {useDetailedProject} from 'sentry/utils/useDetailedProject'; import useOrganization from 'sentry/utils/useOrganization'; type Props = ModalRenderProps & React.ComponentProps<typeof IssueDiff>; function DiffModal({className, Body, CloseButton, ...props}: Props) { const organization = useOrganization(); + const {project} = props; + const {data: projectData} = useDetailedProject({ + orgSlug: organization.slug, + projectSlug: project.slug, + }); + // similarity-embeddings feature is only available on project details + const similarityEmbeddingsProjectFeature = projectData?.features.includes( + 'similarity-embeddings' + ); + return ( <Body> <CloseButton /> - <IssueDiff className={className} organization={organization} {...props} /> + <IssueDiff + className={className} + organization={organization} + hasSimilarityEmbeddingsProjectFeature={similarityEmbeddingsProjectFeature} + {...props} + /> </Body> ); } diff --git a/static/app/components/modals/inviteMembersModal/index.tsx b/static/app/components/modals/inviteMembersModal/index.tsx index 1747499c0d5324..d32a3f20e192c5 100644 --- a/static/app/components/modals/inviteMembersModal/index.tsx +++ b/static/app/components/modals/inviteMembersModal/index.tsx @@ -82,7 +82,12 @@ function InviteMembersModal({ willInvite={willInvite} onSendInvites={sendInvites} > - {({sendInvites: inviteModalSendInvites, canSend, headerInfo}) => { + {({ + sendInvites: inviteModalSendInvites, + canSend: canSend, + headerInfo: headerInfo, + isOverMemberLimit: isOverMemberLimit, + }) => { return organization.features.includes('invite-members-new-modal') ? 
( <InviteMembersContext.Provider value={{ @@ -133,6 +138,9 @@ function InviteMembersModal({ headerInfo={headerInfo} invites={invites} inviteStatus={inviteStatus} + isOverMemberLimit={ + isOverMemberLimit && organization.features?.includes('invite-billing') + } member={memberResult.data} pendingInvites={pendingInvites} removeInviteRow={removeInviteRow} diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersModalview.spec.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersModalview.spec.tsx index 66c8d4a035bc64..2cdf934076f726 100644 --- a/static/app/components/modals/inviteMembersModal/inviteMembersModalview.spec.tsx +++ b/static/app/components/modals/inviteMembersModal/inviteMembersModalview.spec.tsx @@ -26,6 +26,29 @@ describe('InviteMembersModalView', function () { setRole: () => {}, setTeams: () => {}, willInvite: false, + isOverMemberLimit: false, + }; + + const overMemberLimitModalProps: ComponentProps<typeof InviteMembersModalView> = { + Footer: styledWrapper(), + addInviteRow: () => {}, + canSend: true, + closeModal: () => {}, + complete: false, + headerInfo: null, + inviteStatus: {}, + invites: [], + member: undefined, + pendingInvites: [], + removeInviteRow: () => {}, + reset: () => {}, + sendInvites: () => {}, + sendingInvites: false, + setEmails: () => {}, + setRole: () => {}, + setTeams: () => {}, + willInvite: true, + isOverMemberLimit: true, }; it('renders', function () { @@ -45,4 +68,10 @@ describe('InviteMembersModalView', function () { // Check that the Alert component renders with the provided error message expect(screen.getByText('This is an error message')).toBeInTheDocument(); }); + + it('renders when over member limit', function () { + render(<InviteMembersModalView {...overMemberLimitModalProps} />); + + expect(screen.getByText('Invite New Members')).toBeInTheDocument(); + }); }); diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersModalview.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersModalview.tsx index 9e9bedfbd9ec88..9d63b10faa7a96 100644 --- a/static/app/components/modals/inviteMembersModal/inviteMembersModalview.tsx +++ b/static/app/components/modals/inviteMembersModal/inviteMembersModalview.tsx @@ -1,5 +1,4 @@ -import type {ReactNode} from 'react'; -import {Fragment} from 'react'; +import {Fragment, type ReactNode, useEffect, useRef} from 'react'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; @@ -30,6 +29,7 @@ interface Props { headerInfo: ReactNode; inviteStatus: InviteStatus; invites: NormalizedInvite[]; + isOverMemberLimit: boolean; member: Member | undefined; pendingInvites: InviteRow[]; removeInviteRow: (index: number) => void; @@ -52,6 +52,7 @@ export default function InviteMembersModalView({ headerInfo, invites, inviteStatus, + isOverMemberLimit, member, pendingInvites, removeInviteRow, @@ -76,6 +77,16 @@ export default function InviteMembersModalView({ </Alert> ) : null; + const canSendRef = useRef(canSend); + + useEffect(() => { + if (isOverMemberLimit) { + setRole('billing', 0); + setTeams([], 0); + canSendRef.current = true; + } + }); + return ( <Fragment> {errorAlert} @@ -115,10 +126,10 @@ export default function InviteMembersModalView({ onChangeRole={value => setRole(value?.value, i)} onChangeTeams={opts => setTeams(opts ? 
opts.map(v => v.value) : [], i)} disableRemove={disableInputs || pendingInvites.length === 1} + isOverMemberLimit={isOverMemberLimit} /> ))} </Rows> - <AddButton disabled={disableInputs} size="sm" @@ -128,7 +139,6 @@ export default function InviteMembersModalView({ > {t('Add another')} </AddButton> - <Footer> <FooterContent> <div> @@ -140,7 +150,6 @@ export default function InviteMembersModalView({ willInvite={willInvite} /> </div> - <ButtonBar gap={1}> {complete ? ( <Fragment> @@ -172,7 +181,7 @@ export default function InviteMembersModalView({ size="sm" data-test-id="send-invites" priority="primary" - disabled={!canSend || !isValidInvites || disableInputs} + disabled={!canSendRef.current || !isValidInvites || disableInputs} onClick={sendInvites} /> </Fragment> diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx index 529502f202c633..cd73db279ef038 100644 --- a/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx +++ b/static/app/components/modals/inviteMembersModal/inviteRowControl.tsx @@ -23,6 +23,7 @@ type Props = { disabled: boolean; emails: string[]; inviteStatus: InviteStatus; + isOverMemberLimit: boolean; onChangeEmails: (emails: SelectOption[]) => void; onChangeRole: (role: SelectOption) => void; onChangeTeams: (teams: SelectOption[]) => void; @@ -52,6 +53,7 @@ function InviteRowControl({ onChangeRole, onChangeTeams, disableRemove, + isOverMemberLimit, }: Props) { const [inputValue, setInputValue] = useState(''); @@ -118,7 +120,7 @@ function InviteRowControl({ <RoleSelectControl aria-label={t('Role')} data-test-id="select-role" - disabled={disabled} + disabled={isOverMemberLimit ? true : disabled} value={role} roles={roleOptions} disableUnallowed={roleDisabledUnallowed} diff --git a/static/app/components/modals/memberInviteModalCustomization.tsx b/static/app/components/modals/memberInviteModalCustomization.tsx index 26e5be1f25293e..cbbaa49b0e4c33 100644 --- a/static/app/components/modals/memberInviteModalCustomization.tsx +++ b/static/app/components/modals/memberInviteModalCustomization.tsx @@ -3,7 +3,7 @@ import HookOrDefault from 'sentry/components/hookOrDefault'; export const InviteModalHook = HookOrDefault({ hookName: 'member-invite-modal:customization', defaultComponent: ({onSendInvites, children}) => - children({sendInvites: onSendInvites, canSend: true}), + children({sendInvites: onSendInvites, canSend: true, isOverMemberLimit: false}), }); export type InviteModalRenderFunc = React.ComponentProps< diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx index 268919cd4399a9..94543d87695743 100644 --- a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx +++ b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx @@ -97,6 +97,11 @@ describe('add to dashboard modal', () => { body: testDashboard, }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/releases/stats/', + body: [], + }); + eventsStatsMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/events-stats/', body: [], diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx index f03b399feaf527..7da42feb17ae34 100644 --- a/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx +++ 
b/static/app/components/modals/widgetBuilder/addToDashboardModal.tsx @@ -306,11 +306,11 @@ function AddToDashboardModal({ widgetLegendState={widgetLegendState} onLegendSelectChanged={() => {}} legendOptions={ - organization.features.includes('dashboards-releases-on-charts') && widgetLegendState.widgetRequiresLegendUnselection(widget) ? {selected: unselectedReleasesForCharts} : undefined } + disableFullscreen /> <IndexedEventsSelectionAlert widget={widget} /> diff --git a/static/app/components/modals/widgetViewerModal.spec.tsx b/static/app/components/modals/widgetViewerModal.spec.tsx index 33a1bc37770931..8edf9f12b1edd4 100644 --- a/static/app/components/modals/widgetViewerModal.spec.tsx +++ b/static/app/components/modals/widgetViewerModal.spec.tsx @@ -2,6 +2,7 @@ import ReactEchartsCore from 'echarts-for-react/lib/core'; import {DashboardFixture} from 'sentry-fixture/dashboard'; import {MetricsTotalCountByReleaseIn24h} from 'sentry-fixture/metrics'; import {ProjectFixture} from 'sentry-fixture/project'; +import {WidgetFixture} from 'sentry-fixture/widget'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; @@ -130,6 +131,11 @@ describe('Modals -> WidgetViewerModal', function () { body: [], }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/releases/stats/', + body: [], + }); + eventsMetaMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/events-meta/', body: {count: 33323612}, @@ -1479,4 +1485,35 @@ describe('Modals -> WidgetViewerModal', function () { }); }); }); + + describe('Span Widgets', function () { + beforeEach(function () { + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events/', + body: {}, + }); + MockApiClient.addMockResponse({ + url: '/organizations/org-slug/events-stats/', + body: {}, + }); + }); + + it('renders the Open in Explore button', async function () { + const mockWidget = WidgetFixture({ + widgetType: WidgetType.SPANS, + queries: [ + { + fields: ['span.description', 'avg(span.duration)'], + aggregates: ['avg(span.duration)'], + columns: ['span.description'], + conditions: '', + orderby: '', + name: '', + }, + ], + }); + await renderModal({initialData, widget: mockWidget}); + expect(await screen.findByText('Open in Explore')).toBeInTheDocument(); + }); + }); }); diff --git a/static/app/components/modals/widgetViewerModal.tsx b/static/app/components/modals/widgetViewerModal.tsx index bcb3e4f7064f54..f343822c3a8797 100644 --- a/static/app/components/modals/widgetViewerModal.tsx +++ b/static/app/components/modals/widgetViewerModal.tsx @@ -31,6 +31,7 @@ import {space} from 'sentry/styles/space'; import type {PageFilters, SelectValue} from 'sentry/types/core'; import type {Series} from 'sentry/types/echarts'; import type {Organization} from 'sentry/types/organization'; +import type {User} from 'sentry/types/user'; import {defined} from 'sentry/utils'; import {trackAnalytics} from 'sentry/utils/analytics'; import {getUtcDateString} from 'sentry/utils/dates'; @@ -54,9 +55,16 @@ import useApi from 'sentry/utils/useApi'; import {useLocation} from 'sentry/utils/useLocation'; import {useNavigate} from 'sentry/utils/useNavigate'; import useProjects from 'sentry/utils/useProjects'; +import {useUser} from 'sentry/utils/useUser'; +import {useUserTeams} from 'sentry/utils/useUserTeams'; import withPageFilters from 'sentry/utils/withPageFilters'; +import {checkUserHasEditAccess} from 'sentry/views/dashboards/detail'; import 
{DiscoverSplitAlert} from 'sentry/views/dashboards/discoverSplitAlert'; -import type {DashboardFilters, Widget} from 'sentry/views/dashboards/types'; +import type { + DashboardFilters, + DashboardPermissions, + Widget, +} from 'sentry/views/dashboards/types'; import {DisplayType, WidgetType} from 'sentry/views/dashboards/types'; import { dashboardFiltersToString, @@ -71,6 +79,7 @@ import { isUsingPerformanceScore, performanceScoreTooltip, } from 'sentry/views/dashboards/utils'; +import {getWidgetExploreUrl} from 'sentry/views/dashboards/utils/getWidgetExploreUrl'; import { SESSION_DURATION_ALERT, WidgetDescription, @@ -103,7 +112,9 @@ export interface WidgetViewerModalOptions { organization: Organization; widget: Widget; widgetLegendState: WidgetLegendSelectionState; + dashboardCreator?: User; dashboardFilters?: DashboardFilters; + dashboardPermissions?: DashboardPermissions; onEdit?: () => void; onMetricWidgetEdit?: (widget: Widget) => void; pageLinks?: string; @@ -193,6 +204,8 @@ function WidgetViewerModal(props: Props) { seriesResultsType, dashboardFilters, widgetLegendState, + dashboardPermissions, + dashboardCreator, } = props; const location = useLocation(); const {projects} = useProjects(); @@ -842,6 +855,18 @@ function WidgetViewerModal(props: Props) { } } + const currentUser = useUser(); + const {teams: userTeams} = useUserTeams(); + let hasEditAccess = true; + if (organization.features.includes('dashboards-edit-access')) { + hasEditAccess = checkUserHasEditAccess( + currentUser, + userTeams, + organization, + dashboardPermissions, + dashboardCreator + ); + } function renderWidgetViewer() { return ( <Fragment> @@ -1058,6 +1083,11 @@ function WidgetViewerModal(props: Props) { display_type: widget.displayType, }); }} + disabled={!hasEditAccess} + title={ + !hasEditAccess && + t('You do not have permission to edit this widget') + } > {t('Edit Widget')} </Button> @@ -1123,6 +1153,10 @@ function OpenButton({ openLabel = t('Open in Metrics'); path = getWidgetMetricsUrl(widget, selection, organization); break; + case WidgetType.SPANS: + openLabel = t('Open in Explore'); + path = getWidgetExploreUrl(widget, selection, organization); + break; case WidgetType.DISCOVER: default: openLabel = t('Open in Discover'); diff --git a/static/app/components/nav/config.tsx b/static/app/components/nav/config.tsx index b0b058936d6881..0c128bff586ea0 100644 --- a/static/app/components/nav/config.tsx +++ b/static/app/components/nav/config.tsx @@ -1,5 +1,5 @@ import {openHelpSearchModal} from 'sentry/actionCreators/modal'; -import type {NavConfig, NavSidebarItem} from 'sentry/components/nav/utils'; +import type {NavConfig} from 'sentry/components/nav/utils'; import { IconDashboard, IconGraph, @@ -15,26 +15,23 @@ import {t} from 'sentry/locale'; import ConfigStore from 'sentry/stores/configStore'; import type {Organization} from 'sentry/types/organization'; import {getDiscoverLandingUrl} from 'sentry/utils/discover/urls'; -import {MODULE_BASE_URLS} from 'sentry/views/insights/common/utils/useModuleURL'; -import {MODULE_SIDEBAR_TITLE as MODULE_TITLE_HTTP} from 'sentry/views/insights/http/settings'; import { AI_LANDING_SUB_PATH, - AI_LANDING_TITLE, + AI_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/ai/settings'; import { BACKEND_LANDING_SUB_PATH, - BACKEND_LANDING_TITLE, + BACKEND_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/backend/settings'; import { FRONTEND_LANDING_SUB_PATH, - FRONTEND_LANDING_TITLE, + FRONTEND_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/frontend/settings'; import { 
MOBILE_LANDING_SUB_PATH, - MOBILE_LANDING_TITLE, + MOBILE_SIDEBAR_LABEL, } from 'sentry/views/insights/pages/mobile/settings'; import {DOMAIN_VIEW_BASE_URL} from 'sentry/views/insights/pages/settings'; -import {INSIGHTS_BASE_URL, MODULE_TITLES} from 'sentry/views/insights/settings'; import {getSearchForIssueGroup, IssueGroup} from 'sentry/views/issueList/utils'; /** @@ -45,91 +42,13 @@ import {getSearchForIssueGroup, IssueGroup} from 'sentry/views/issueList/utils'; */ export function createNavConfig({organization}: {organization: Organization}): NavConfig { const prefix = `organizations/${organization.slug}`; - const insightsPrefix = `${prefix}/${INSIGHTS_BASE_URL}`; - const hasPerfDomainViews = organization.features.includes('insights-domain-view'); - - const insights: NavSidebarItem = { - label: t('Insights'), - icon: <IconGraph />, - feature: {features: 'insights-entry-points'}, - submenu: [ - { - label: MODULE_TITLE_HTTP, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.http}/`, - }, - {label: MODULE_TITLES.db, to: `/${insightsPrefix}/${MODULE_BASE_URLS.db}/`}, - { - label: MODULE_TITLES.resource, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.resource}/`, - }, - { - label: MODULE_TITLES.app_start, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.app_start}/`, - }, - { - label: MODULE_TITLES['mobile-screens'], - to: `/${insightsPrefix}/${MODULE_BASE_URLS['mobile-screens']}/`, - feature: {features: 'insights-mobile-screens-module'}, - }, - { - label: MODULE_TITLES.vital, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.vital}/`, - }, - { - label: MODULE_TITLES.cache, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.cache}/`, - }, - { - label: MODULE_TITLES.queue, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.queue}/`, - }, - { - label: MODULE_TITLES.ai, - to: `/${insightsPrefix}/${MODULE_BASE_URLS.ai}/`, - feature: {features: 'insights-entry-points'}, - }, - ], - }; - - const perf: NavSidebarItem = { - label: t('Perf.'), - to: '/performance/', - icon: <IconLightning />, - feature: { - features: 'performance-view', - hookName: 'feature-disabled:performance-sidebar-item', - }, - }; - - const perfDomainViews: NavSidebarItem = { - label: t('Perf.'), - icon: <IconLightning />, - feature: {features: ['insights-domain-view', 'performance-view']}, - submenu: [ - { - label: FRONTEND_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${FRONTEND_LANDING_SUB_PATH}/`, - }, - { - label: BACKEND_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${BACKEND_LANDING_SUB_PATH}/`, - }, - { - label: AI_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${AI_LANDING_SUB_PATH}/`, - }, - { - label: MOBILE_LANDING_TITLE, - to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${MOBILE_LANDING_SUB_PATH}/`, - }, - ], - }; return { main: [ { label: t('Issues'), icon: <IconIssues />, + analyticsKey: 'issues', submenu: [ { label: t('All'), @@ -154,10 +73,16 @@ export function createNavConfig({organization}: {organization: Organization}): N {label: t('Feedback'), to: `/${prefix}/feedback/`}, ], }, - {label: t('Projects'), to: `/${prefix}/projects/`, icon: <IconProject />}, + { + label: t('Projects'), + analyticsKey: 'projects', + to: `/${prefix}/projects/`, + icon: <IconProject />, + }, { label: t('Explore'), icon: <IconSearch />, + analyticsKey: 'explore', submenu: [ { label: t('Traces'), @@ -198,9 +123,43 @@ export function createNavConfig({organization}: {organization: Organization}): N {label: t('Crons'), to: `/${prefix}/crons/`}, ], }, - ...(hasPerfDomainViews ? 
[perfDomainViews, perf] : [insights, perf]), + { + label: t('Insights'), + icon: <IconGraph />, + analyticsKey: 'insights-domains', + feature: {features: ['performance-view']}, + submenu: [ + { + label: FRONTEND_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${FRONTEND_LANDING_SUB_PATH}/`, + }, + { + label: BACKEND_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${BACKEND_LANDING_SUB_PATH}/`, + }, + { + label: MOBILE_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${MOBILE_LANDING_SUB_PATH}/`, + }, + { + label: AI_SIDEBAR_LABEL, + to: `/${prefix}/${DOMAIN_VIEW_BASE_URL}/${AI_LANDING_SUB_PATH}/`, + }, + ], + }, + { + label: t('Perf.'), + to: '/performance/', + analyticsKey: 'performance', + icon: <IconLightning />, + feature: { + features: 'performance-view', + hookName: 'feature-disabled:performance-sidebar-item', + }, + }, { label: t('Boards'), + analyticsKey: 'customizable-dashboards', to: '/dashboards/', icon: <IconDashboard />, feature: { @@ -209,12 +168,18 @@ export function createNavConfig({organization}: {organization: Organization}): N requireAll: false, }, }, - {label: t('Alerts'), to: `/${prefix}/alerts/rules/`, icon: <IconSiren />}, + { + label: t('Alerts'), + analyticsKey: 'alerts', + to: `/${prefix}/alerts/rules/`, + icon: <IconSiren />, + }, ], footer: [ { label: t('Help'), icon: <IconQuestion />, + analyticsKey: 'help', dropdown: [ { key: 'search', @@ -242,6 +207,7 @@ export function createNavConfig({organization}: {organization: Organization}): N }, { label: t('Settings'), + analyticsKey: 'settings', to: `/settings/${organization.slug}/`, icon: <IconSettings />, }, diff --git a/static/app/components/nav/index.spec.tsx b/static/app/components/nav/index.spec.tsx index 0ca7e1efb43bde..2572f9e5d50cbb 100644 --- a/static/app/components/nav/index.spec.tsx +++ b/static/app/components/nav/index.spec.tsx @@ -2,7 +2,13 @@ import {LocationFixture} from 'sentry-fixture/locationFixture'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {RouterFixture} from 'sentry-fixture/routerFixture'; -import {getAllByRole, render, screen} from 'sentry-test/reactTestingLibrary'; +import {trackAnalytics} from 'sentry/utils/analytics'; + +jest.mock('sentry/utils/analytics', () => ({ + trackAnalytics: jest.fn(), +})); + +import {getAllByRole, render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; import Nav from 'sentry/components/nav'; @@ -101,7 +107,9 @@ describe('Nav', function () { beforeEach(() => { render(<Nav />, { router: RouterFixture({ - location: LocationFixture({pathname: '/organizations/org-slug/insights/http/'}), + location: LocationFixture({ + pathname: '/organizations/org-slug/insights/backend/', + }), }), organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}), }); @@ -116,17 +124,8 @@ describe('Nav', function () { it('includes expected submenu items', function () { const container = screen.getByRole('navigation', {name: 'Secondary Navigation'}); const links = getAllByRole(container, 'link'); - expect(links).toHaveLength(8); - [ - 'Requests', - 'Queries', - 'Assets', - 'App Starts', - 'Web Vitals', - 'Caches', - 'Queues', - 'LLM Monitoring', - ].forEach((title, index) => { + expect(links).toHaveLength(4); + ['Frontend', 'Backend', 'Mobile', 'AI'].forEach((title, index) => { expect(links[index]).toHaveAccessibleName(title); }); }); @@ -165,4 +164,26 @@ describe('Nav', function () { }); }); }); + + describe('analytics', function () { + beforeEach(() => { + render(<Nav />, { + router: RouterFixture({ + location: 
LocationFixture({pathname: '/organizations/org-slug/traces/'}), + }), + organization: OrganizationFixture({features: ALL_AVAILABLE_FEATURES}), + }); + }); + + it('tracks primary sidebar item', async function () { + const issues = screen.getByRole('link', {name: 'Issues'}); + await userEvent.click(issues); + expect(trackAnalytics).toHaveBeenCalledWith( + 'growth.clicked_sidebar', + expect.objectContaining({ + item: 'issues', + }) + ); + }); + }); }); diff --git a/static/app/components/nav/sidebar.tsx b/static/app/components/nav/sidebar.tsx index f2f47ec04d4877..2911ca134206ce 100644 --- a/static/app/components/nav/sidebar.tsx +++ b/static/app/components/nav/sidebar.tsx @@ -1,4 +1,5 @@ -import {Fragment} from 'react'; +import type {MouseEventHandler} from 'react'; +import {Fragment, useCallback} from 'react'; import styled from '@emotion/styled'; import Feature from 'sentry/components/acl/feature'; @@ -18,8 +19,10 @@ import { } from 'sentry/components/nav/utils'; import SidebarDropdown from 'sentry/components/sidebar/sidebarDropdown'; import {space} from 'sentry/styles/space'; +import {trackAnalytics} from 'sentry/utils/analytics'; import theme from 'sentry/utils/theme'; import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; function Sidebar() { return ( @@ -93,19 +96,28 @@ const SidebarItemList = styled('ul')` interface SidebarItemProps { item: NavSidebarItem; + children?: React.ReactNode; + onClick?: MouseEventHandler<HTMLElement>; } function SidebarItem({item}: SidebarItemProps) { const to = resolveNavItemTo(item); const SidebarChild = to ? SidebarLink : SidebarMenu; + const organization = useOrganization(); const FeatureGuard = item.feature ? Feature : Fragment; const featureGuardProps: any = item.feature ?? {}; + const recordAnalytics = useCallback( + () => + trackAnalytics('growth.clicked_sidebar', {item: item.analyticsKey, organization}), + [organization, item.analyticsKey] + ); + return ( <FeatureGuard {...featureGuardProps}> <SidebarItemWrapper> - <SidebarChild item={item} key={item.label}> + <SidebarChild item={item} key={item.label} onClick={recordAnalytics}> {item.icon} <span>{item.label}</span> </SidebarChild> @@ -127,7 +139,7 @@ const NavButton = styled('button')` ${linkStyles} `; -function SidebarLink({children, item}: SidebarItemProps & {children: React.ReactNode}) { +function SidebarLink({children, item, onClick}: SidebarItemProps) { const location = useLocation(); const isActive = isNavItemActive(item, location); const isSubmenuActive = isSubmenuItemActive(item, location); @@ -142,6 +154,7 @@ function SidebarLink({children, item}: SidebarItemProps & {children: React.React return ( <NavLink {...linkProps} + onClick={onClick} className={isActive || isSubmenuActive ? 'active' : undefined} aria-current={isActive ? 
'page' : undefined} > @@ -151,7 +164,7 @@ function SidebarLink({children, item}: SidebarItemProps & {children: React.React ); } -function SidebarMenu({item, children}: SidebarItemProps & {children: React.ReactNode}) { +function SidebarMenu({item, children, onClick}: SidebarItemProps) { if (!item.dropdown) { throw new Error( `Nav item "${item.label}" must have either a \`dropdown\` or \`to\` value!` @@ -162,7 +175,13 @@ function SidebarMenu({item, children}: SidebarItemProps & {children: React.React position="right-end" trigger={(props, isOpen) => { return ( - <NavButton {...props}> + <NavButton + {...props} + onClick={event => { + onClick?.(event); + props.onClick?.(event); + }} + > <InteractionStateLayer hasSelectedBackground={isOpen} /> {children} </NavButton> diff --git a/static/app/components/nav/utils.tsx b/static/app/components/nav/utils.tsx index 98193be8222d9f..498162e743828d 100644 --- a/static/app/components/nav/utils.tsx +++ b/static/app/components/nav/utils.tsx @@ -33,6 +33,10 @@ export interface NavItemLayout<Item extends NavSidebarItem | NavSubmenuItem> { * SidebarItem is a top-level NavItem which is always displayed in the app sidebar */ export interface NavSidebarItem extends NavItem { + /** + * A unique identifier string, used as a key for analytics + */ + analyticsKey: string; /** * The icon to render in the sidebar */ diff --git a/static/app/components/onboarding/gettingStartedDoc/types.ts b/static/app/components/onboarding/gettingStartedDoc/types.ts index 81f16159610cad..54e05444dd473f 100644 --- a/static/app/components/onboarding/gettingStartedDoc/types.ts +++ b/static/app/components/onboarding/gettingStartedDoc/types.ts @@ -62,6 +62,9 @@ export interface DocsParams< * The page where the docs are being displayed */ docsLocation?: DocsPageLocation; + featureFlagOptions?: { + integration: string; + }; feedbackOptions?: { email?: boolean; name?: boolean; @@ -106,6 +109,7 @@ export interface Docs<PlatformOptions extends BasePlatformOptions = BasePlatform onboarding: OnboardingConfig<PlatformOptions>; crashReportOnboarding?: OnboardingConfig<PlatformOptions>; customMetricsOnboarding?: OnboardingConfig<PlatformOptions>; + featureFlagOnboarding?: OnboardingConfig<PlatformOptions>; feedbackOnboardingCrashApi?: OnboardingConfig<PlatformOptions>; feedbackOnboardingNpm?: OnboardingConfig<PlatformOptions>; performanceOnboarding?: OnboardingConfig<PlatformOptions>; @@ -122,4 +126,5 @@ export type ConfigType = | 'crashReportOnboarding' | 'replayOnboarding' | 'replayOnboardingJsLoader' - | 'customMetricsOnboarding'; + | 'customMetricsOnboarding' + | 'featureFlagOnboarding'; diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useLoadGettingStarted.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useLoadGettingStarted.tsx index 74d29988835926..9c66df925109cf 100644 --- a/static/app/components/onboarding/gettingStartedDoc/utils/useLoadGettingStarted.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useLoadGettingStarted.tsx @@ -3,6 +3,7 @@ import * as Sentry from '@sentry/react'; import type {Docs} from 'sentry/components/onboarding/gettingStartedDoc/types'; import { + featureFlagOnboardingPlatforms, feedbackOnboardingPlatforms, replayPlatforms, withPerformanceOnboarding, @@ -15,7 +16,7 @@ import {useProjectKeys} from 'sentry/utils/useProjectKeys'; type Props = { orgSlug: Organization['slug']; platform: PlatformIntegration; - productType?: 'feedback' | 'replay' | 'performance'; + productType?: 'feedback' | 'replay' | 'performance' | 
'featureFlags'; projSlug?: Project['slug']; }; @@ -45,7 +46,10 @@ export function useLoadGettingStarted({ !platformPath || (productType === 'replay' && !replayPlatforms.includes(platform.id)) || (productType === 'performance' && !withPerformanceOnboarding.has(platform.id)) || - (productType === 'feedback' && !feedbackOnboardingPlatforms.includes(platform.id)) + (productType === 'feedback' && + !feedbackOnboardingPlatforms.includes(platform.id)) || + (productType === 'featureFlags' && + !featureFlagOnboardingPlatforms.includes(platform.id)) ) { setModule('none'); return; diff --git a/static/app/components/onboarding/productSelection.tsx b/static/app/components/onboarding/productSelection.tsx index d8d1798c5d04ce..ad91ebd536de2a 100644 --- a/static/app/components/onboarding/productSelection.tsx +++ b/static/app/components/onboarding/productSelection.tsx @@ -82,7 +82,7 @@ function getDisabledProducts(organization: Organization): DisabledProducts { // Since the ProductSelection component is rendered in the onboarding/project creation flow only, it is ok to have this list here // NOTE: Please keep the prefix in alphabetical order export const platformProductAvailability = { - android: [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], + android: [ProductSolution.PERFORMANCE_MONITORING], 'apple-ios': [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], 'apple-macos': [ProductSolution.PERFORMANCE_MONITORING, ProductSolution.PROFILING], bun: [ProductSolution.PERFORMANCE_MONITORING], @@ -119,10 +119,6 @@ export const platformProductAvailability = { ProductSolution.PERFORMANCE_MONITORING, ProductSolution.SESSION_REPLAY, ], - 'javascript-nuxt': [ - ProductSolution.PERFORMANCE_MONITORING, - ProductSolution.SESSION_REPLAY, - ], 'javascript-angular': [ ProductSolution.PERFORMANCE_MONITORING, ProductSolution.SESSION_REPLAY, diff --git a/static/app/components/onboardingWizard/newSidebar.tsx b/static/app/components/onboardingWizard/newSidebar.tsx index 2c9504b2f5fdf6..72fa11a44e48d9 100644 --- a/static/app/components/onboardingWizard/newSidebar.tsx +++ b/static/app/components/onboardingWizard/newSidebar.tsx @@ -13,7 +13,7 @@ import {Chevron} from 'sentry/components/chevron'; import InteractionStateLayer from 'sentry/components/interactionStateLayer'; import SkipConfirm from 'sentry/components/onboardingWizard/skipConfirm'; import type {useOnboardingTasks} from 'sentry/components/onboardingWizard/useOnboardingTasks'; -import {findCompleteTasks, taskIsDone} from 'sentry/components/onboardingWizard/utils'; +import {taskIsDone} from 'sentry/components/onboardingWizard/utils'; import ProgressRing from 'sentry/components/progressRing'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; import type {CommonSidebarProps} from 'sentry/components/sidebar/types'; @@ -52,9 +52,7 @@ const orderedBeyondBasicsTasks = [ ]; function groupTasksByCompletion(tasks: OnboardingTask[]) { - const [completedTasks, incompletedTasks] = partition(tasks, task => - findCompleteTasks(task) - ); + const [completedTasks, incompletedTasks] = partition(tasks, task => taskIsDone(task)); return { completedTasks, incompletedTasks, diff --git a/static/app/components/onboardingWizard/skipConfirm.tsx b/static/app/components/onboardingWizard/skipConfirm.tsx index aafe6287ba87de..4820c9fe8c4b5a 100644 --- a/static/app/components/onboardingWizard/skipConfirm.tsx +++ b/static/app/components/onboardingWizard/skipConfirm.tsx @@ -87,7 +87,7 @@ const Confirmation = styled(({onDismiss, onSkip, visible: _, 
...props}: ConfirmP align-items: center; flex-direction: column; justify-content: center; - background: rgba(255, 255, 255, 0.9); + background: ${p => p.theme.surface200}; animation: ${fadeIn} 200ms normal forwards; font-size: ${p => p.theme.fontSizeMedium}; diff --git a/static/app/components/onboardingWizard/taskConfig.tsx b/static/app/components/onboardingWizard/taskConfig.tsx index 8d746f7a6654bf..74e69ba9329289 100644 --- a/static/app/components/onboardingWizard/taskConfig.tsx +++ b/static/app/components/onboardingWizard/taskConfig.tsx @@ -313,7 +313,7 @@ export function getOnboardingTasks({ display: true, pendingTitle: t('Awaiting an error for this project.'), SupplementComponent: ({task}: OnboardingSupplementComponentProps) => { - if (!hasQuickStartUpdatesFeature(organization)) { + if (hasQuickStartUpdatesFeature(organization)) { return null; } if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) { diff --git a/static/app/components/organizations/projectPageFilter/index.spec.tsx b/static/app/components/organizations/projectPageFilter/index.spec.tsx index dcd45ef9612bd2..4a116187aae2d6 100644 --- a/static/app/components/organizations/projectPageFilter/index.spec.tsx +++ b/static/app/components/organizations/projectPageFilter/index.spec.tsx @@ -147,14 +147,12 @@ describe('ProjectPageFilter', function () { // Move focus to "Bookmark Project" button await userEvent.keyboard('{ArrowRight}'); - expect( - within(optionOne).getByRole('button', {name: 'Bookmark Project'}) - ).toHaveFocus(); + expect(within(optionOne).getByRole('button', {name: 'Bookmark'})).toHaveFocus(); // Activate the button await userEvent.keyboard('{Enter}'); expect( - within(optionOne).getByRole('button', {name: 'Bookmark Project'}) + within(optionOne).getByRole('button', {name: 'Remove Bookmark'}) ).toHaveAttribute('aria-pressed', 'true'); expect(mockApi).toHaveBeenCalledWith( `/projects/${organization.slug}/project-1/`, diff --git a/static/app/components/pageHeadingQuestionTooltip.tsx b/static/app/components/pageHeadingQuestionTooltip.tsx index 2cedc04b153c64..e2b6e056779837 100644 --- a/static/app/components/pageHeadingQuestionTooltip.tsx +++ b/static/app/components/pageHeadingQuestionTooltip.tsx @@ -13,17 +13,22 @@ interface PageHeadingQuestionTooltipProps extends TooltipProps { * The link to the documentation for this page. */ docsUrl: string; + /** + * The label to use for the external link. + */ + linkLabel?: React.ReactNode; } export function PageHeadingQuestionTooltip({ docsUrl, title, + linkLabel, ...props }: PageHeadingQuestionTooltipProps) { const contents = ( <Container> {title} - <ExternalLink href={docsUrl}>{t('Read the Docs')}</ExternalLink> + <ExternalLink href={docsUrl}>{linkLabel ?? t('Read the Docs')}</ExternalLink> </Container> ); diff --git a/static/app/components/passwordStrength.tsx b/static/app/components/passwordStrength.tsx index ad88fd838be76d..627c310a191424 100644 --- a/static/app/components/passwordStrength.tsx +++ b/static/app/components/passwordStrength.tsx @@ -1,19 +1,12 @@ import {Fragment} from 'react'; -import {render} from 'react-dom'; import {css} from '@emotion/react'; import styled from '@emotion/styled'; -import throttle from 'lodash/throttle'; import zxcvbn from 'zxcvbn'; import {tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import theme from 'sentry/utils/theme'; -/** - * NOTE: Do not import this component synchronously. The zxcvbn library is - * relatively large. This component should be loaded async as a split chunk. 
- */ - /** * The maximum score that zxcvbn reports */ @@ -34,7 +27,11 @@ type Props = { labels?: [string, string, string, string, string]; }; -function PasswordStrength({ +/** + * NOTE: Do not import this component synchronously. The zxcvbn library is + * relatively large. This component should be loaded async as a split chunk. + */ +export function PasswordStrength({ value, labels = ['Very Weak', 'Very Weak', 'Weak', 'Strong', 'Very Strong'], colors = [theme.red300, theme.red300, theme.yellow300, theme.green300, theme.green300], @@ -96,20 +93,3 @@ const StrengthLabel = styled('div')` const ScoreText = styled('strong')` color: ${p => p.theme.black}; `; - -export default PasswordStrength; - -/** - * This is a shim that allows the password strength component to be used - * outside of our main react application. Mostly useful since all of our - * registration pages aren't in the react app. - */ -export const attachTo = ({input, element}) => - element && - input && - input.addEventListener( - 'input', - throttle(e => { - render(<PasswordStrength value={e.target.value} />, element); - }) - ); diff --git a/static/app/components/performance/spanSearchQueryBuilder.tsx b/static/app/components/performance/spanSearchQueryBuilder.tsx index 887c64a9580817..8bef5f125bbad9 100644 --- a/static/app/components/performance/spanSearchQueryBuilder.tsx +++ b/static/app/components/performance/spanSearchQueryBuilder.tsx @@ -30,6 +30,7 @@ interface SpanSearchQueryBuilderProps { searchSource: string; datetime?: PageFilters['datetime']; disableLoadingTags?: boolean; + onBlur?: (query: string, state: CallbackSearchState) => void; onSearch?: (query: string, state: CallbackSearchState) => void; placeholder?: string; projects?: PageFilters['projects']; @@ -50,15 +51,18 @@ const getFunctionTags = (supportedAggregates?: AggregationKey[]) => { }, {}); }; -const getSpanFieldDefinition = (key: string, kind?: FieldKind) => { - return getFieldDefinition(key, 'span', kind); -}; +function getSpanFieldDefinitionFunction(tags: TagCollection) { + return (key: string) => { + return getFieldDefinition(key, 'span', tags[key]?.kind); + }; +} export function SpanSearchQueryBuilder({ initialQuery, searchSource, datetime, onSearch, + onBlur, placeholder, projects, }: SpanSearchQueryBuilderProps) { @@ -133,8 +137,9 @@ export function SpanSearchQueryBuilder({ placeholder={placeholderText} filterKeys={filterTags} initialQuery={initialQuery} - fieldDefinitionGetter={getSpanFieldDefinition} + fieldDefinitionGetter={getSpanFieldDefinitionFunction(filterTags)} onSearch={onSearch} + onBlur={onBlur} searchSource={searchSource} filterKeySections={filterKeySections} getTagValues={getSpanFilterTagValues} @@ -148,6 +153,7 @@ export function SpanSearchQueryBuilder({ interface EAPSpanSearchQueryBuilderProps extends SpanSearchQueryBuilderProps { numberTags: TagCollection; stringTags: TagCollection; + getFilterTokenWarning?: (key: string) => React.ReactNode; supportedAggregates?: AggregationKey[]; } @@ -155,9 +161,11 @@ export function EAPSpanSearchQueryBuilder({ initialQuery, placeholder, onSearch, + onBlur, searchSource, numberTags, stringTags, + getFilterTokenWarning, supportedAggregates = [], }: EAPSpanSearchQueryBuilderProps) { const api = useApi(); @@ -179,7 +187,12 @@ export function EAPSpanSearchQueryBuilder({ SPANS_FILTER_KEY_SECTIONS.flatMap(section => section.children) ); return [ - ...SPANS_FILTER_KEY_SECTIONS, + ...SPANS_FILTER_KEY_SECTIONS.map(section => { + return { + ...section, + children: section.children.filter(key => 
stringTags.hasOwnProperty(key)), + }; + }), { value: 'custom_fields', label: 'Custom Tags', @@ -219,8 +232,10 @@ export function EAPSpanSearchQueryBuilder({ placeholder={placeholderText} filterKeys={tags} initialQuery={initialQuery} - fieldDefinitionGetter={getSpanFieldDefinition} + fieldDefinitionGetter={getSpanFieldDefinitionFunction(tags)} onSearch={onSearch} + onBlur={onBlur} + getFilterTokenWarning={getFilterTokenWarning} searchSource={searchSource} filterKeySections={filterKeySections} getTagValues={getSpanFilterTagValues} diff --git a/static/app/components/performance/waterfall/constants.tsx b/static/app/components/performance/waterfall/constants.tsx index 0f06ed639aea6f..adb530ee222fc5 100644 --- a/static/app/components/performance/waterfall/constants.tsx +++ b/static/app/components/performance/waterfall/constants.tsx @@ -10,35 +10,35 @@ export enum SpanBarType { AUTOGROUPED_AND_AFFECTED = 'autogrouped_and_affected', } -type SpanBarColours = { +type SpanBarColors = { alternate: string; - insetTextColour: string; + insetTextColor: string; primary: string; }; -// TODO: Need to eventually add dark mode colours as well -export function getSpanBarColours( +// TODO: Need to eventually add dark mode colors as well +export function getSpanBarColors( spanBarType: SpanBarType | undefined, theme: Theme -): SpanBarColours { +): SpanBarColors { switch (spanBarType) { case SpanBarType.GAP: - return {primary: '#dedae3', alternate: '#f4f2f7', insetTextColour: theme.gray300}; + return {primary: '#dedae3', alternate: '#f4f2f7', insetTextColor: theme.gray300}; case SpanBarType.AFFECTED: - return {primary: '#f55459', alternate: '#faa9ac', insetTextColour: theme.white}; + return {primary: '#f55459', alternate: '#faa9ac', insetTextColor: theme.white}; case SpanBarType.AUTOGROUPED: return { primary: theme.blue300, alternate: '#d1dff9', - insetTextColour: theme.gray300, + insetTextColor: theme.gray300, }; case SpanBarType.AUTOGROUPED_AND_AFFECTED: return { primary: '#f55459', alternate: '#faa9ac', - insetTextColour: theme.white, + insetTextColor: theme.white, }; default: - return {primary: '', alternate: '', insetTextColour: theme.white}; + return {primary: '', alternate: '', insetTextColor: theme.white}; } } diff --git a/static/app/components/performance/waterfall/rowBar.tsx b/static/app/components/performance/waterfall/rowBar.tsx index 9ad547f4c459f5..3c61b935478bdc 100644 --- a/static/app/components/performance/waterfall/rowBar.tsx +++ b/static/app/components/performance/waterfall/rowBar.tsx @@ -5,7 +5,7 @@ import {ROW_HEIGHT, ROW_PADDING} from 'sentry/components/performance/waterfall/c import type {DurationDisplay} from 'sentry/components/performance/waterfall/types'; import { getDurationPillAlignment, - getDurationPillColours, + getDurationPillColors, getHatchPattern, } from 'sentry/components/performance/waterfall/utils'; import {space} from 'sentry/styles/space'; @@ -42,7 +42,7 @@ export const DurationPill = styled('div')<{ line-height: 1; ${getDurationPillAlignment} - ${getDurationPillColours} + ${getDurationPillColors} @media (max-width: ${p => p.theme.breakpoints.medium}) { font-size: 10px; diff --git a/static/app/components/performance/waterfall/utils.tsx b/static/app/components/performance/waterfall/utils.tsx index 114736039780f3..63b7fed34ff53c 100644 --- a/static/app/components/performance/waterfall/utils.tsx +++ b/static/app/components/performance/waterfall/utils.tsx @@ -6,7 +6,7 @@ import {CHART_PALETTE} from 'sentry/constants/chartPalette'; import {space} from 'sentry/styles/space'; 
import type {SpanBarType} from './constants'; -import {getSpanBarColours} from './constants'; +import {getSpanBarColors} from './constants'; export const getBackgroundColor = ({ showStriping, @@ -30,7 +30,7 @@ export const getBackgroundColor = ({ export function getHatchPattern(spanBarType: SpanBarType | undefined, theme: Theme) { if (spanBarType) { - const {primary, alternate} = getSpanBarColours(spanBarType, theme); + const {primary, alternate} = getSpanBarColors(spanBarType, theme); return css` background-image: linear-gradient( @@ -81,7 +81,7 @@ export const getDurationPillAlignment = ({ } }; -export const getDurationPillColours = ({ +export const getDurationPillColors = ({ durationDisplay, theme, showDetail, @@ -93,8 +93,8 @@ export const getDurationPillColours = ({ spanBarType?: SpanBarType; }) => { if (durationDisplay === 'inset') { - const {alternate, insetTextColour} = getSpanBarColours(spanBarType, theme); - return `background: ${alternate}; color: ${insetTextColour};`; + const {alternate, insetTextColor} = getSpanBarColors(spanBarType, theme); + return `background: ${alternate}; color: ${insetTextColor};`; } return `color: ${showDetail ? theme.gray200 : theme.gray300};`; @@ -116,7 +116,7 @@ export const getToggleTheme = ({ spanBarType?: SpanBarType; }) => { if (spanBarType) { - const {primary} = getSpanBarColours(spanBarType, theme); + const {primary} = getSpanBarColors(spanBarType, theme); return css` background: ${primary}; border: 2px solid ${theme.button.default.border}; diff --git a/static/app/components/platformPicker.spec.tsx b/static/app/components/platformPicker.spec.tsx index 2f79bbb0ba6ee6..a0a31b356c4904 100644 --- a/static/app/components/platformPicker.spec.tsx +++ b/static/app/components/platformPicker.spec.tsx @@ -88,6 +88,7 @@ describe('PlatformPicker', function () { 'Nest.js', 'Next.js', 'Node.js', + 'Nuxt', 'PHP', 'Python', 'Rails', diff --git a/static/app/components/profiling/boundTooltip.tsx b/static/app/components/profiling/boundTooltip.tsx index 34796ca64a42be..6a6cfc933af580 100644 --- a/static/app/components/profiling/boundTooltip.tsx +++ b/static/app/components/profiling/boundTooltip.tsx @@ -6,39 +6,74 @@ import {space} from 'sentry/styles/space'; import type {CanvasView} from 'sentry/utils/profiling/canvasView'; import {useFlamegraphTheme} from 'sentry/utils/profiling/flamegraph/useFlamegraphTheme'; import type {FlamegraphCanvas} from 'sentry/utils/profiling/flamegraphCanvas'; -import type {Rect} from 'sentry/utils/profiling/speedscope'; +import {Rect} from 'sentry/utils/profiling/speedscope'; import theme from 'sentry/utils/theme'; +// The cursor icon is drawn with an origin in the top left, which means that if we render +// a tooltip directly at the cursor's position, it will overlap with the cursor icon. +// x <- client x +// |----| <- cursor icon +// |-------------| <- tooltip +// +// This won't happen if we draw the tooltip to the left of the cursor, as the cursor icon will +// be drawn to the right of the tooltip. The offset helps us correct this and remove the overlap. +// x <- client x offset +// |----| <- cursor icon +// |-------------| <- tooltip + +// We only need to do this when drawing the tooltip on the left side of the cursor, as +// the origin is in the correct position when drawing the tooltip on the right side.
+const CURSOR_LEFT_OFFSET_PX = 6; +const CURSOR_TOP_OFFSET_PX = 4; +// Gap between the tooltip and container edge for each side +const WIDTH_OFFSET = 8; + function computeBestTooltipPlacement( cursor: vec2, - container: Rect, - tooltip: DOMRect + tooltip: DOMRect, + canvas: Rect, + container: Rect ): string { // This is because the cursor's origin is in the top left corner of the arrow, so we want // to offset it just enough so that the tooltip does not overlap with the arrow's tail. // When the tooltip is placed to the left of the cursor, we do not have that issue and hence // no offset is applied. - const OFFSET_PX = 6; - let left = cursor[0] + OFFSET_PX; - const top = cursor[1] + OFFSET_PX; + const cursorLeft = cursor[0]; + const cursorTop = cursor[1]; + + // Cursor is relative to canvas, not container + const cursorRelativeToContainer = cursorLeft + canvas.x; + + let left = + cursorRelativeToContainer > container.width / 2 + ? cursorLeft - tooltip.width + : cursorLeft + CURSOR_LEFT_OFFSET_PX; + + const right = left + tooltip.width + canvas.left; - if (cursor[0] > container.width / 2) { - left = cursor[0] - tooltip.width; // No offset is applied here as tooltip is placed to the left + if (left + canvas.left - WIDTH_OFFSET <= 0) { + left = -canvas.left + WIDTH_OFFSET; + } else if (right >= container.width - WIDTH_OFFSET) { + left = container.width - tooltip.width - canvas.left - WIDTH_OFFSET; } - return `translate(${left || 0}px, ${top || 0}px)`; + return `translate(${left}px, ${cursorTop + CURSOR_TOP_OFFSET_PX}px)`; } interface BoundTooltipProps { - bounds: Rect; canvas: FlamegraphCanvas; + canvasBounds: Rect; canvasView: CanvasView<any>; cursor: vec2; children?: React.ReactNode; + containerBounds?: Rect; } +const DEFAULT_BOUNDS = Rect.Empty(); + function BoundTooltip({ - bounds, + containerBounds, + canvasBounds, canvas, cursor, canvasView, @@ -58,6 +93,21 @@ function BoundTooltip({ canvas.physicalToLogicalSpace ); + const containerBoundsRef = useRef<Rect>(containerBounds ??
DEFAULT_BOUNDS); + + if (containerBounds) { + containerBoundsRef.current = containerBounds; + } else if (containerBoundsRef.current.isEmpty()) { + const bodyRect = document.body.getBoundingClientRect(); + containerBoundsRef.current = new Rect( + bodyRect.x, + bodyRect.y, + bodyRect.width, + bodyRect.height + ); + } + + const sizeCache = useRef<{size: DOMRect; value: React.ReactNode} | null>(null); const rafIdRef = useRef<number | undefined>(); const onRef = useCallback( node => { @@ -71,14 +121,19 @@ function BoundTooltip({ } rafIdRef.current = window.requestAnimationFrame(() => { + if (!sizeCache.current || sizeCache.current?.value !== children) { + sizeCache.current = {value: children, size: node.getBoundingClientRect()}; + } + node.style.transform = computeBestTooltipPlacement( logicalSpaceCursor, - bounds, - node.getBoundingClientRect() + sizeCache.current.size, + canvasBounds, + containerBoundsRef.current ); }); }, - [bounds, logicalSpaceCursor] + [canvasBounds, logicalSpaceCursor, children] ); return ( @@ -89,7 +144,7 @@ function BoundTooltip({ fontSize: flamegraphTheme.SIZES.TOOLTIP_FONT_SIZE, fontFamily: flamegraphTheme.FONTS.FONT, zIndex: theme.zIndex.tooltip, - maxWidth: bounds.width, + maxWidth: containerBoundsRef.current.width - 2 * WIDTH_OFFSET, }} > {children} diff --git a/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx b/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx index b7a86cbc2036b1..02ac3aadbec950 100644 --- a/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphChartTooltip.tsx @@ -45,9 +45,9 @@ export function FlamegraphChartTooltip({ return series.length > 0 ? ( <BoundTooltip - bounds={canvasBounds} cursor={configSpaceCursor} canvas={chartCanvas} + canvasBounds={canvasBounds} canvasView={chartView} > {series.map((p, i) => { diff --git a/static/app/components/profiling/flamegraph/flamegraphSpanTooltip.tsx b/static/app/components/profiling/flamegraph/flamegraphSpanTooltip.tsx index 2da641c8bdb0bd..b8e76f2fa640a8 100644 --- a/static/app/components/profiling/flamegraph/flamegraphSpanTooltip.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphSpanTooltip.tsx @@ -53,9 +53,9 @@ export function FlamegraphSpanTooltip({ return ( <BoundTooltip - bounds={canvasBounds} cursor={configSpaceCursor} canvas={spansCanvas} + canvasBounds={canvasBounds} canvasView={spansView} > <FlamegraphTooltipFrameMainInfo> diff --git a/static/app/components/profiling/flamegraph/flamegraphTooltip.tsx b/static/app/components/profiling/flamegraph/flamegraphTooltip.tsx index d2f7d74eb5d346..cb9630fbac6045 100644 --- a/static/app/components/profiling/flamegraph/flamegraphTooltip.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphTooltip.tsx @@ -106,9 +106,9 @@ function DifferentialFlamegraphTooltip(props: DifferentialFlamegraphTooltipProps return ( <BoundTooltip - bounds={props.canvasBounds} cursor={props.configSpaceCursor} canvas={props.flamegraphCanvas} + canvasBounds={props.canvasBounds} canvasView={props.flamegraphView} > <FlamegraphTooltipFrameMainInfo> @@ -140,7 +140,7 @@ interface AggregateFlamegraphTooltipProps extends FlamegraphTooltipProps { function AggregateFlamegraphTooltip(props: AggregateFlamegraphTooltipProps) { return ( <BoundTooltip - bounds={props.canvasBounds} + canvasBounds={props.canvasBounds} cursor={props.configSpaceCursor} canvas={props.flamegraphCanvas} canvasView={props.flamegraphView} @@ -173,7 +173,7 @@ interface 
FlamechartTooltipProps extends FlamegraphTooltipProps { function FlamechartTooltip(props: FlamechartTooltipProps) { return ( <BoundTooltip - bounds={props.canvasBounds} + canvasBounds={props.canvasBounds} cursor={props.configSpaceCursor} canvas={props.flamegraphCanvas} canvasView={props.flamegraphView} @@ -237,8 +237,9 @@ export const FlamegraphTooltipTimelineInfo = styled('div')` `; export const FlamegraphTooltipFrameMainInfo = styled('div')` - display: flex; - align-items: center; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; `; export const FlamegraphTooltipColorIndicator = styled('div')<{ @@ -255,4 +256,5 @@ export const FlamegraphTooltipColorIndicator = styled('div')<{ background-size: 16px 16px; background-color: ${p => p.backgroundColor}; margin-right: ${space(1)}; + transform: translateY(2px); `; diff --git a/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx b/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx index 2d013f77effd51..9d316a7ce57a96 100644 --- a/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx +++ b/static/app/components/profiling/flamegraph/flamegraphUIFramesTooltip.tsx @@ -50,9 +50,9 @@ export function FlamegraphUIFramesTooltip({ return ( <BoundTooltip - bounds={canvasBounds} cursor={configSpaceCursor} canvas={uiFramesCanvas} + canvasBounds={canvasBounds} canvasView={uiFramesView} > {uiFramesInConfigSpace.map((frame, i) => { diff --git a/static/app/components/profiling/profileEventsTable.tsx b/static/app/components/profiling/profileEventsTable.tsx index b33320da3da652..862aedcb48ebae 100644 --- a/static/app/components/profiling/profileEventsTable.tsx +++ b/static/app/components/profiling/profileEventsTable.tsx @@ -14,6 +14,7 @@ import {t} from 'sentry/locale'; import type {Organization} from 'sentry/types/organization'; import type {Project} from 'sentry/types/project'; import {defined} from 'sentry/utils'; +import {trackAnalytics} from 'sentry/utils/analytics'; import {getTimeStampFromTableDateField} from 'sentry/utils/dates'; import EventView from 'sentry/utils/discover/eventView'; import {DURATION_UNITS} from 'sentry/utils/discover/fieldRenderers'; @@ -33,8 +34,7 @@ import { useDomainViewFilters, } from 'sentry/views/insights/pages/useFilters'; import {getTraceDetailsUrl} from 'sentry/views/performance/traceDetails/utils'; - -import {ProfilingTransactionHovercard} from './profilingTransactionHovercard'; +import {profilesRouteWithQuery} from 'sentry/views/performance/transactionSummary/transactionProfiles/utils'; interface ProfileEventsTableProps<F extends FieldType> { columns: readonly F[]; @@ -236,13 +236,26 @@ function ProfileEventsCell<F extends FieldType>(props: ProfileEventsCellProps<F> const project = getProjectForRow(props.baggage, props.dataRow); if (defined(project)) { + const linkToSummary = profilesRouteWithQuery({ + query: props.baggage.location.query, + orgSlug: props.baggage.organization.slug, + projectID: project.id, + transaction: props.dataRow.transaction, + }); + return ( <Container> - <ProfilingTransactionHovercard - transaction={value} - project={project} - organization={props.baggage.organization} - /> + <Link + to={linkToSummary} + onClick={() => + trackAnalytics('profiling_views.go_to_transaction', { + organization: props.baggage.organization, + source: 'profiling.landing.transaction_table', + }) + } + > + {props.dataRow.transaction} + </Link> </Container> ); } diff --git a/static/app/components/profiling/profilingTransactionHovercard.tsx 
b/static/app/components/profiling/profilingTransactionHovercard.tsx deleted file mode 100644 index 0294088b7c9f8d..00000000000000 --- a/static/app/components/profiling/profilingTransactionHovercard.tsx +++ /dev/null @@ -1,270 +0,0 @@ -import {Fragment, useEffect} from 'react'; -import styled from '@emotion/styled'; - -import {LinkButton} from 'sentry/components/button'; -import {Flex} from 'sentry/components/container/flex'; -import {Hovercard} from 'sentry/components/hovercard'; -import { - FunctionsMiniGrid, - FunctionsMiniGridEmptyState, - FunctionsMiniGridLoading, -} from 'sentry/components/profiling/functionsMiniGrid'; -import {TextTruncateOverflow} from 'sentry/components/profiling/textTruncateOverflow'; -import {t} from 'sentry/locale'; -import {space} from 'sentry/styles/space'; -import type {Organization} from 'sentry/types/organization'; -import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {getShortEventId} from 'sentry/utils/events'; -import {useProfilingTransactionQuickSummary} from 'sentry/utils/profiling/hooks/useProfilingTransactionQuickSummary'; -import { - generateProfileFlamechartRouteWithQuery, - generateProfileSummaryRouteWithQuery, -} from 'sentry/utils/profiling/routes'; -import {useLocation} from 'sentry/utils/useLocation'; -import {profilesRouteWithQuery} from 'sentry/views/performance/transactionSummary/transactionProfiles/utils'; - -import Link from '../links/link'; -import LoadingIndicator from '../loadingIndicator'; -import PerformanceDuration from '../performanceDuration'; - -interface ProfilingTransactionHovercardProps { - organization: Organization; - project: Project; - transaction: string; -} - -export function ProfilingTransactionHovercard(props: ProfilingTransactionHovercardProps) { - const {project, transaction, organization} = props; - const {query} = useLocation(); - - if (!organization.features.includes('continuous-profiling-ui')) { - const linkToSummary = generateProfileSummaryRouteWithQuery({ - query, - orgSlug: organization.slug, - projectSlug: project.slug, - transaction, - }); - - const triggerLink = ( - <Link - to={linkToSummary} - onClick={() => - trackAnalytics('profiling_views.go_to_transaction', { - organization, - source: 'transaction_hovercard.trigger', - }) - } - > - {transaction} - </Link> - ); - - return ( - <StyledHovercard - delay={250} - header={ - <Flex justify="space-between" align="center"> - <TextTruncateOverflow>{transaction}</TextTruncateOverflow> - <LinkButton to={linkToSummary} size="xs"> - {t('View Profiles')} - </LinkButton> - </Flex> - } - body={ - <ProfilingTransactionHovercardBody - transaction={transaction} - project={project} - organization={organization} - /> - } - showUnderline - > - {triggerLink} - </StyledHovercard> - ); - } - - const linkToSummary = profilesRouteWithQuery({ - query, - orgSlug: organization.slug, - projectID: project.id, - transaction, - }); - - return ( - <Link - to={linkToSummary} - onClick={() => - trackAnalytics('profiling_views.go_to_transaction', { - organization, - source: 'profiling.landing.transaction_table', - }) - } - > - {transaction} - </Link> - ); -} - -export function ProfilingTransactionHovercardBody({ - transaction, - project, - organization, -}: ProfilingTransactionHovercardProps) { - const { - slowestProfile, - slowestProfileQuery, - slowestProfileDurationMultiplier, - latestProfileQuery, - latestProfile, - functionsQuery, - functions, - } = useProfilingTransactionQuickSummary({ - transaction, - project, - referrer: 
'api.profiling.transaction-hovercard', - }); - - const linkToFlamechartRoute = ( - profileId: string, - query?: {frameName: string; framePackage: string} - ) => { - return generateProfileFlamechartRouteWithQuery({ - orgSlug: organization.slug, - projectSlug: project.slug, - profileId, - query, - }); - }; - - useEffect(() => { - trackAnalytics('profiling_ui_events.transaction_hovercard_view', { - organization, - }); - }, [organization]); - - return ( - <Flex gap={space(3)} column> - <Flex justify="space-between"> - <ContextDetail - title={t('Latest profile')} - isLoading={latestProfileQuery.isPending} - > - {latestProfile ? ( - <Link - to={linkToFlamechartRoute(String(latestProfile['profile.id']))} - onClick={() => - trackAnalytics('profiling_views.go_to_flamegraph', { - organization, - source: 'transaction_hovercard.latest_profile', - }) - } - > - {getShortEventId(String(latestProfile!['profile.id']))} - </Link> - ) : ( - '-' - )} - </ContextDetail> - - <ContextDetail - title={t('Slowest profile')} - isLoading={slowestProfileQuery.isPending} - > - {slowestProfile ? ( - <Flex gap={space(1)}> - <PerformanceDuration - milliseconds={ - slowestProfileDurationMultiplier * - (slowestProfile['transaction.duration'] as number) - } - abbreviation - /> - <Link - to={linkToFlamechartRoute(String(slowestProfile['profile.id']))} - onClick={() => - trackAnalytics('profiling_views.go_to_flamegraph', { - organization, - source: 'transaction_hovercard.slowest_profile', - }) - } - > - ({getShortEventId(String(slowestProfile['profile.id']))}) - </Link> - </Flex> - ) : ( - '-' - )} - </ContextDetail> - </Flex> - - <Flex column h={125}> - <ProfilingTransactionHovercardFunctions - isLoading={functionsQuery.isPending} - functions={functions ?? []} - organization={organization} - project={project} - onLinkClick={() => - trackAnalytics('profiling_views.go_to_flamegraph', { - organization, - source: 'transaction_hovercard.suspect_function', - }) - } - /> - </Flex> - </Flex> - ); -} - -type ProfilingTransactionHovercardFunctionsProps = React.ComponentProps< - typeof FunctionsMiniGrid -> & {isLoading: boolean}; - -function ProfilingTransactionHovercardFunctions( - props: ProfilingTransactionHovercardFunctionsProps -) { - if (props.isLoading) { - return <FunctionsMiniGridLoading />; - } - - if (!props.functions || props.functions?.length === 0) { - return <FunctionsMiniGridEmptyState />; - } - return <FunctionsMiniGrid {...props} />; -} - -interface ContextDetailProps { - children: React.ReactNode; - isLoading: boolean; - title?: React.ReactNode; -} -function ContextDetail(props: ContextDetailProps) { - const {title, children, isLoading} = props; - - return ( - <Flex column gap={space(1)}> - {title && <UppercaseTitle>{title}</UppercaseTitle>} - <Fragment> - {isLoading ? 
( - <Flex align="center" justify="center" h="1em"> - <LoadingIndicator mini /> - </Flex> - ) : ( - children - )} - </Fragment> - </Flex> - ); -} - -const UppercaseTitle = styled('span')` - text-transform: uppercase; - font-size: ${p => p.theme.fontSizeExtraSmall}; - font-weight: ${p => p.theme.fontWeightBold}; - color: ${p => p.theme.subText}; -`; - -const StyledHovercard = styled(Hovercard)` - width: 400px; -`; diff --git a/static/app/components/profiling/suspectFunctions/functionsTable.spec.tsx b/static/app/components/profiling/suspectFunctions/functionsTable.spec.tsx deleted file mode 100644 index b66e717067acbf..00000000000000 --- a/static/app/components/profiling/suspectFunctions/functionsTable.spec.tsx +++ /dev/null @@ -1,162 +0,0 @@ -import type {ReactElement} from 'react'; -import {useEffect} from 'react'; -import {ProjectFixture} from 'sentry-fixture/project'; - -import {render, screen} from 'sentry-test/reactTestingLibrary'; - -import {FunctionsTable} from 'sentry/components/profiling/suspectFunctions/functionsTable'; -import ProjectsStore from 'sentry/stores/projectsStore'; - -const project = ProjectFixture(); - -function TestContext({children}: {children: ReactElement}) { - useEffect(() => { - ProjectsStore.loadInitialData([project]); - return () => ProjectsStore.reset(); - }, []); - - return children; -} - -describe('FunctionsTable', function () { - it('renders loading', function () { - render( - <TestContext> - <FunctionsTable - analyticsPageSource="profiling_transaction" - isLoading - error={null} - functions={[]} - project={project} - sort={{key: 'p95()', order: 'desc'}} - /> - </TestContext> - ); - expect(screen.getByTestId('loading-indicator')).toBeInTheDocument(); - }); - - it('renders empty data', function () { - render( - <TestContext> - <FunctionsTable - analyticsPageSource="profiling_transaction" - isLoading={false} - error={null} - functions={[]} - project={project} - sort={{key: 'p95()', order: 'desc'}} - /> - </TestContext> - ); - - expect(screen.getByText('No results found for your query')).toBeInTheDocument(); - }); - - it('renders one function', function () { - const func = { - 'count()': 10, - 'all_examples()': [ - {profile_id: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'}, - {profile_id: 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'}, - ], - function: 'foo', - 'p75()': 10000000, - 'sum()': 25000000, - package: 'bar', - }; - - render( - <TestContext> - <FunctionsTable - analyticsPageSource="profiling_transaction" - isLoading={false} - error={null} - functions={[func]} - project={project} - sort={{key: 'p95()', order: 'desc'}} - /> - </TestContext> - ); - - expect(screen.getByText('Name')).toBeInTheDocument(); - expect(screen.getByText('foo')).toBeInTheDocument(); - - expect(screen.getByText('Package')).toBeInTheDocument(); - expect(screen.getByText('bar')).toBeInTheDocument(); - - expect(screen.getByText('Occurrences')).toBeInTheDocument(); - expect(screen.getByText('10')).toBeInTheDocument(); - - expect(screen.getByText('P75 Self Time')).toBeInTheDocument(); - expect(screen.getByText('10.00ms')).toBeInTheDocument(); - - expect(screen.getByText('Total Self Time')).toBeInTheDocument(); - expect(screen.getByText('25.00ms')).toBeInTheDocument(); - }); - - it('renders empty name', function () { - const func = { - 'count()': 10, - 'all_examples()': [ - {profile_id: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'}, - {profile_id: 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'}, - ], - function: '', - 'p75()': 10000000, - 'sum()': 25000000, - package: 'bar', - }; - - render( - <TestContext> - <FunctionsTable - 
analyticsPageSource="profiling_transaction" - isLoading={false} - error={null} - functions={[func]} - project={project} - sort={{key: 'p75()', order: 'desc'}} - /> - </TestContext> - ); - - expect(screen.getByText('Name')).toBeInTheDocument(); - expect(screen.getByText('Unknown')).toBeInTheDocument(); - - expect(screen.getByText('Package')).toBeInTheDocument(); - expect(screen.getByText('bar')).toBeInTheDocument(); - }); - - it('renders empty package', function () { - const func = { - 'count()': 10, - 'all_examples()': [ - {profile_id: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'}, - {profile_id: 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb'}, - ], - function: 'foo', - 'p75()': 10000000, - 'sum()': 25000000, - package: '', - }; - - render( - <TestContext> - <FunctionsTable - analyticsPageSource="profiling_transaction" - isLoading={false} - error={null} - functions={[func]} - project={project} - sort={{key: 'p75()', order: 'desc'}} - /> - </TestContext> - ); - - expect(screen.getByText('Name')).toBeInTheDocument(); - expect(screen.getByText('foo')).toBeInTheDocument(); - - expect(screen.getByText('Package')).toBeInTheDocument(); - expect(screen.getByText('Unknown')).toBeInTheDocument(); - }); -}); diff --git a/static/app/components/profiling/suspectFunctions/functionsTable.tsx b/static/app/components/profiling/suspectFunctions/functionsTable.tsx deleted file mode 100644 index 69eb89e872d550..00000000000000 --- a/static/app/components/profiling/suspectFunctions/functionsTable.tsx +++ /dev/null @@ -1,238 +0,0 @@ -import {useCallback, useMemo} from 'react'; -import styled from '@emotion/styled'; - -import Count from 'sentry/components/count'; -import type {GridColumnOrder} from 'sentry/components/gridEditable'; -import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable'; -import PerformanceDuration from 'sentry/components/performanceDuration'; -import {ArrayLinks} from 'sentry/components/profiling/arrayLinks'; -import {t} from 'sentry/locale'; -import type {Project} from 'sentry/types/project'; -import {trackAnalytics} from 'sentry/utils/analytics'; -import {Container, NumberContainer} from 'sentry/utils/discover/styles'; -import {getShortEventId} from 'sentry/utils/events'; -import { - isContinuousProfileReference, - isTransactionProfileReference, -} from 'sentry/utils/profiling/guards/profile'; -import type {EventsResults, Sort} from 'sentry/utils/profiling/hooks/types'; -import {generateProfileRouteFromProfileReference} from 'sentry/utils/profiling/routes'; -import {renderTableHead} from 'sentry/utils/profiling/tableRenderer'; -import {useLocation} from 'sentry/utils/useLocation'; -import useOrganization from 'sentry/utils/useOrganization'; - -interface FunctionsTableProps { - analyticsPageSource: 'performance_transaction' | 'profiling_transaction'; - error: string | null; - functions: EventsResults<TableColumnKey>['data']; - isLoading: boolean; - project: Project | undefined; - sort: Sort<any>; -} - -export function FunctionsTable(props: FunctionsTableProps) { - const location = useLocation(); - const organization = useOrganization(); - - const functions: TableDataRow[] = useMemo(() => { - const project = props.project; - if (!project) { - return []; - } - - return props.functions.map(func => { - const examples = func['all_examples()']; - - return { - ...func, - 'all_examples()': examples.map(example => { - return { - value: getShortEventId(getTargetId(example)), - onClick: () => - trackAnalytics('profiling_views.go_to_flamegraph', { - organization, - source: 
`${props.analyticsPageSource}.suspect_functions_table`, - }), - target: generateProfileRouteFromProfileReference({ - orgSlug: organization.slug, - projectSlug: project.slug, - reference: example, - // specify the frame to focus, the flamegraph will switch - // to the appropriate thread when these are specified - frameName: func.function as string, - framePackage: func.package as string, - }), - }; - }), - }; - }); - }, [organization, props.project, props.functions, props.analyticsPageSource]); - - const generateSortLink = useCallback( - (column: TableColumnKey) => { - if (!SORTABLE_COLUMNS.has(column)) { - return () => undefined; - } - - const direction = - props.sort.key !== column ? 'desc' : props.sort.order === 'desc' ? 'asc' : 'desc'; - - return () => ({ - ...location, - query: { - ...location.query, - functionsSort: `${direction === 'desc' ? '-' : ''}${column}`, - }, - }); - }, - [location, props.sort] - ); - - return ( - <GridEditable - isLoading={props.isLoading} - error={props.error} - data={functions} - columnOrder={COLUMN_ORDER.map(key => COLUMNS[key])} - columnSortBy={[]} - grid={{ - renderHeadCell: renderTableHead({ - currentSort: props.sort, - rightAlignedColumns: RIGHT_ALIGNED_COLUMNS, - sortableColumns: SORTABLE_COLUMNS, - generateSortLink, - }), - renderBodyCell: renderFunctionsTableCell, - }} - /> - ); -} - -const RIGHT_ALIGNED_COLUMNS = new Set<TableColumnKey>(['p75()', 'sum()', 'count()']); - -const SORTABLE_COLUMNS = RIGHT_ALIGNED_COLUMNS; - -function renderFunctionsTableCell( - column: TableColumn, - dataRow: TableDataRow, - rowIndex: number, - columnIndex: number -) { - return ( - <ProfilingFunctionsTableCell - column={column} - dataRow={dataRow} - rowIndex={rowIndex} - columnIndex={columnIndex} - /> - ); -} - -interface ProfilingFunctionsTableCellProps { - column: TableColumn; - columnIndex: number; - dataRow: TableDataRow; - rowIndex: number; -} - -const EmptyValueContainer = styled('span')` - color: ${p => p.theme.gray300}; -`; - -function ProfilingFunctionsTableCell({ - column, - dataRow, -}: ProfilingFunctionsTableCellProps) { - const value = dataRow[column.key]; - - switch (column.key) { - case 'count()': - return ( - <NumberContainer> - <Count value={value} /> - </NumberContainer> - ); - case 'p75()': - case 'sum()': - return ( - <NumberContainer> - <PerformanceDuration nanoseconds={value} abbreviation /> - </NumberContainer> - ); - case 'all_examples()': - return <ArrayLinks items={value} />; - case 'function': - case 'package': - const name = value || <EmptyValueContainer>{t('Unknown')}</EmptyValueContainer>; - return <Container>{name}</Container>; - default: - return <Container>{value}</Container>; - } -} - -export const functionsFields = [ - 'package', - 'function', - 'count()', - 'p75()', - 'sum()', - 'all_examples()', -] as const; - -export type TableColumnKey = (typeof functionsFields)[number]; - -type TableDataRow = Record<TableColumnKey, any>; - -type TableColumn = GridColumnOrder<TableColumnKey>; - -const COLUMN_ORDER: TableColumnKey[] = [ - 'function', - 'package', - 'count()', - 'p75()', - 'sum()', - 'all_examples()', -]; - -const COLUMNS: Record<TableColumnKey, TableColumn> = { - function: { - key: 'function', - name: t('Name'), - width: COL_WIDTH_UNDEFINED, - }, - package: { - key: 'package', - name: t('Package'), - width: COL_WIDTH_UNDEFINED, - }, - 'p75()': { - key: 'p75()', - name: t('P75 Self Time'), - width: COL_WIDTH_UNDEFINED, - }, - 'sum()': { - key: 'sum()', - name: t('Total Self Time'), - width: COL_WIDTH_UNDEFINED, - }, - 'count()': { - 
key: 'count()', - name: t('Occurrences'), - width: COL_WIDTH_UNDEFINED, - }, - 'all_examples()': { - key: 'all_examples()', - name: t('Example Profiles'), - width: COL_WIDTH_UNDEFINED, - }, -}; - -function getTargetId(reference): string { - if (isTransactionProfileReference(reference)) { - return reference.profile_id; - } - if (isContinuousProfileReference(reference)) { - return reference.profiler_id; - } - return reference; -} diff --git a/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx b/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx index 25a65552978045..c12594b5584e7f 100644 --- a/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx +++ b/static/app/components/profiling/suspectFunctions/suspectFunctionsTable.tsx @@ -1,130 +1,292 @@ import {Fragment, useCallback, useMemo, useState} from 'react'; import styled from '@emotion/styled'; +import clamp from 'lodash/clamp'; -import {CompactSelect} from 'sentry/components/compactSelect'; -import Pagination from 'sentry/components/pagination'; -import type {TableColumnKey as FunctionsField} from 'sentry/components/profiling/suspectFunctions/functionsTable'; -import { - functionsFields, - FunctionsTable, -} from 'sentry/components/profiling/suspectFunctions/functionsTable'; +import {Button} from 'sentry/components/button'; +import ButtonBar from 'sentry/components/buttonBar'; +import {SectionHeading} from 'sentry/components/charts/styles'; +import EmptyStateWarning from 'sentry/components/emptyStateWarning'; +import LoadingIndicator from 'sentry/components/loadingIndicator'; +import {ArrayLinks} from 'sentry/components/profiling/arrayLinks'; +import {IconChevron} from 'sentry/icons/iconChevron'; +import {IconWarning} from 'sentry/icons/iconWarning'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import type {Organization} from 'sentry/types/organization'; import type {Project} from 'sentry/types/project'; -import {browserHistory} from 'sentry/utils/browserHistory'; -import {useProfileFunctions} from 'sentry/utils/profiling/hooks/useProfileFunctions'; -import {formatSort} from 'sentry/utils/profiling/hooks/utils'; -import {decodeScalar} from 'sentry/utils/queryString'; -import {MutableSearch} from 'sentry/utils/tokenizeSearch'; +import {trackAnalytics} from 'sentry/utils/analytics'; +import type EventView from 'sentry/utils/discover/eventView'; +import type {RenderFunctionBaggage} from 'sentry/utils/discover/fieldRenderers'; +import {FIELD_FORMATTERS} from 'sentry/utils/discover/fieldRenderers'; +import {getShortEventId} from 'sentry/utils/events'; +import {useAggregateFlamegraphQuery} from 'sentry/utils/profiling/hooks/useAggregateFlamegraphQuery'; +import {generateProfileRouteFromProfileReference} from 'sentry/utils/profiling/routes'; import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import { + Table, + TableBody, + TableBodyCell, + TableHead, + TableHeadCell, + TableRow, + TableStatus, + useTableStyles, +} from 'sentry/views/explore/components/table'; +import {getProfileTargetId} from 'sentry/views/profiling/utils'; + +function sortFunctions(a: Profiling.FunctionMetric, b: Profiling.FunctionMetric) { + return b.sum - a.sum; +} + +type Column = { + label: React.ReactNode; + value: keyof Profiling.FunctionMetric; +}; + +const COLUMNS: Column[] = [ + { + label: t('function'), + value: 'name', + }, + { + label: t('package'), + value: 'package', + }, + { + label: t('avg()'), + 
value: 'avg', + }, + { + label: t('p75()'), + value: 'p75', + }, + { + label: t('p95()'), + value: 'p95', + }, + { + label: t('p99()'), + value: 'p99', + }, + { + label: t('examples'), + value: 'examples', + }, +]; interface SuspectFunctionsTableProps { analyticsPageSource: 'performance_transaction' | 'profiling_transaction'; - project: Project | undefined; - transaction: string; + eventView: EventView; + project?: Project; } -const FUNCTIONS_CURSOR_NAME = 'functionsCursor'; - export function SuspectFunctionsTable({ analyticsPageSource, + eventView, project, - transaction, }: SuspectFunctionsTableProps) { - const [functionType, setFunctionType] = useState<'application' | 'system' | 'all'>( - 'application' - ); const location = useLocation(); - const functionsCursor = useMemo( - () => decodeScalar(location.query[FUNCTIONS_CURSOR_NAME]), - [location.query] - ); - const functionsSort = useMemo( - () => - formatSort<FunctionsField>( - decodeScalar(location.query.functionsSort), - functionsFields, - { - key: 'sum()', - order: 'desc', - } - ), - [location.query.functionsSort] - ); + const organization = useOrganization(); - const handleFunctionsCursor = useCallback((cursor, pathname, query) => { - browserHistory.push({ - pathname, - query: {...query, [FUNCTIONS_CURSOR_NAME]: cursor}, - }); - }, []); - - const query = useMemo(() => { - const conditions = new MutableSearch(''); - conditions.setFilterValues('transaction', [transaction]); - if (functionType === 'application') { - conditions.setFilterValues('is_application', ['1']); - } else if (functionType === 'system') { - conditions.setFilterValues('is_application', ['0']); - } - return conditions.formatString(); - }, [functionType, transaction]); - - const functionsQuery = useProfileFunctions<FunctionsField>({ - fields: functionsFields, - referrer: 'api.profiling.profile-summary-functions-table', - sort: functionsSort, - query, - limit: 5, - cursor: functionsCursor, + const flamegraphQuery = useAggregateFlamegraphQuery({ + // User query is only permitted when using transactions. + // If this is to be reused for strictly continuous profiling, + // it'll need to be swapped to use the `profiles` data source + // with no user query. 
+ dataSource: 'transactions', + query: eventView.query, + metrics: true, }); + const sortedMetrics = useMemo(() => { + const metrics = flamegraphQuery.data?.metrics || []; + return metrics.sort(sortFunctions); + }, [flamegraphQuery.data?.metrics]); + + const pagination = useMemoryPagination(sortedMetrics, 5); + + const metrics = useMemo(() => { + return sortedMetrics.slice(pagination.start, pagination.end); + }, [sortedMetrics, pagination]); + + const {tableStyles} = useTableStyles({ + items: COLUMNS, + }); + + const baggage: RenderFunctionBaggage = { + location, + organization, + unit: 'nanosecond', + }; + return ( <Fragment> <TableHeader> - <CompactSelect - triggerProps={{prefix: t('Slowest Functions'), size: 'xs'}} - value={functionType} - options={[ - { - label: t('All'), - value: 'all' as const, - }, - { - label: t('Application'), - value: 'application' as const, - }, - { - label: t('System'), - value: 'system' as const, - }, - ]} - onChange={({value}) => setFunctionType(value)} - /> - <StyledPagination - pageLinks={functionsQuery.getResponseHeader?.('Link')} - onCursor={handleFunctionsCursor} - size="xs" - /> + <SectionHeading>{t('Suspect Functions')}</SectionHeading> + <ButtonBar merged> + <Button + icon={<IconChevron direction="left" />} + aria-label={t('Previous')} + size="xs" + {...pagination.previousButtonProps} + /> + <Button + icon={<IconChevron direction="right" />} + aria-label={t('Next')} + size="xs" + {...pagination.nextButtonProps} + /> + </ButtonBar> </TableHeader> - <FunctionsTable - analyticsPageSource={analyticsPageSource} - error={functionsQuery.isError ? functionsQuery.error.message : null} - isLoading={functionsQuery.isPending} - functions={functionsQuery.isFetched ? functionsQuery.data?.data ?? [] : []} - project={project} - sort={functionsSort} - /> + <Table style={tableStyles}> + <TableHead> + <TableRow> + {COLUMNS.map((column, i) => { + return ( + <TableHeadCell + key={i} + isFirst={i === 0} + align={ + column.value === 'package' || column.value === 'name' + ? 'left' + : 'right' + } + > + {column.label} + </TableHeadCell> + ); + })} + </TableRow> + </TableHead> + <TableBody> + {flamegraphQuery.isPending ? ( + <TableStatus> + <LoadingIndicator /> + </TableStatus> + ) : flamegraphQuery.isError ? ( + <TableStatus> + <IconWarning data-test-id="error-indicator" color="gray300" size="lg" /> + </TableStatus> + ) : flamegraphQuery.isFetched ? 
( + metrics.map((metric, i) => ( + <TableEntry + key={i} + analyticsPageSource={analyticsPageSource} + baggage={baggage} + metric={metric} + organization={organization} + project={project} + /> + )) + ) : ( + <TableStatus> + <EmptyStateWarning> + <p>{t('No functions found')}</p> + </EmptyStateWarning> + </TableStatus> + )} + </TableBody> + </Table> </Fragment> ); } +interface TableEntryProps { + analyticsPageSource: 'performance_transaction' | 'profiling_transaction'; + baggage: RenderFunctionBaggage; + metric: Profiling.FunctionMetric; + organization: Organization; + project?: Project; +} + +function TableEntry({ + analyticsPageSource, + baggage, + metric, + organization, + project, +}: TableEntryProps) { + return ( + <TableRow> + {COLUMNS.map(column => { + if (column.value === 'examples') { + const items = metric[column.value].map(example => { + return { + value: getShortEventId(getProfileTargetId(example)), + onClick: () => + trackAnalytics('profiling_views.go_to_flamegraph', { + organization, + source: `${analyticsPageSource}.suspect_functions_table`, + }), + target: generateProfileRouteFromProfileReference({ + orgSlug: organization.slug, + projectSlug: project?.slug || '', + reference: example, + // specify the frame to focus, the flamegraph will switch + // to the appropriate thread when these are specified + frameName: metric.name, + framePackage: metric.package, + }), + }; + }); + return ( + <TableBodyCell key={column.value}> + <ArrayLinks items={items} /> + </TableBodyCell> + ); + } + + const formatter = + typeof metric[column.value] === 'number' + ? FIELD_FORMATTERS.duration.renderFunc + : FIELD_FORMATTERS.string.renderFunc; + return ( + <TableBodyCell key={column.value}> + {formatter(column.value, metric, baggage)} + </TableBodyCell> + ); + })} + </TableRow> + ); +} + +function useMemoryPagination(items: any[], size: number) { + const [pagination, setPagination] = useState({ + start: 0, + end: size, + }); + + const page = Math.floor(pagination.start / size); + const toPage = useCallback( + (p: number) => { + const next = clamp(p, 0, Math.floor(items.length / size)); + + setPagination({ + start: clamp(next * size, 0, items.length - size), + end: Math.min(next * size + size, items.length), + }); + }, + [size, items] + ); + + return { + page, + start: pagination.start, + end: pagination.end, + nextButtonProps: { + disabled: pagination.end >= items.length, + onClick: () => toPage(page + 1), + }, + previousButtonProps: { + disabled: pagination.start <= 0, + onClick: () => toPage(page - 1), + }, + }; +} + const TableHeader = styled('div')` display: flex; justify-content: space-between; margin-bottom: ${space(1)}; `; - -const StyledPagination = styled(Pagination)` - margin: 0 0 0 ${space(1)}; -`; diff --git a/static/app/components/profiling/transactionProfileIdProvider.tsx b/static/app/components/profiling/transactionProfileIdProvider.tsx deleted file mode 100644 index e5d1f085afca08..00000000000000 --- a/static/app/components/profiling/transactionProfileIdProvider.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import {createContext, useContext, useEffect, useMemo} from 'react'; -import * as Sentry from '@sentry/react'; - -import type {PageFilters} from 'sentry/types/core'; -import {useProfileEvents} from 'sentry/utils/profiling/hooks/useProfileEvents'; - -const TransactionProfileContext = createContext<string | null | undefined>(undefined); - -interface TransactionToProfileIdProviderProps { - children: React.ReactNode; - timestamp: string | undefined; - transactionId: string | undefined; - 
projectId?: string | undefined; -} - -export function TransactionProfileIdProvider({ - projectId, - timestamp, - transactionId, - children, -}: TransactionToProfileIdProviderProps) { - // create a 24h timeframe relative from the transaction timestamp to use for - // the profile events query - const datetime: PageFilters['datetime'] | undefined = useMemo(() => { - if (!timestamp) { - return undefined; - } - const ts = new Date(timestamp); - const start = new Date(new Date(ts).setHours(ts.getHours() - 12)); - const end = new Date(new Date(ts).setHours(ts.getHours() + 12)); - - return { - start, - end, - period: null, - utc: true, - }; - }, [timestamp]); - - const transactionIdColumn = 'id'; - - const {status, data, error} = useProfileEvents({ - projects: projectId ? [projectId] : undefined, - fields: ['profile.id'], - referrer: 'transactionToProfileProvider', - limit: 1, - sort: { - key: 'id', - order: 'asc', - }, - query: `${transactionIdColumn}:${transactionId}`, - enabled: Boolean(transactionId), - datetime, - }); - - useEffect(() => { - if (status !== 'error') { - return; - } - - if (error.status !== 404) { - Sentry.captureException(error); - } - }, [status, error]); - - const profileId = (data?.data[0]?.['profile.id'] as string | undefined) ?? null; - - return ( - <TransactionProfileContext.Provider value={profileId}> - {children} - </TransactionProfileContext.Provider> - ); -} -TransactionProfileIdProvider.Context = TransactionProfileContext; - -export function useTransactionProfileId() { - const ctx = useContext(TransactionProfileContext); - if (typeof ctx === 'undefined') { - throw new Error(`useTransactionProfile called outside of TransactionProfileProvider`); - } - - return ctx; -} diff --git a/static/app/components/profiling/transactonProfileIdProvider.spec.tsx b/static/app/components/profiling/transactonProfileIdProvider.spec.tsx deleted file mode 100644 index 3a970c78c354d8..00000000000000 --- a/static/app/components/profiling/transactonProfileIdProvider.spec.tsx +++ /dev/null @@ -1,106 +0,0 @@ -import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; - -import * as useProfileEventsModule from 'sentry/utils/profiling/hooks/useProfileEvents'; -import * as useApiModule from 'sentry/utils/useApi'; - -import * as TransactionProfileIdProviderModule from './transactionProfileIdProvider'; - -const useApiSpy = jest.spyOn(useApiModule, 'default'); - -// this order matters; create the spy before dereferencing below -const useTransactionProfileIdSpy = jest.spyOn( - TransactionProfileIdProviderModule, - 'useTransactionProfileId' -); - -const {TransactionProfileIdProvider, useTransactionProfileId} = - TransactionProfileIdProviderModule; - -const useProfileEventsSpy = jest.spyOn(useProfileEventsModule, 'useProfileEvents'); - -function MockComponent() { - const profileId = useTransactionProfileId(); - return <div data-test-id={profileId} />; -} - -const MOCK_TRX_ID = '123'; -const MOCK_PROFILE_ID = '456'; - -describe('TransactionProfileIdProvider', () => { - afterEach(() => { - jest.clearAllMocks(); - }); - it('provides default profileId state as null', () => { - render( - <TransactionProfileIdProvider transactionId={undefined} timestamp={undefined}> - <MockComponent /> - </TransactionProfileIdProvider> - ); - - expect(useProfileEventsSpy).toHaveBeenCalledWith( - expect.objectContaining({ - enabled: false, - }) - ); - expect(useTransactionProfileIdSpy).toHaveReturnedWith(null); - }); - - it('does not query the events endpoint when transactionId is undefined', () => { - const 
requestPromiseMock = jest.fn(); - // @ts-expect-error - useApiSpy.mockReturnValueOnce({ - requestPromise: requestPromiseMock, - }); - render( - <TransactionProfileIdProvider transactionId={undefined} timestamp={undefined}> - <MockComponent /> - </TransactionProfileIdProvider> - ); - - expect(useProfileEventsSpy).toHaveBeenCalledWith( - expect.objectContaining({ - enabled: false, - }) - ); - expect(requestPromiseMock).not.toHaveBeenCalled(); - expect(useTransactionProfileIdSpy).toHaveReturnedWith(null); - }); - - it('queries the events endpoint for a profile id when given a transactionId', async () => { - MockApiClient.addMockResponse({ - method: 'GET', - url: '/organizations/org-slug/events/', - body: { - data: [ - { - 'profile.id': MOCK_PROFILE_ID, - }, - ], - }, - }); - - render( - <TransactionProfileIdProvider - transactionId={MOCK_TRX_ID} - timestamp="2022-12-19T16:00:00.000Z" - > - <MockComponent /> - </TransactionProfileIdProvider> - ); - - await waitFor(() => screen.findAllByTestId(MOCK_PROFILE_ID)); - - expect(useProfileEventsSpy).toHaveBeenCalledWith( - expect.objectContaining({ - query: 'id:' + MOCK_TRX_ID, - datetime: { - end: new Date('2022-12-20T04:00:00.000Z'), - period: null, - start: new Date('2022-12-19T04:00:00.000Z'), - utc: true, - }, - }) - ); - expect(useTransactionProfileIdSpy).toHaveReturnedWith(MOCK_PROFILE_ID); - }); -}); diff --git a/static/app/components/projects/bookmarkStar.tsx b/static/app/components/projects/bookmarkStar.tsx index a660b822029b12..9c606e569b4873 100644 --- a/static/app/components/projects/bookmarkStar.tsx +++ b/static/app/components/projects/bookmarkStar.tsx @@ -9,6 +9,7 @@ import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {Organization} from 'sentry/types/organization'; import type {Project} from 'sentry/types/project'; +import {useMutation} from 'sentry/utils/queryClient'; import useApi from 'sentry/utils/useApi'; type Props = { @@ -19,28 +20,36 @@ type Props = { }; function BookmarkStar({className, organization, project, onToggle}: Props) { - const api = useApi(); + const api = useApi({persistInFlight: true}); const [isBookmarked, setIsBookmarked] = useState(project.isBookmarked); - const handleBookmarkToggle = (event: React.MouseEvent) => { - // prevent dropdowns from closing - event.stopPropagation(); + const {mutate: handleBookmarkToggle, isPending: isBookmarking} = useMutation({ + mutationFn: () => { + return update(api, { + orgId: organization.slug, + projectId: project.slug, + data: {isBookmarked: !isBookmarked}, + }); + }, + onMutate: () => { + onToggle?.(isBookmarked); + setIsBookmarked(current => !current); + }, + onError: () => { + addErrorMessage(t('Unable to toggle bookmark for %s', project.slug)); + setIsBookmarked(current => !current); + }, + }); - update(api, { - orgId: organization.slug, - projectId: project.slug, - data: {isBookmarked: !isBookmarked}, - }).catch(() => addErrorMessage(t('Unable to toggle bookmark for %s', project.slug))); - - setIsBookmarked(current => !current); - onToggle?.(!isBookmarked); - }; + const label = isBookmarked ? 
t('Remove Bookmark') : t('Bookmark'); return ( <BookmarkStarButton - aria-label={t('Bookmark Project')} + title={label} + aria-label={label} aria-pressed={isBookmarked} - onClick={handleBookmarkToggle} + busy={isBookmarking} + onClick={() => handleBookmarkToggle()} size="zero" borderless className={className} diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx index 084f0e15310f57..c23ad1fe73963f 100644 --- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx +++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx @@ -16,7 +16,7 @@ import Timeline from 'sentry/components/timeline'; import {Tooltip} from 'sentry/components/tooltip'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Extraction} from 'sentry/utils/replays/extractHtml'; +import type {Extraction} from 'sentry/utils/replays/extractDomNodes'; import {getReplayDiffOffsetsFromFrame} from 'sentry/utils/replays/getDiffTimestamps'; import getFrameDetails from 'sentry/utils/replays/getFrameDetails'; import type ReplayReader from 'sentry/utils/replays/replayReader'; diff --git a/static/app/components/replays/diff/replayDiffChooser.tsx b/static/app/components/replays/diff/replayDiffChooser.tsx index f1dd87e6f6309e..63627a02d9cb0c 100644 --- a/static/app/components/replays/diff/replayDiffChooser.tsx +++ b/static/app/components/replays/diff/replayDiffChooser.tsx @@ -1,6 +1,7 @@ import styled from '@emotion/styled'; import FeatureBadge from 'sentry/components/badge/featureBadge'; +import {ReplayMutationTree} from 'sentry/components/replays/diff/replayMutationTree'; import {ReplaySideBySideImageDiff} from 'sentry/components/replays/diff/replaySideBySideImageDiff'; import {ReplaySliderDiff} from 'sentry/components/replays/diff/replaySliderDiff'; import {ReplayTextDiff} from 'sentry/components/replays/diff/replayTextDiff'; @@ -22,6 +23,7 @@ export const enum DiffType { HTML = 'html', SLIDER = 'slider', VISUAL = 'visual', + MUTATIONS = 'mutations', } export default function ReplayDiffChooser({ @@ -41,6 +43,9 @@ export default function ReplayDiffChooser({ <TabList> <TabList.Item key={DiffType.SLIDER}>{t('Slider Diff')}</TabList.Item> <TabList.Item key={DiffType.VISUAL}>{t('Side By Side Diff')}</TabList.Item> + <TabList.Item key={DiffType.MUTATIONS}> + {t('Mutations')} <FeatureBadge type={'beta'} /> + </TabList.Item> <TabList.Item key={DiffType.HTML}> {t('HTML Diff')} <FeatureBadge type={'beta'} /> </TabList.Item> @@ -68,6 +73,13 @@ export default function ReplayDiffChooser({ rightOffsetMs={rightOffsetMs} /> </TabPanels.Item> + <TabPanels.Item key={DiffType.MUTATIONS}> + <ReplayMutationTree + leftOffsetMs={leftOffsetMs} + replay={replay} + rightOffsetMs={rightOffsetMs} + /> + </TabPanels.Item> </StyledTabPanels> </TabStateProvider> </Grid> diff --git a/static/app/components/replays/diff/replayMutationTree.tsx b/static/app/components/replays/diff/replayMutationTree.tsx new file mode 100644 index 00000000000000..a40b5770883e33 --- /dev/null +++ b/static/app/components/replays/diff/replayMutationTree.tsx @@ -0,0 +1,53 @@ +import {css} from '@emotion/react'; +import styled from '@emotion/styled'; + +import StructuredEventData from 'sentry/components/structuredEventData'; +import useExtractDiffMutations from 'sentry/utils/replays/hooks/useExtractDiffMutations'; +import type ReplayReader from 'sentry/utils/replays/replayReader'; + +interface Props { + leftOffsetMs: number; + replay: ReplayReader; + 
rightOffsetMs: number; +} + +export function ReplayMutationTree({replay, leftOffsetMs, rightOffsetMs}: Props) { + const {data} = useExtractDiffMutations({ + leftOffsetMs, + replay, + rightOffsetMs, + }); + + const timeIndexedMutations = Array.from(data?.values() ?? []).reduce( + (acc, mutation) => { + for (const timestamp of Object.keys(mutation)) { + acc[timestamp] = mutation[timestamp]; + } + return acc; + }, + {} + ); + + return ( + <ScrollWrapper> + <StructuredEventData + key={data?.size} + data={timeIndexedMutations} + maxDefaultDepth={4} + css={css` + flex: auto 1 1; + & > pre { + margin: 0; + } + `} + /> + </ScrollWrapper> + ); +} + +const ScrollWrapper = styled('div')` + overflow: auto; + height: 0; + display: flex; + flex-grow: 1; +`; diff --git a/static/app/components/replays/diff/replaySliderDiff.tsx b/static/app/components/replays/diff/replaySliderDiff.tsx index 8873c8b18519a1..6d40793ab8730d 100644 --- a/static/app/components/replays/diff/replaySliderDiff.tsx +++ b/static/app/components/replays/diff/replaySliderDiff.tsx @@ -71,7 +71,7 @@ function DiffSides({ viewDimensions: {height: number; width: number}; width: string | undefined; }) { - const rightSideElem = useRef<HTMLDivElement>(null); + const beforeElemRef = useRef<HTMLDivElement>(null); const dividerElem = useRef<HTMLDivElement>(null); const {onMouseDown: onDividerMouseDown} = useResizableDrawer({ @@ -79,12 +79,11 @@ function DiffSides({ initialSize: viewDimensions.width / 2, min: 0, onResize: newSize => { - if (rightSideElem.current) { - rightSideElem.current.style.width = + if (beforeElemRef.current) { + beforeElemRef.current.style.width = viewDimensions.width === 0 ? '100%' - : toPixels(Math.min(viewDimensions.width, viewDimensions.width - newSize)) ?? - '0px'; + : toPixels(Math.min(viewDimensions.width, newSize)) ?? 
'0px'; } if (dividerElem.current) { dividerElem.current.style.left = @@ -118,18 +117,18 @@ function DiffSides({ <ReplayPlayerStateContextProvider> <StyledNegativeSpaceContainer> <ReplayPlayerMeasurer measure="both"> - {style => <ReplayPlayer style={style} offsetMs={leftOffsetMs} />} + {style => <ReplayPlayer style={style} offsetMs={rightOffsetMs} />} </ReplayPlayerMeasurer> </StyledNegativeSpaceContainer> </ReplayPlayerStateContextProvider> </Placement> </Cover> - <Cover ref={rightSideElem} style={{width: 0}}> + <Cover ref={beforeElemRef}> <Placement style={{width}}> <ReplayPlayerStateContextProvider> <StyledNegativeSpaceContainer> <ReplayPlayerMeasurer measure="both"> - {style => <ReplayPlayer style={style} offsetMs={rightOffsetMs} />} + {style => <ReplayPlayer style={style} offsetMs={leftOffsetMs} />} </ReplayPlayerMeasurer> </StyledNegativeSpaceContainer> </ReplayPlayerStateContextProvider> @@ -159,13 +158,15 @@ const Cover = styled('div')` height: 100%; overflow: hidden; position: absolute; - right: 0px; + left: 0px; top: 0px; - border-color: ${p => p.theme.red300}; + border-color: ${p => p.theme.green300}; & + & { - border-color: ${p => p.theme.green300}; - border-left-color: transparent; + border: 3px solid; + border-radius: ${space(0.5)} 0 0 ${space(0.5)}; + border-color: ${p => p.theme.red300}; + border-right-width: 0; } `; @@ -174,7 +175,7 @@ const Placement = styled('div')` height: 100%; justify-content: center; position: absolute; - right: 0; + left: 0; top: 0; place-items: center; `; diff --git a/static/app/components/replays/replayContext.tsx b/static/app/components/replays/replayContext.tsx index e8597ae48a340c..25b64b29411833 100644 --- a/static/app/components/replays/replayContext.tsx +++ b/static/app/components/replays/replayContext.tsx @@ -428,7 +428,8 @@ export function Provider({ speed: prefs.playbackSpeed, // rrweb specific theme, - events: events ?? [], + eventsWithSnapshots: replay?.getRRWebFramesWithSnapshots() ?? [], + touchEvents: replay?.getRRwebTouchEvents() ?? 
[], // common to both root, context: { @@ -448,7 +449,6 @@ export function Provider({ applyInitialOffset, clipWindow, durationMs, - events, isFetching, isVideoReplay, organization.slug, diff --git a/static/app/components/replays/videoReplayerWithInteractions.tsx b/static/app/components/replays/videoReplayerWithInteractions.tsx index a1ca3510afa6e0..ebccbacb409ff7 100644 --- a/static/app/components/replays/videoReplayerWithInteractions.tsx +++ b/static/app/components/replays/videoReplayerWithInteractions.tsx @@ -5,20 +5,13 @@ import {Replayer} from '@sentry-internal/rrweb'; import type {VideoReplayerConfig} from 'sentry/components/replays/videoReplayer'; import {VideoReplayer} from 'sentry/components/replays/videoReplayer'; import type {ClipWindow, RecordingFrame, VideoEvent} from 'sentry/utils/replays/types'; -import { - EventType, - isMetaFrame, - isTouchEndFrame, - isTouchStartFrame, - NodeType, -} from 'sentry/utils/replays/types'; type RootElem = HTMLDivElement | null; interface VideoReplayerWithInteractionsOptions { context: {sdkName: string | undefined | null; sdkVersion: string | undefined | null}; durationMs: number; - events: RecordingFrame[]; + eventsWithSnapshots: RecordingFrame[]; onBuffer: (isBuffering: boolean) => void; onFinished: () => void; onLoaded: (event: any) => void; @@ -26,6 +19,7 @@ interface VideoReplayerWithInteractionsOptions { speed: number; start: number; theme: Theme; + touchEvents: RecordingFrame[]; videoApiPrefix: string; videoEvents: VideoEvent[]; clipWindow?: ClipWindow; @@ -42,7 +36,8 @@ export class VideoReplayerWithInteractions { constructor({ videoEvents, - events, + eventsWithSnapshots, + touchEvents, root, start, videoApiPrefix, @@ -74,62 +69,6 @@ export class VideoReplayerWithInteractions { root?.classList.add('video-replayer'); - const eventsWithSnapshots: RecordingFrame[] = []; - events.forEach((e, index) => { - // For taps, sometimes the timestamp difference between TouchStart - // and TouchEnd is too small. This clamps the tap to a min time - // if the difference is less, so that the rrweb tap is visible and obvious. 
- if (isTouchStartFrame(e) && index < events.length - 2) { - const nextEvent = events[index + 1]; - if (isTouchEndFrame(nextEvent)) { - nextEvent.timestamp = Math.max(nextEvent.timestamp, e.timestamp + 500); - } - } - eventsWithSnapshots.push(e); - if (isMetaFrame(e)) { - // Create a mock full snapshot event, in order to render rrweb gestures properly - // Need to add one for every meta event we see - // The hardcoded data.node.id here should match the ID of the data being sent - // in the `positions` arrays - eventsWithSnapshots.push({ - type: EventType.FullSnapshot, - data: { - node: { - type: NodeType.Document, - childNodes: [ - { - type: NodeType.DocumentType, - id: 1, - name: 'html', - publicId: '', - systemId: '', - }, - { - type: NodeType.Element, - id: 2, - tagName: 'html', - attributes: { - lang: 'en', - }, - childNodes: [], - }, - ], - id: 0, - }, - initialOffset: { - top: 0, - left: 0, - }, - }, - timestamp: e.timestamp, - }); - } - }); - - // log instances where we have a pointer touchStart without a touchEnd - const touchEvents = eventsWithSnapshots.filter( - e => isTouchEndFrame(e) || isTouchStartFrame(e) - ); const grouped = Object.groupBy(touchEvents, (t: any) => t.data.pointerId); Object.values(grouped).forEach(t => { if (t?.length !== 2) { diff --git a/static/app/components/replaysOnboarding/platformOptionDropdown.tsx b/static/app/components/replaysOnboarding/platformOptionDropdown.tsx index 457dc1dd4fd70e..780a9fc75f8924 100644 --- a/static/app/components/replaysOnboarding/platformOptionDropdown.tsx +++ b/static/app/components/replaysOnboarding/platformOptionDropdown.tsx @@ -1,7 +1,10 @@ +import {Fragment} from 'react'; + import type {SelectOption} from 'sentry/components/compactSelect'; import {CompactSelect} from 'sentry/components/compactSelect'; import type {PlatformOption} from 'sentry/components/onboarding/gettingStartedDoc/types'; import {useUrlPlatformOptions} from 'sentry/components/onboarding/platformOptionsControl'; +import {t} from 'sentry/locale'; import useRouter from 'sentry/utils/useRouter'; export type OptionControlProps = { @@ -73,12 +76,15 @@ export function PlatformOptionDropdown({ } return ( - <OptionControl - key="platformOption" - option={platforms} - value={urlOptionValues.siblingOption ?? platforms.items[0]?.label} - onChange={v => handleChange('siblingOption', v.value)} - disabled={disabled} - /> + <Fragment> + {t('with')} + <OptionControl + key="platformOption" + option={platforms} + value={urlOptionValues.siblingOption ?? 
platforms.items[0]?.label} + onChange={v => handleChange('siblingOption', v.value)} + disabled={disabled} + /> + </Fragment> ); } diff --git a/static/app/components/replaysOnboarding/sidebar.tsx b/static/app/components/replaysOnboarding/sidebar.tsx index aece2b7ef1e18f..f2c6054f997d19 100644 --- a/static/app/components/replaysOnboarding/sidebar.tsx +++ b/static/app/components/replaysOnboarding/sidebar.tsx @@ -259,15 +259,12 @@ function OnboardingContent({ /> ), })} - {jsFrameworkDocs?.platformOptions && - tct('with [optionSelect]', { - optionSelect: ( - <PlatformOptionDropdown - platformOptions={jsFrameworkDocs?.platformOptions} - disabled={setupMode() === 'jsLoader'} - /> - ), - })} + {jsFrameworkDocs?.platformOptions && ( + <PlatformOptionDropdown + platformOptions={jsFrameworkDocs?.platformOptions} + disabled={setupMode() === 'jsLoader'} + /> + )} </PlatformSelect> ) : ( t('I use NPM or Yarn') diff --git a/static/app/components/repositoryEditForm.tsx b/static/app/components/repositoryEditForm.tsx index eb963bd373a583..a7b869e704c820 100644 --- a/static/app/components/repositoryEditForm.tsx +++ b/static/app/components/repositoryEditForm.tsx @@ -58,7 +58,7 @@ function RepositoryEditForm({ 'Changing the [name:repo name] may have consequences if it no longer matches the repo name used when [link:sending commits with releases].', { link: ( - <ExternalLink href="https://docs.sentry.io/product/cli/releases/#sentry-cli-commit-integration" /> + <ExternalLink href="https://docs.sentry.io/cli/releases/#sentry-cli-commit-integration" /> ), name: <strong>repo name</strong>, } diff --git a/static/app/components/searchQueryBuilder/index.tsx b/static/app/components/searchQueryBuilder/index.tsx index b21078ab190d04..ff0a27b3adf4cf 100644 --- a/static/app/components/searchQueryBuilder/index.tsx +++ b/static/app/components/searchQueryBuilder/index.tsx @@ -82,6 +82,11 @@ export interface SearchQueryBuilderProps { * Sections and filter keys are displayed in the order they are provided. */ filterKeySections?: FilterKeySection[]; + /** + * A function that returns a warning message for a given filter key + * will only render a warning if the value is truthy + */ + getFilterTokenWarning?: (key: string) => React.ReactNode; /** * Allows for customization of the invalid token messages. 
*/ @@ -195,6 +200,7 @@ export function SearchQueryBuilder({ searchSource, showUnsubmittedIndicator, trailingItems, + getFilterTokenWarning, }: SearchQueryBuilderProps) { const wrapperRef = useRef<HTMLDivElement>(null); const actionBarRef = useRef<HTMLDivElement>(null); @@ -207,6 +213,7 @@ export function SearchQueryBuilder({ const parsedQuery = useMemo( () => parseQueryBuilderValue(state.query, fieldDefinitionGetter, { + getFilterTokenWarning, disallowFreeText, disallowLogicalOperators, disallowUnsupportedFilters, @@ -223,6 +230,7 @@ export function SearchQueryBuilder({ disallowWildcard, filterKeys, invalidMessages, + getFilterTokenWarning, ] ); diff --git a/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx b/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx index 09e30637eb7013..af001328692cca 100644 --- a/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx +++ b/static/app/components/searchQueryBuilder/tokens/filter/filter.tsx @@ -205,11 +205,13 @@ export function SearchQueryBuilderFilter({item, state, token}: SearchQueryTokenP }); const tokenHasError = 'invalid' in token && defined(token.invalid); + const tokenHasWarning = 'warning' in token && defined(token.warning); return ( <FilterWrapper aria-label={token.text} aria-invalid={tokenHasError} + state={tokenHasError ? 'invalid' : tokenHasWarning ? 'warning' : 'valid'} ref={ref} {...modifiedRowProps} > @@ -265,7 +267,7 @@ export function SearchQueryBuilderFilter({item, state, token}: SearchQueryTokenP ); } -const FilterWrapper = styled('div')` +const FilterWrapper = styled('div')<{state: 'invalid' | 'warning' | 'valid'}>` position: relative; border: 1px solid ${p => p.theme.innerBorder}; border-radius: ${p => p.theme.borderRadius}; @@ -278,10 +280,18 @@ const FilterWrapper = styled('div')` outline: none; } - &[aria-invalid='true'] { - border-color: ${p => p.theme.red200}; - background-color: ${p => p.theme.red100}; - } + ${p => + p.state === 'invalid' + ? ` + border-color: ${p.theme.red200}; + background-color: ${p.theme.red100}; + ` + : p.state === 'warning' + ? ` + border-color: ${p.theme.gray300}; + background-color: ${p.theme.gray100}; + ` + : ''} &[aria-selected='true'] { background-color: ${p => p.theme.gray100}; diff --git a/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx b/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx index 219a92ef15dba2..ef673eebe8884f 100644 --- a/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx +++ b/static/app/components/searchQueryBuilder/tokens/invalidTokenTooltip.tsx @@ -17,13 +17,15 @@ interface InvalidTokenTooltipProps extends Omit<TooltipProps, 'title'> { function getForceVisible({ isFocused, isInvalid, + hasWarning, forceVisible, }: { + hasWarning: boolean; isFocused: boolean; isInvalid: boolean; forceVisible?: boolean; }) { - if (!isInvalid) { + if (!isInvalid && !hasWarning) { return false; } @@ -43,6 +45,9 @@ export function InvalidTokenTooltip({ ...tooltipProps }: InvalidTokenTooltipProps) { const invalid = 'invalid' in token ? token.invalid : null; + const warning = 'warning' in token ? 
token.warning : null; + + const hasWarning = Boolean(warning); const isInvalid = Boolean(invalid); const isFocused = state.selectionManager.isFocused && state.selectionManager.focusedKey === item.key; @@ -50,9 +55,9 @@ export function InvalidTokenTooltip({ return ( <Tooltip skipWrapper - forceVisible={getForceVisible({isFocused, isInvalid, forceVisible})} + forceVisible={getForceVisible({isFocused, isInvalid, hasWarning, forceVisible})} position="bottom" - title={invalid?.reason ?? t('This token is invalid')} + title={invalid?.reason ?? warning ?? t('This token is invalid')} {...tooltipProps} > {children} diff --git a/static/app/components/searchQueryBuilder/utils.tsx b/static/app/components/searchQueryBuilder/utils.tsx index fa4a29f501b8e1..740705c5cc9021 100644 --- a/static/app/components/searchQueryBuilder/utils.tsx +++ b/static/app/components/searchQueryBuilder/utils.tsx @@ -72,6 +72,7 @@ export function parseQueryBuilderValue( disallowLogicalOperators?: boolean; disallowUnsupportedFilters?: boolean; disallowWildcard?: boolean; + getFilterTokenWarning?: (key: string) => React.ReactNode; invalidMessages?: SearchConfig['invalidMessages']; } ): ParseResult | null { @@ -79,6 +80,7 @@ export function parseQueryBuilderValue( parseSearch(value || ' ', { flattenParenGroups: true, disallowFreeText: options?.disallowFreeText, + getFilterTokenWarning: options?.getFilterTokenWarning, validateKeys: options?.disallowUnsupportedFilters, disallowWildcard: options?.disallowWildcard, disallowedLogicalOperators: options?.disallowLogicalOperators diff --git a/static/app/components/searchSyntax/parser.tsx b/static/app/components/searchSyntax/parser.tsx index 9d524fca7052aa..92c2970364247f 100644 --- a/static/app/components/searchSyntax/parser.tsx +++ b/static/app/components/searchSyntax/parser.tsx @@ -770,9 +770,12 @@ export class TokenConverter { * Checks a filter against some non-grammar validation rules */ checkFilterWarning = <T extends FilterType>(key: FilterMap[T]['key']) => { - if (![Token.KEY_SIMPLE, Token.KEY_EXPLICIT_TAG].includes(key.type)) { + if ( + ![Token.KEY_SIMPLE, Token.KEY_EXPLICIT_TAG, Token.KEY_AGGREGATE].includes(key.type) + ) { return null; } + const keyName = getKeyName( key as TokenResult<Token.KEY_SIMPLE | Token.KEY_EXPLICIT_TAG> ); diff --git a/static/app/components/sentryAppComponentIcon.spec.tsx b/static/app/components/sentryAppComponentIcon.spec.tsx new file mode 100644 index 00000000000000..824353b806cc07 --- /dev/null +++ b/static/app/components/sentryAppComponentIcon.spec.tsx @@ -0,0 +1,24 @@ +import {SentryAppComponentFixture} from 'sentry-fixture/sentryAppComponent'; + +import {sentryAppComponentIsDisabled} from 'sentry/components/sentryAppComponentIcon'; + +describe('SentryAppComponentIcon', function () { + it('sentryAppComponentIsDisabled returns false if the error is a non empty string', () => { + const component = SentryAppComponentFixture(); + component.error = 'RIP couldnt connect to sentry :C'; + expect(sentryAppComponentIsDisabled(component)).toBe(false); + }); + + it('sentryAppComponentIsDisabled returns true if the error is an empty string', () => { + const component = SentryAppComponentFixture(); + component.error = ''; + expect(sentryAppComponentIsDisabled(component)).toBe(true); + }); + + // TODO: Delete after new errors are deployed + it('sentryAppComponentIsDisabled returns itself if the error is a boolean', () => { + const component = SentryAppComponentFixture(); + component.error = true; + expect(sentryAppComponentIsDisabled(component)).toBe(true); + }); 
+}); diff --git a/static/app/components/sentryAppComponentIcon.tsx b/static/app/components/sentryAppComponentIcon.tsx index afda2cfa354ef8..b1e44f250a9dea 100644 --- a/static/app/components/sentryAppComponentIcon.tsx +++ b/static/app/components/sentryAppComponentIcon.tsx @@ -18,7 +18,8 @@ function SentryAppComponentIcon({sentryAppComponent, size = 20}: Props) { ({color}) => color === false ); const isDefault = selectedAvatar?.avatarType !== 'upload'; - const isDisabled = sentryAppComponent.error; + const isDisabled = sentryAppComponentIsDisabled(sentryAppComponent); + return ( <SentryAppAvatarWrapper isDark={ConfigStore.get('theme') === 'dark'} @@ -34,6 +35,11 @@ function SentryAppComponentIcon({sentryAppComponent, size = 20}: Props) { ); } +// Patch for backwards compatibility as the change's truth table is inverse to the previous' +export const sentryAppComponentIsDisabled = (component: SentryAppComponent) => { + return typeof component.error === 'boolean' ? component.error : !component.error; +}; + export default SentryAppComponentIcon; const SentryAppAvatarWrapper = styled('span')<{ diff --git a/static/app/components/sidebar/index.spec.tsx b/static/app/components/sidebar/index.spec.tsx index 58344913d66fef..ba4e9463a90470 100644 --- a/static/app/components/sidebar/index.spec.tsx +++ b/static/app/components/sidebar/index.spec.tsx @@ -11,6 +11,7 @@ import {logout} from 'sentry/actionCreators/account'; import {OnboardingContextProvider} from 'sentry/components/onboarding/onboardingContext'; import SidebarContainer from 'sentry/components/sidebar'; import ConfigStore from 'sentry/stores/configStore'; +import PreferenceStore from 'sentry/stores/preferencesStore'; import type {Organization} from 'sentry/types/organization'; import type {StatuspageIncident} from 'sentry/types/system'; import localStorage from 'sentry/utils/localStorage'; @@ -368,7 +369,7 @@ describe('Sidebar', function () { }); const links = screen.getAllByRole('link'); - expect(links).toHaveLength(31); + expect(links).toHaveLength(25); [ 'Issues', @@ -380,16 +381,10 @@ describe('Sidebar', function () { 'Replays', 'Discover', /Insights/, - 'Requests', - 'Queries', - 'Assets', - 'App Starts', - 'Screen Loads', - 'Web Vitals', - /Caches/, - /Queues/, - /Mobile UI/, - /LLM Monitoring/, + 'Frontend', + 'Backend', + 'Mobile', + 'AI', 'Performance', 'User Feedback', 'Crons', @@ -406,36 +401,117 @@ describe('Sidebar', function () { }); }); - it('mobile screens module hides all other mobile modules', async function () { - localStorage.setItem('sidebar-accordion-insights:expanded', 'true'); - renderSidebarWithFeatures([ - 'insights-entry-points', - 'starfish-mobile-ui-module', - 'insights-mobile-screens-module', - ]); + it('should not render floating accordion when expanded', async () => { + renderSidebarWithFeatures(ALL_AVAILABLE_FEATURES); + await userEvent.click( + screen.getByTestId('sidebar-accordion-insights-domains-item') + ); + expect(screen.queryByTestId('floating-accordion')).not.toBeInTheDocument(); + }); - await waitFor(function () { - expect(apiMocks.broadcasts).toHaveBeenCalled(); + it('should render floating accordion when collapsed', async () => { + renderSidebarWithFeatures(ALL_AVAILABLE_FEATURES); + await userEvent.click(screen.getByTestId('sidebar-collapse')); + await userEvent.click( + screen.getByTestId('sidebar-accordion-insights-domains-item') + ); + expect(await screen.findByTestId('floating-accordion')).toBeInTheDocument(); + }); + }); + + describe('Rollback prompts', () => { + beforeEach(() => { + 
PreferenceStore.showSidebar(); + }); + + it('should render the sidebar banner with no dismissed prompts and an existing rollback', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/prompts-activity/`, + body: {data: null}, }); - ['App Starts', 'Screen Loads', /Mobile UI/].forEach(title => { - expect(screen.queryByText(title)).not.toBeInTheDocument(); + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/user-rollback/`, + body: {data: null}, }); - expect(screen.getByText(/Mobile Screens/)).toBeInTheDocument(); + renderSidebarWithFeatures(['sentry-rollback-2024']); + + expect(await screen.findByText(/Your 2024 Rollback/)).toBeInTheDocument(); }); - it('should not render floating accordion when expanded', async () => { - renderSidebarWithFeatures(ALL_AVAILABLE_FEATURES); - await userEvent.click(screen.getByTestId('sidebar-accordion-insights-item')); - expect(screen.queryByTestId('floating-accordion')).not.toBeInTheDocument(); + it('will not render anything if the user does not have a rollback', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/prompts-activity/`, + body: {data: null}, + }); + + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/user-rollback/`, + statusCode: 404, + }); + + renderSidebarWithFeatures(['sentry-rollback-2024']); + + await screen.findByText('OS'); + + await waitFor(() => { + expect(screen.queryByText(/Your 2024 Rollback/)).not.toBeInTheDocument(); + }); }); - it('should render floating accordion when collapsed', async () => { - renderSidebarWithFeatures(ALL_AVAILABLE_FEATURES); + it('will not render sidebar banner when collapsed', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/prompts-activity/`, + body: {data: null}, + }); + + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/user-rollback/`, + body: {data: null}, + }); + + renderSidebarWithFeatures(['sentry-rollback-2024']); + await userEvent.click(screen.getByTestId('sidebar-collapse')); - await userEvent.click(screen.getByTestId('sidebar-accordion-insights-item')); - expect(await screen.findByTestId('floating-accordion')).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.queryByText(/Your 2024 Rollback/)).not.toBeInTheDocument(); + }); + }); + + it('should show dot on org dropdown after dismissing sidebar banner', async () => { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/prompts-activity/`, + body: {data: null}, + }); + + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/user-rollback/`, + body: {data: null}, + }); + + const dismissMock = MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/prompts-activity/`, + method: 'PUT', + body: {}, + }); + + renderSidebarWithFeatures(['sentry-rollback-2024']); + + await userEvent.click(await screen.findByRole('button', {name: /Dismiss/})); + + expect(await screen.findByTestId('rollback-notification-dot')).toBeInTheDocument(); + expect(screen.queryByText(/Your 2024 Rollback/)).not.toBeInTheDocument(); + expect(dismissMock).toHaveBeenCalled(); + + // Opening the org dropdown will remove the dot + await userEvent.click(screen.getByTestId('sidebar-dropdown')); + await waitFor(() => { + expect(screen.queryByTestId('rollback-notification-dot')).not.toBeInTheDocument(); + }); + + expect(dismissMock).toHaveBeenCalledTimes(2); }); }); }); diff --git 
a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx index 06a3368b62f1c0..bae6b5408a7e31 100644 --- a/static/app/components/sidebar/index.tsx +++ b/static/app/components/sidebar/index.tsx @@ -6,6 +6,7 @@ import {hideSidebar, showSidebar} from 'sentry/actionCreators/preferences'; import Feature from 'sentry/components/acl/feature'; import GuideAnchor from 'sentry/components/assistant/guideAnchor'; import {Chevron} from 'sentry/components/chevron'; +import FeatureFlagOnboardingSidebar from 'sentry/components/events/featureFlags/featureFlagOnboardingSidebar'; import FeedbackOnboardingSidebar from 'sentry/components/feedback/feedbackOnboarding/sidebar'; import Hook from 'sentry/components/hook'; import {OnboardingContext} from 'sentry/components/onboarding/onboardingContext'; @@ -18,6 +19,7 @@ import { ExpandedContextProvider, } from 'sentry/components/sidebar/expandedContextProvider'; import {NewOnboardingStatus} from 'sentry/components/sidebar/newOnboardingStatus'; +import {DismissableRollbackBanner} from 'sentry/components/sidebar/rollback/dismissableBanner'; import {isDone} from 'sentry/components/sidebar/utils'; import { IconDashboard, @@ -53,8 +55,6 @@ import {useLocation} from 'sentry/utils/useLocation'; import useMedia from 'sentry/utils/useMedia'; import useOrganization from 'sentry/utils/useOrganization'; import useProjects from 'sentry/utils/useProjects'; -import {useModuleURLBuilder} from 'sentry/views/insights/common/utils/useModuleURL'; -import {MODULE_SIDEBAR_TITLE as HTTP_MODULE_SIDEBAR_TITLE} from 'sentry/views/insights/http/settings'; import { AI_LANDING_SUB_PATH, AI_SIDEBAR_LABEL, @@ -75,7 +75,6 @@ import { DOMAIN_VIEW_BASE_TITLE, DOMAIN_VIEW_BASE_URL, } from 'sentry/views/insights/pages/settings'; -import {MODULE_TITLES} from 'sentry/views/insights/settings'; import MetricsOnboardingSidebar from 'sentry/views/metrics/ddmOnboarding/sidebar'; import {getPerformanceBaseUrl} from 'sentry/views/performance/utils'; @@ -143,7 +142,6 @@ function Sidebar() { const hasNewNav = organization?.features.includes('navigation-sidebar-v2'); const hasOrganization = !!organization; const isSelfHostedErrorsOnly = ConfigStore.get('isSelfHostedErrorsOnly'); - const hasPerfDomainViews = organization?.features.includes('insights-domain-view'); const collapsed = hasNewNav ? 
true : !!preferences.collapsed; const horizontal = useMedia(`(max-width: ${theme.breakpoints.medium})`); @@ -270,156 +268,6 @@ function Sidebar() { </Feature> ); - const moduleURLBuilder = useModuleURLBuilder(true); - - const queries = hasOrganization && ( - <Feature key="db" features="insights-entry-points" organization={organization}> - <SidebarItem - {...sidebarItemProps} - label={ - <GuideAnchor target="performance-database">{MODULE_TITLES.db}</GuideAnchor> - } - to={`/organizations/${organization.slug}/${moduleURLBuilder('db')}/`} - id="performance-database" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const requests = hasOrganization && ( - <Feature key="http" features="insights-entry-points" organization={organization}> - <SidebarItem - {...sidebarItemProps} - label={ - <GuideAnchor target="performance-http">{HTTP_MODULE_SIDEBAR_TITLE}</GuideAnchor> - } - to={`/organizations/${organization.slug}/${moduleURLBuilder('http')}/`} - id="performance-http" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const caches = hasOrganization && ( - <Feature key="cache" features="insights-entry-points" organization={organization}> - <SidebarItem - {...sidebarItemProps} - label={ - <GuideAnchor target="performance-cache">{MODULE_TITLES.cache}</GuideAnchor> - } - to={`/organizations/${organization.slug}/${moduleURLBuilder('cache')}/`} - id="performance-cache" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const webVitals = hasOrganization && ( - <Feature key="vital" features="insights-entry-points" organization={organization}> - <SidebarItem - {...sidebarItemProps} - label={ - <GuideAnchor target="performance-webvitals">{MODULE_TITLES.vital}</GuideAnchor> - } - to={`/organizations/${organization.slug}/${moduleURLBuilder('vital')}/`} - id="performance-webvitals" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const queues = hasOrganization && ( - <Feature key="queue" features="insights-entry-points" organization={organization}> - <SidebarItem - {...sidebarItemProps} - label={ - <GuideAnchor target="performance-queues">{MODULE_TITLES.queue}</GuideAnchor> - } - to={`/organizations/${organization.slug}/${moduleURLBuilder('queue')}/`} - id="performance-queues" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - // the mobile screens module is meant to be as a replacement for screen load, app start, and mobile ui - // so if mobile screens is enabled, we should not show the other mobile modules - const hasMobileScreensModule = - hasOrganization && organization.features.includes('insights-mobile-screens-module'); - - const screenLoads = hasOrganization && !hasMobileScreensModule && ( - <Feature - key="screen_load" - features="insights-entry-points" - organization={organization} - > - <SidebarItem - {...sidebarItemProps} - label={MODULE_TITLES.screen_load} - to={`/organizations/${organization.slug}/${moduleURLBuilder('screen_load')}/`} - id="performance-mobile-screens" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const appStarts = hasOrganization && !hasMobileScreensModule && ( - <Feature key="app_start" features="insights-entry-points" organization={organization}> - <SidebarItem - {...sidebarItemProps} - label={MODULE_TITLES.app_start} - to={`/organizations/${organization.slug}/${moduleURLBuilder('app_start')}/`} - id="performance-mobile-app-startup" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const mobileUI = hasOrganization && !hasMobileScreensModule && ( - <Feature - key="mobile-ui" - 
features={['insights-entry-points', 'starfish-mobile-ui-module']} - organization={organization} - > - <SidebarItem - {...sidebarItemProps} - label={MODULE_TITLES['mobile-ui']} - to={`/organizations/${organization.slug}/${moduleURLBuilder('mobile-ui')}/`} - id="performance-mobile-ui" - icon={<SubitemDot collapsed />} - isAlpha - /> - </Feature> - ); - - const mobileScreens = hasOrganization && hasMobileScreensModule && ( - <Feature - key="mobile-screens" - features={['insights-entry-points']} - organization={organization} - > - <SidebarItem - {...sidebarItemProps} - label={MODULE_TITLES['mobile-screens']} - to={`/organizations/${organization.slug}/${moduleURLBuilder('mobile-screens')}/`} - id="performance-mobile-screens" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - - const resources = hasOrganization && ( - <Feature key="resource" features="insights-entry-points"> - <SidebarItem - {...sidebarItemProps} - label={<GuideAnchor target="starfish">{MODULE_TITLES.resource}</GuideAnchor>} - to={`/organizations/${organization.slug}/${moduleURLBuilder('resource')}/`} - id="performance-browser-resources" - icon={<SubitemDot collapsed />} - /> - </Feature> - ); - const traces = hasOrganization && ( <Feature features="performance-trace-explorer"> <SidebarItem @@ -433,18 +281,6 @@ function Sidebar() { </Feature> ); - const llmMonitoring = hasOrganization && ( - <Feature features={['insights-entry-points']} organization={organization}> - <SidebarItem - {...sidebarItemProps} - icon={<SubitemDot collapsed />} - label={MODULE_TITLES.ai} - to={`/organizations/${organization.slug}/${moduleURLBuilder('ai')}/`} - id="llm-monitoring" - /> - </Feature> - ); - const performance = hasOrganization && ( <Feature hookName="feature-disabled:performance-sidebar-item" @@ -611,10 +447,7 @@ function Sidebar() { ); const performanceDomains = hasOrganization && ( - <Feature - features={['insights-domain-view', 'performance-view']} - organization={organization} - > + <Feature features={['performance-view']} organization={organization}> <SidebarAccordion {...sidebarItemProps} icon={<IconGraph />} @@ -655,31 +488,6 @@ function Sidebar() { </Feature> ); - const insights = hasOrganization && !hasPerfDomainViews && ( - <Feature key="insights" features="insights-entry-points" organization={organization}> - <SidebarAccordion - {...sidebarItemProps} - icon={<IconGraph />} - label={<GuideAnchor target="insights">{t('Insights')}</GuideAnchor>} - id="insights" - initiallyExpanded={false} - exact={!shouldAccordionFloat} - > - {requests} - {queries} - {resources} - {appStarts} - {screenLoads} - {webVitals} - {caches} - {queues} - {mobileUI} - {mobileScreens} - {llmMonitoring} - </SidebarAccordion> - </Feature> - ); - // Sidebar accordion includes a secondary list of nav items // TODO: replace with a secondary panel const explore = ( @@ -720,6 +528,13 @@ function Sidebar() { )} </DropdownSidebarSection> + {organization ? 
( + <DismissableRollbackBanner + organization={organization} + collapsed={collapsed || horizontal} + /> + ) : null} + <PrimaryItems> {hasOrganization && ( <Fragment> @@ -732,7 +547,6 @@ function Sidebar() { <Fragment> <SidebarSection hasNewNav={hasNewNav}> {explore} - {insights} {performanceDomains} </SidebarSection> @@ -789,6 +603,12 @@ function Sidebar() { hidePanel={hidePanel} {...sidebarItemProps} /> + <FeatureFlagOnboardingSidebar + currentPanel={activePanel} + onShowPanel={() => togglePanel(SidebarPanelKey.FEATURE_FLAG_ONBOARDING)} + hidePanel={hidePanel} + {...sidebarItemProps} + /> <ProfilingOnboardingSidebar currentPanel={activePanel} onShowPanel={() => togglePanel(SidebarPanelKey.PROFILING_ONBOARDING)} diff --git a/static/app/components/sidebar/newOnboardingStatus.tsx b/static/app/components/sidebar/newOnboardingStatus.tsx index 62b0b659bfef62..2cb24e6e1a6880 100644 --- a/static/app/components/sidebar/newOnboardingStatus.tsx +++ b/static/app/components/sidebar/newOnboardingStatus.tsx @@ -73,12 +73,11 @@ export function NewOnboardingStatus({ }); const label = walkthrough ? t('Guided Tours') : t('Onboarding'); - const totalRemainingTasks = allTasks.length - doneTasks.length; const pendingCompletionSeen = doneTasks.length !== completeTasks.length; + const allTasksCompleted = allTasks.length === completeTasks.length; const skipQuickStart = - !organization.features?.includes('onboarding') || - (completeTasks.length === allTasks.length && !isActive); + !organization.features?.includes('onboarding') || (allTasksCompleted && !isActive); const unseenDoneTasks = useMemo( () => @@ -111,7 +110,7 @@ export function NewOnboardingStatus({ }, [onShowPanel, isActive, walkthrough, markDoneTaskAsComplete, organization]); useEffect(() => { - if (totalRemainingTasks !== 0 || skipQuickStart || quickStartCompleted) { + if (!allTasksCompleted || skipQuickStart || quickStartCompleted) { return; } @@ -123,11 +122,11 @@ export function NewOnboardingStatus({ setQuickStartCompleted(true); }, [ - totalRemainingTasks, organization, skipQuickStart, quickStartCompleted, setQuickStartCompleted, + allTasksCompleted, ]); useEffect(() => { diff --git a/static/app/components/sidebar/rollback/banner.tsx b/static/app/components/sidebar/rollback/banner.tsx new file mode 100644 index 00000000000000..6ef90af4f6c51e --- /dev/null +++ b/static/app/components/sidebar/rollback/banner.tsx @@ -0,0 +1,97 @@ +import styled from '@emotion/styled'; + +import {Button, LinkButton} from 'sentry/components/button'; +import Panel from 'sentry/components/panels/panel'; +import {IconClose, IconOpen} from 'sentry/icons'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import type {Organization} from 'sentry/types/organization'; + +type RollbackBannerProps = { + organization: Organization; + className?: string; + handleDismiss?: () => void; +}; + +export function RollbackBanner({ + className, + handleDismiss, + organization, +}: RollbackBannerProps) { + return ( + <StyledPanel className={className}> + <Title>🥳 {t('Your 2024 Rollback')} + + {t("See what you did (and didn't do) with %s this year.", organization.name)} + + } + priority="primary" + size="xs" + analyticsEventKey="rollback.sidebar_view_clicked" + analyticsEventName="Rollback: Sidebar View Clicked" + > + {t('View My Rollback')} + + {handleDismiss ? 
( + } + aria-label={t('Dismiss')} + onClick={handleDismiss} + size="xs" + borderless + analyticsEventKey="rollback.sidebar_dismiss_clicked" + analyticsEventName="Rollback: Sidebar Dismiss Clicked" + /> + ) : null} + + ); +} + +const StyledPanel = styled(Panel)` + position: relative; + background: linear-gradient( + 269.35deg, + ${p => p.theme.backgroundTertiary} 0.32%, + rgba(245, 243, 247, 0) 99.69% + ); + padding: ${space(1)}; + margin: ${space(1)}; + color: ${p => p.theme.textColor}; +`; + +const Title = styled('p')` + font-size: ${p => p.theme.fontSizeSmall}; + font-weight: ${p => p.theme.fontWeightBold}; + margin: 0; +`; + +const Description = styled('p')` + font-size: ${p => p.theme.fontSizeSmall}; + margin: ${space(0.5)} 0; +`; + +const RollbackButton = styled(LinkButton)` + background: #ff45a8; + border-color: #ff45a8; + margin: 0 auto; + width: 100%; + + &:hover { + border-color: #ff45a8; + } +`; + +const DismissButton = styled(Button)` + position: absolute; + top: 0; + right: 0; + + color: currentColor; + + &:hover { + color: currentColor; + } +`; diff --git a/static/app/components/sidebar/rollback/dismissableBanner.tsx b/static/app/components/sidebar/rollback/dismissableBanner.tsx new file mode 100644 index 00000000000000..3aaf0a4eef5971 --- /dev/null +++ b/static/app/components/sidebar/rollback/dismissableBanner.tsx @@ -0,0 +1,50 @@ +import styled from '@emotion/styled'; + +import {RollbackBanner} from 'sentry/components/sidebar/rollback/banner'; +import {useRollbackPrompts} from 'sentry/components/sidebar/rollback/useRollbackPrompts'; +import ConfigStore from 'sentry/stores/configStore'; +import {useLegacyStore} from 'sentry/stores/useLegacyStore'; +import {space} from 'sentry/styles/space'; +import type {Organization} from 'sentry/types/organization'; + +type DismissableRollbackBannerProps = {collapsed: boolean; organization: Organization}; + +export function DismissableRollbackBanner({ + collapsed, + organization, +}: DismissableRollbackBannerProps) { + const config = useLegacyStore(ConfigStore); + + const isDarkMode = config.theme === 'dark'; + + const {shouldShowSidebarBanner, onDismissSidebarBanner} = useRollbackPrompts({ + collapsed, + organization, + }); + + if (!shouldShowSidebarBanner || !organization) { + return null; + } + + return ( + + + + ); +} + +const Wrapper = styled('div')` + padding: 0 ${space(1)}; +`; + +const TranslucentBackgroundBanner = styled(RollbackBanner)<{isDarkMode: boolean}>` + position: relative; + background: rgba(245, 243, 247, ${p => (p.isDarkMode ? 0.05 : 0.1)}); + border: 1px solid rgba(245, 243, 247, ${p => (p.isDarkMode ? 0.1 : 0.15)}); + color: ${p => (p.isDarkMode ? 
p.theme.textColor : '#ebe6ef')}; + margin: ${space(0.5)} ${space(1)}; +`; diff --git a/static/app/components/sidebar/rollback/notificationDot.tsx b/static/app/components/sidebar/rollback/notificationDot.tsx new file mode 100644 index 00000000000000..d24af3f84d60f9 --- /dev/null +++ b/static/app/components/sidebar/rollback/notificationDot.tsx @@ -0,0 +1,15 @@ +import styled from '@emotion/styled'; + +export function RollbackNotificationDot() { + return ; +} + +const Dot = styled('div')` + width: 11px; + height: 11px; + border-radius: 50%; + position: absolute; + background-color: #ff45a8; + left: 25px; + top: -2px; +`; diff --git a/static/app/components/sidebar/rollback/useRollback.tsx b/static/app/components/sidebar/rollback/useRollback.tsx new file mode 100644 index 00000000000000..172dab7c616c45 --- /dev/null +++ b/static/app/components/sidebar/rollback/useRollback.tsx @@ -0,0 +1,11 @@ +import type {Organization} from 'sentry/types/organization'; +import {useApiQuery} from 'sentry/utils/queryClient'; + +export function useRollback({organization}: {organization: Organization | null}) { + return useApiQuery([`/organizations/${organization?.slug}/user-rollback/`], { + staleTime: Infinity, + retry: false, + enabled: organization?.features.includes('sentry-rollback-2024') ?? false, + retryOnMount: false, + }); +} diff --git a/static/app/components/sidebar/rollback/useRollbackPrompts.tsx b/static/app/components/sidebar/rollback/useRollbackPrompts.tsx new file mode 100644 index 00000000000000..df19cb1928fab7 --- /dev/null +++ b/static/app/components/sidebar/rollback/useRollbackPrompts.tsx @@ -0,0 +1,53 @@ +import {usePrompt} from 'sentry/actionCreators/prompts'; +import {useRollback} from 'sentry/components/sidebar/rollback/useRollback'; +import type {Organization} from 'sentry/types/organization'; + +export function useRollbackPrompts({ + collapsed, + organization, +}: { + collapsed: boolean; + organization: Organization | null; +}) { + const hasRollback = organization?.features.includes('sentry-rollback-2024') ?? 
false; + const {data} = useRollback({organization}); + + const { + isPromptDismissed: isSidebarPromptDismissed, + dismissPrompt: dismissSidebarPrompt, + } = usePrompt({ + feature: 'rollback_2024_sidebar', + organization, + options: {enabled: hasRollback}, + }); + + const { + isPromptDismissed: isDropdownPromptDismissed, + dismissPrompt: dismissDropdownPrompt, + } = usePrompt({ + feature: 'rollback_2024_dropdown', + organization, + options: {enabled: hasRollback}, + }); + + return { + shouldShowSidebarBanner: + hasRollback && data && !collapsed && isSidebarPromptDismissed === false, + shouldShowDropdownBanner: hasRollback && data, + shouldShowDot: + hasRollback && + data && + isDropdownPromptDismissed === false && + (collapsed || isSidebarPromptDismissed), + onOpenOrgDropdown: () => { + if ( + hasRollback && + isSidebarPromptDismissed === true && + isDropdownPromptDismissed === false + ) { + dismissDropdownPrompt(); + } + }, + onDismissSidebarBanner: dismissSidebarPrompt, + }; +} diff --git a/static/app/components/sidebar/sidebarDropdown/index.tsx b/static/app/components/sidebar/sidebarDropdown/index.tsx index 495e4a8baf1ff6..5242fc5bd47d9a 100644 --- a/static/app/components/sidebar/sidebarDropdown/index.tsx +++ b/static/app/components/sidebar/sidebarDropdown/index.tsx @@ -9,6 +9,9 @@ import DeprecatedDropdownMenu from 'sentry/components/deprecatedDropdownMenu'; import Hook from 'sentry/components/hook'; import IdBadge from 'sentry/components/idBadge'; import Link from 'sentry/components/links/link'; +import {RollbackBanner} from 'sentry/components/sidebar/rollback/banner'; +import {RollbackNotificationDot} from 'sentry/components/sidebar/rollback/notificationDot'; +import {useRollbackPrompts} from 'sentry/components/sidebar/rollback/useRollbackPrompts'; import SidebarDropdownMenu from 'sentry/components/sidebar/sidebarDropdownMenu.styled'; import SidebarMenuItem, {menuItemStyles} from 'sentry/components/sidebar/sidebarMenuItem'; import SidebarOrgSummary from 'sentry/components/sidebar/sidebarOrgSummary'; @@ -54,6 +57,13 @@ export default function SidebarDropdown({orientation, collapsed, hideOrgLinks}: const hasTeamRead = org?.access?.includes('team:read'); const canCreateOrg = ConfigStore.get('features').has('organizations:create'); + const {onOpenOrgDropdown, shouldShowDropdownBanner, shouldShowDot} = useRollbackPrompts( + { + collapsed: collapsed || orientation === 'top', + organization: org, + } + ); + function handleLogout() { logout(api); } @@ -75,7 +85,7 @@ export default function SidebarDropdown({orientation, collapsed, hideOrgLinks}: ); return ( - + {({isOpen, getRootProps, getActorProps, getMenuProps}) => ( - {avatar} + + {avatar} + {shouldShowDot ? : null} + {!collapsed && orientation !== 'top' && ( @@ -102,6 +115,9 @@ export default function SidebarDropdown({orientation, collapsed, hideOrgLinks}: {hasOrganization && ( + {org && shouldShowDropdownBanner ? 
( + + ) : null} {!hideOrgLinks && ( {hasOrgRead && ( @@ -263,3 +279,7 @@ const OrgAndUserMenu = styled('div')` const StyledChevron = styled(Chevron)` transform: translateY(${space(0.25)}); `; + +const AvatarWrapper = styled('div')` + position: relative; +`; diff --git a/static/app/components/sidebar/sidebarItem.tsx b/static/app/components/sidebar/sidebarItem.tsx index 0d517daaed29f6..b03d510f79ba92 100644 --- a/static/app/components/sidebar/sidebarItem.tsx +++ b/static/app/components/sidebar/sidebarItem.tsx @@ -390,7 +390,8 @@ const getActiveStyle = ({ }; const StyledSidebarItem = styled(Link, { - shouldForwardProp: p => !['isInFloatingAccordion', 'hasNewNav', 'index'].includes(p), + shouldForwardProp: p => + !['isInFloatingAccordion', 'hasNewNav', 'index', 'organization'].includes(p), })` display: flex; color: ${p => (p.isInFloatingAccordion ? p.theme.gray400 : 'inherit')}; diff --git a/static/app/components/sidebar/types.tsx b/static/app/components/sidebar/types.tsx index 5fe8c9728688c2..91f826cda24e98 100644 --- a/static/app/components/sidebar/types.tsx +++ b/static/app/components/sidebar/types.tsx @@ -9,6 +9,7 @@ export enum SidebarPanelKey { PROFILING_ONBOARDING = 'profiling_onboarding', METRICS_ONBOARDING = 'metrics_onboarding', FEEDBACK_ONBOARDING = 'feedback_onboarding', + FEATURE_FLAG_ONBOARDING = 'flag_onboarding', } export type CommonSidebarProps = { diff --git a/static/app/components/slideOverPanel.tsx b/static/app/components/slideOverPanel.tsx index c848b99f5b51ba..e0e83419acd882 100644 --- a/static/app/components/slideOverPanel.tsx +++ b/static/app/components/slideOverPanel.tsx @@ -13,11 +13,13 @@ const PANEL_HEIGHT = '50vh'; const OPEN_STYLES = { bottom: {opacity: 1, x: 0, y: 0}, right: {opacity: 1, x: 0, y: 0}, + left: {opacity: 1, x: 0, y: 0}, }; const COLLAPSED_STYLES = { bottom: {opacity: 0, x: 0, y: PANEL_HEIGHT}, right: {opacity: 0, x: PANEL_WIDTH, y: 0}, + left: {opacity: 0, x: -200, y: 0}, }; type SlideOverPanelProps = { @@ -25,8 +27,9 @@ type SlideOverPanelProps = { collapsed: boolean; ariaLabel?: string; className?: string; + 'data-test-id'?: string; onOpen?: () => void; - slidePosition?: 'right' | 'bottom'; + slidePosition?: 'right' | 'bottom' | 'left'; transitionProps?: AnimationProps['transition']; }; @@ -34,6 +37,7 @@ export default forwardRef(SlideOverPanel); function SlideOverPanel( { + 'data-test-id': testId, ariaLabel, collapsed, children, @@ -73,6 +77,7 @@ function SlideOverPanel( aria-hidden={collapsed} aria-label={ariaLabel ?? 'slide out drawer'} className={className} + data-test-id={testId} > {children} @@ -84,7 +89,7 @@ const _SlideOverPanel = styled(motion.div, { ['initial', 'animate', 'exit', 'transition'].includes(prop) || (prop !== 'collapsed' && isPropValid(prop)), })<{ - slidePosition?: 'right' | 'bottom'; + slidePosition?: 'right' | 'bottom' | 'left'; }>` position: fixed; @@ -118,16 +123,28 @@ const _SlideOverPanel = styled(motion.div, { bottom: 0; left: 0; ` - : css` - position: fixed; - - width: ${PANEL_WIDTH}; - height: 100%; - - top: 0; - right: 0; - bottom: 0; - left: auto; - `} + : p.slidePosition === 'right' + ? 
css` + position: fixed; + + width: ${PANEL_WIDTH}; + height: 100%; + + top: 0; + right: 0; + bottom: 0; + left: auto; + ` + : css` + position: relative; + + width: ${PANEL_WIDTH}; + height: 100%; + + top: 0; + right: auto; + bottom: 0; + left: auto; + `} } `; diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx index f0cbf9ebba7b28..7d897d25ea75a9 100644 --- a/static/app/components/stream/group.tsx +++ b/static/app/components/stream/group.tsx @@ -844,6 +844,7 @@ const CheckboxLabel = styled('label')<{hasNewLayout: boolean}>` `; const CountsWrapper = styled('div')` + position: relative; display: flex; flex-direction: column; `; diff --git a/static/app/components/timeRangeSelector/dateRange.tsx b/static/app/components/timeRangeSelector/dateRange.tsx index 7326d541b8a729..e84b45be46932b 100644 --- a/static/app/components/timeRangeSelector/dateRange.tsx +++ b/static/app/components/timeRangeSelector/dateRange.tsx @@ -189,7 +189,7 @@ class BaseDateRange extends Component { // Subtract additional day because we force the end date to be inclusive, // so when you pick Jan 1 the time becomes Jan 1 @ 23:59:59, // (or really, Jan 2 @ 00:00:00 - 1 second), while the start time is at 00:00 - let minDate = getStartOfPeriodAgo('days', (maxPickableDays ?? MAX_PICKABLE_DAYS) - 2); + let minDate = getStartOfPeriodAgo('days', (maxPickableDays ?? MAX_PICKABLE_DAYS) - 1); let maxDate = new Date(); diff --git a/static/app/components/tooltip.tsx b/static/app/components/tooltip.tsx index 022283ca042b54..9c0903fb5b07d4 100644 --- a/static/app/components/tooltip.tsx +++ b/static/app/components/tooltip.tsx @@ -1,5 +1,6 @@ import {createContext, Fragment, useContext, useEffect} from 'react'; import {createPortal} from 'react-dom'; +import isPropValid from '@emotion/is-prop-valid'; import type {SerializedStyles} from '@emotion/react'; import {useTheme} from '@emotion/react'; import styled from '@emotion/styled'; @@ -23,14 +24,18 @@ export const TooltipContext = createContext({container: nul interface TooltipProps extends UseHoverOverlayProps { /** - * The content to show in the tooltip popover + * The content to show in the tooltip popover. */ title: React.ReactNode; children?: React.ReactNode; /** - * Disable the tooltip display entirely + * Disable the tooltip display entirely. */ disabled?: boolean; + /** + * The max width the tooltip is allowed to grow. + */ + maxWidth?: number; /** * Additional style rules for the tooltip content. */ @@ -42,6 +47,7 @@ function Tooltip({ overlayStyle, title, disabled = false, + maxWidth, ...hoverOverlayProps }: TooltipProps) { const {container} = useContext(TooltipContext); @@ -63,6 +69,7 @@ function Tooltip({ const tooltipContent = isOpen && ( ` padding: ${space(1)} ${space(1.5)}; overflow-wrap: break-word; - max-width: 225px; + max-width: ${p => p.maxWidth ?? 
225}px; color: ${p => p.theme.textColor}; font-size: ${p => p.theme.fontSizeSmall}; line-height: 1.2; diff --git a/static/app/components/versionHoverCard.spec.tsx b/static/app/components/versionHoverCard.spec.tsx new file mode 100644 index 00000000000000..2cb53e47b536f2 --- /dev/null +++ b/static/app/components/versionHoverCard.spec.tsx @@ -0,0 +1,75 @@ +import {DeployFixture} from 'sentry-fixture/deploy'; +import {OrganizationFixture} from 'sentry-fixture/organization'; +import {ProjectFixture} from 'sentry-fixture/project'; +import {ReleaseFixture} from 'sentry-fixture/release'; +import {RepositoryFixture} from 'sentry-fixture/repository'; + +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; + +import VersionHoverCard from './versionHoverCard'; + +describe('VersionHoverCard', () => { + const organization = OrganizationFixture(); + const project = ProjectFixture(); + const release = ReleaseFixture(); + const repository = RepositoryFixture(); + const deploy = DeployFixture(); + + beforeEach(() => { + MockApiClient.clearMockResponses(); + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/repos/`, + body: [repository], + }); + MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/releases/${encodeURIComponent(release.version)}/`, + body: release, + }); + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/releases/${encodeURIComponent(release.version)}/deploys/`, + body: [deploy], + }); + }); + + it('renders', async () => { + render( + +
{release.version}
+ </VersionHoverCard>
+ ); + + expect(await screen.findByText(release.version)).toBeInTheDocument(); + await userEvent.hover(screen.getByText(release.version)); + + expect(await screen.findByText(deploy.environment)).toBeInTheDocument(); + }); + + it('renders authors without ids', async () => { + const noIdAuthorRelease = ReleaseFixture({ + authors: [ + {name: 'Test Author', email: 'test@sentry.io'}, + {name: 'Test Author 2', email: 'test2@sentry.io'}, + ], + }); + MockApiClient.addMockResponse({ + url: `/projects/${organization.slug}/${project.slug}/releases/${encodeURIComponent(noIdAuthorRelease.version)}/`, + body: noIdAuthorRelease, + }); + + render( + +
{release.version}
+ </VersionHoverCard>
+ ); + + expect(await screen.findByText(release.version)).toBeInTheDocument(); + }); +}); diff --git a/static/app/components/versionHoverCard.tsx b/static/app/components/versionHoverCard.tsx index 4e10ef7af421a4..11b22b3171ca95 100644 --- a/static/app/components/versionHoverCard.tsx +++ b/static/app/components/versionHoverCard.tsx @@ -1,6 +1,6 @@ +import {useMemo} from 'react'; import styled from '@emotion/styled'; -import type {Client} from 'sentry/api'; import AvatarList from 'sentry/components/avatar/avatarList'; import Tag from 'sentry/components/badge/tag'; import {LinkButton} from 'sentry/components/button'; @@ -14,48 +14,50 @@ import TimeSince from 'sentry/components/timeSince'; import Version from 'sentry/components/version'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Repository} from 'sentry/types/integrations'; +import type {Actor} from 'sentry/types/core'; import type {Organization} from 'sentry/types/organization'; -import type {Deploy, Release} from 'sentry/types/release'; +import type {User} from 'sentry/types/user'; import {defined} from 'sentry/utils'; -import withApi from 'sentry/utils/withApi'; -import withRelease from 'sentry/utils/withRelease'; -import withRepositories from 'sentry/utils/withRepositories'; +import {uniqueId} from 'sentry/utils/guid'; +import {useDeploys} from 'sentry/utils/useDeploys'; +import {useRelease} from 'sentry/utils/useRelease'; +import {useRepositories} from 'sentry/utils/useRepositories'; interface Props extends React.ComponentProps { - api: Client; organization: Organization; projectSlug: string; - releaseVersion: string; - deploys?: Array; - deploysError?: Error; - deploysLoading?: boolean; - release?: Release; - releaseError?: Error; - releaseLoading?: boolean; - repositories?: Array; - repositoriesError?: Error; - repositoriesLoading?: boolean; } function VersionHoverCard({ - api: _api, - projectSlug: _projectSlug, - deploysLoading, - deploysError, - release, - releaseLoading, - releaseError, - repositories, - repositoriesLoading, - repositoriesError, organization, - deploys, + projectSlug, releaseVersion, - children, ...hovercardProps }: Props) { + const { + data: repositories, + isPending: isRepositoriesLoading, + isError: isRepositoriesError, + } = useRepositories({orgSlug: organization.slug}); + const { + data: release, + isPending: isReleaseLoading, + isError: isReleaseError, + } = useRelease({ + orgSlug: organization.slug, + projectSlug, + releaseVersion, + }); + const { + data: deploys, + isPending: isDeploysLoading, + isError: isDeploysError, + } = useDeploys({ + orgSlug: organization.slug, + releaseVersion, + }); + function getRepoLink() { const orgSlug = organization.slug; return { @@ -76,6 +78,19 @@ function VersionHoverCard({ }; } + const authors = useMemo( + () => + release?.authors.map(author => + // Add a unique id if missing + ({ + ...author, + type: 'user', + id: 'id' in author ? author.id : uniqueId(), + }) + ), + [release?.authors] + ); + function getBody() { if (release === undefined || !defined(deploys)) { return {header: null, body: null}; @@ -105,7 +120,7 @@ function VersionHoverCard({ {release.authors.length !== 1 ? 
t('authors') : t('author')}{' '} 0; if (loading) { @@ -158,11 +173,7 @@ function VersionHoverCard({ body = renderObj.body; } - return ( - - {children} - - ); + return ; } interface VersionHoverHeaderProps { @@ -186,7 +197,7 @@ function VersionHoverHeader({releaseVersion}: VersionHoverHeaderProps) { ); } -export default withApi(withRelease(withRepositories(VersionHoverCard))); +export default VersionHoverCard; const ConnectRepo = styled('div')` padding: ${space(2)}; diff --git a/static/app/constants/chartPalette.tsx b/static/app/constants/chartPalette.tsx index 09715edccec76e..be738abb6bc395 100644 --- a/static/app/constants/chartPalette.tsx +++ b/static/app/constants/chartPalette.tsx @@ -8,7 +8,7 @@ export const CHART_PALETTE = [ ['#444674', '#f2b712'], ['#444674', '#d6567f', '#f2b712'], ['#444674', '#a35488', '#ef7061', '#f2b712'], - ['#444674', '#895289', '#d6567f', '#f38150', '#f2b712', '#a397f7'], + ['#444674', '#895289', '#d6567f', '#f38150', '#f2b712'], ['#444674', '#7a5088', '#b85586', '#e9626e', '#f58c46', '#f2b712'], ['#444674', '#704f87', '#a35488', '#d6567f', '#ef7061', '#f59340', '#f2b712'], [ diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx index d06f4941e8a4bc..0d6200f2773da9 100644 --- a/static/app/constants/index.tsx +++ b/static/app/constants/index.tsx @@ -333,7 +333,7 @@ export const DATA_CATEGORY_INFO = { apiName: 'span', plural: 'spans', displayName: 'span', - titleName: t('Spans'), + titleName: t('Spans'), // TODO(DS Spans): Update name productName: t('Tracing'), uid: 12, isBilledCategory: true, @@ -348,6 +348,16 @@ export const DATA_CATEGORY_INFO = { uid: 13, isBilledCategory: true, }, + [DataCategoryExact.SPAN_INDEXED]: { + name: DataCategoryExact.SPAN_INDEXED, + apiName: 'span_indexed', + plural: 'spansIndexed', + displayName: 'stored span', + titleName: t('Stored Spans'), + productName: t('Tracing'), + uid: 16, + isBilledCategory: false, + }, [DataCategoryExact.PROFILE_DURATION]: { name: DataCategoryExact.PROFILE_DURATION, apiName: 'profile_duration', diff --git a/static/app/data/forms/organizationGeneralSettings.tsx b/static/app/data/forms/organizationGeneralSettings.tsx index d3dbe605e95786..fa724690b85200 100644 --- a/static/app/data/forms/organizationGeneralSettings.tsx +++ b/static/app/data/forms/organizationGeneralSettings.tsx @@ -43,21 +43,6 @@ const formGroups: JsonFormObject[] = [ }), visible: () => !ConfigStore.get('isSelfHostedErrorsOnly'), }, - { - name: 'aiSuggestedSolution', - type: 'boolean', - label: t('AI Suggested Solution'), - help: tct( - 'Opt-in to [link:ai suggested solution] to get AI help on how to solve an issue.', - { - link: ( - - ), - } - ), - visible: ({features}) => - !ConfigStore.get('isSelfHostedErrorsOnly') && !features.has('autofix'), - }, { name: 'uptimeAutodetection', type: 'boolean', diff --git a/static/app/data/forms/projectSecurityAndPrivacyGroups.tsx b/static/app/data/forms/projectSecurityAndPrivacyGroups.tsx index c498340ecce4a1..d9416cfc08eb67 100644 --- a/static/app/data/forms/projectSecurityAndPrivacyGroups.tsx +++ b/static/app/data/forms/projectSecurityAndPrivacyGroups.tsx @@ -1,6 +1,9 @@ +import {hasEveryAccess} from 'sentry/components/acl/access'; import type {JsonFormObject} from 'sentry/components/forms/types'; import Link from 'sentry/components/links/link'; import {t, tct} from 'sentry/locale'; +import type {Organization} from 'sentry/types/organization'; +import type {Project} from 'sentry/types/project'; import {convertMultilineFieldValue, extractMultilineFields} from 'sentry/utils'; 
import { formatStoreCrashReports, @@ -11,12 +14,46 @@ import { // Export route to make these forms searchable by label/help export const route = '/settings/:orgId/projects/:projectId/security-and-privacy/'; -const ORG_DISABLED_REASON = t( - "This option is enforced by your organization's settings and cannot be customized per-project." -); - // Check if a field has been set AND IS TRUTHY at the organization level. -const hasOrgOverride = ({organization, name}) => organization[name]; +const hasOrgOverride = ({ + organization, + name, +}: { + name: string; + organization: Organization; +}) => organization[name]; + +function hasProjectWriteAndOrgOverride({ + organization, + project, + name, +}: { + name: string; + organization: Organization; + project: Project; +}) { + if (hasOrgOverride({organization, name})) { + return true; + } + + return !hasEveryAccess(['project:write'], {organization, project}); +} + +function projectWriteAndOrgOverrideDisabledReason({ + organization, + name, +}: { + name: string; + organization: Organization; +}) { + if (hasOrgOverride({organization, name})) { + return t( + "This option is enforced by your organization's settings and cannot be customized per-project." + ); + } + + return null; +} const formGroups: JsonFormObject[] = [ { @@ -63,8 +100,8 @@ const formGroups: JsonFormObject[] = [ name: 'dataScrubber', type: 'boolean', label: t('Data Scrubber'), - disabled: hasOrgOverride, - disabledReason: ORG_DISABLED_REASON, + disabled: hasProjectWriteAndOrgOverride, + disabledReason: projectWriteAndOrgOverrideDisabledReason, help: t('Enable server-side data scrubbing'), 'aria-label': t('Enable server-side data scrubbing'), // `props` are the props given to FormField @@ -76,8 +113,8 @@ const formGroups: JsonFormObject[] = [ { name: 'dataScrubberDefaults', type: 'boolean', - disabled: hasOrgOverride, - disabledReason: ORG_DISABLED_REASON, + disabled: hasProjectWriteAndOrgOverride, + disabledReason: projectWriteAndOrgOverrideDisabledReason, label: t('Use Default Scrubbers'), help: t( 'Apply default scrubbers to prevent things like passwords and credit cards from being stored' @@ -94,8 +131,8 @@ const formGroups: JsonFormObject[] = [ { name: 'scrubIPAddresses', type: 'boolean', - disabled: hasOrgOverride, - disabledReason: ORG_DISABLED_REASON, + disabled: hasProjectWriteAndOrgOverride, + disabledReason: projectWriteAndOrgOverrideDisabledReason, // `props` are the props given to FormField setValue: (val, props) => props.organization?.[props.name] || val, label: t('Prevent Storing of IP Addresses'), diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index 22e25ad2ccdd88..200e4611e9fcf5 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -562,6 +562,36 @@ export const feedbackOnboardingPlatforms: readonly PlatformKey[] = [ ...feedbackCrashApiPlatforms, ]; +// Feature flag onboarding platforms +export const featureFlagOnboardingPlatforms: readonly PlatformKey[] = [ + 'javascript', + 'python', + 'javascript-angular', + 'javascript-astro', + 'javascript-ember', + 'javascript-gatsby', + 'javascript-nextjs', + 'javascript-nuxt', + 'javascript-react', + 'javascript-remix', + 'javascript-solid', + 'javascript-solidstart', + 'javascript-svelte', + 'javascript-sveltekit', + 'javascript-vue', + 'python-aiohttp', + 'python-bottle', + 'python-django', + 'python-falcon', + 'python-fastapi', + 'python-flask', + 'python-pyramid', + 'python-quart', + 'python-sanic', + 'python-starlette', + 
'python-tornado', +]; + const customMetricBackendPlatforms: readonly PlatformKey[] = [ 'bun', 'dart', diff --git a/static/app/data/platformPickerCategories.tsx b/static/app/data/platformPickerCategories.tsx index 2a8e73c4111a7e..9d706d400d17ae 100644 --- a/static/app/data/platformPickerCategories.tsx +++ b/static/app/data/platformPickerCategories.tsx @@ -13,6 +13,7 @@ const popularPlatformCategories: Set<PlatformKey> = new Set([ 'javascript', 'javascript-angular', 'javascript-nextjs', + 'javascript-nuxt', 'javascript-react', 'javascript-vue', 'node', diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx index ff202bfe24d682..69ce7090e61b91 100644 --- a/static/app/gettingStartedDocs/javascript/angular.tsx +++ b/static/app/gettingStartedDocs/javascript/angular.tsx @@ -31,6 +31,7 @@ import { getReplayConfigureDescription, getReplayVerifyStep, } from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding'; +import {featureFlagOnboarding} from 'sentry/gettingStartedDocs/javascript/javascript'; import {t, tct} from 'sentry/locale'; export enum AngularConfigType { @@ -473,6 +474,7 @@ const docs: Docs = { crashReportOnboarding, platformOptions, profilingOnboarding, + featureFlagOnboarding: featureFlagOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx index 1c20b807619d07..cdb2b2ae12948f 100644 --- a/static/app/gettingStartedDocs/javascript/astro.tsx +++ b/static/app/gettingStartedDocs/javascript/astro.tsx @@ -23,6 +23,7 @@ import { getReplaySDKSetupSnippet, getReplayVerifyStep, } from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding'; +import {featureFlagOnboarding} from 'sentry/gettingStartedDocs/javascript/javascript'; import {t, tct} from 'sentry/locale'; type Params = DocsParams; @@ -378,6 +379,7 @@ const docs: Docs = { replayOnboarding, customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}), crashReportOnboarding, + featureFlagOnboarding: featureFlagOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx index fbe114f9a4ec29..2f015dcf5a0fdb 100644 --- a/static/app/gettingStartedDocs/javascript/ember.tsx +++ b/static/app/gettingStartedDocs/javascript/ember.tsx @@ -27,6 +27,7 @@ import { getReplayConfigureDescription, getReplayVerifyStep, } from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding'; +import {featureFlagOnboarding} from 'sentry/gettingStartedDocs/javascript/javascript'; import {t, tct} from 'sentry/locale'; type Params = DocsParams; @@ -292,6 +293,7 @@ const docs: Docs = { customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}), crashReportOnboarding, profilingOnboarding, + featureFlagOnboarding: featureFlagOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx index 0605bd549dbf39..f09be5ea3a51c8 100644 --- a/static/app/gettingStartedDocs/javascript/gatsby.tsx +++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx @@ -27,6 +27,7 @@ import { getReplayConfigureDescription, getReplayVerifyStep, } from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding'; +import {featureFlagOnboarding} from 'sentry/gettingStartedDocs/javascript/javascript'; import {t, tct} from 'sentry/locale'; type Params = DocsParams; @@ -321,6 +322,7 @@ const
docs: Docs = { customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}), crashReportOnboarding, profilingOnboarding, + featureFlagOnboarding: featureFlagOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx index 7266d26701e4f0..12894d782401e0 100644 --- a/static/app/gettingStartedDocs/javascript/javascript.tsx +++ b/static/app/gettingStartedDocs/javascript/javascript.tsx @@ -1,5 +1,6 @@ import {css} from '@emotion/react'; +import {IntegrationOptions} from 'sentry/components/events/featureFlags/utils'; import ExternalLink from 'sentry/components/links/externalLink'; import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout'; import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout'; @@ -61,6 +62,35 @@ const platformOptions = { type PlatformOptions = typeof platformOptions; type Params = DocsParams<PlatformOptions>; +type FlagOptions = { + importStatement: string; // feature flag SDK import + integration: string; // what's in the integrations array + sdkInit: string; // code to register with feature flag SDK +}; + +const FLAG_OPTIONS: Record<IntegrationOptions, FlagOptions> = { + [IntegrationOptions.LAUNCHDARKLY]: { + importStatement: `import * as LaunchDarkly from 'launchdarkly-js-client-sdk';`, + integration: 'launchDarklyIntegration()', + sdkInit: `const ldClient = LaunchDarkly.initialize( + 'my-client-ID', + {kind: 'user', key: 'my-user-context-key'}, + {inspectors: [Sentry.buildLaunchDarklyFlagUsedHandler()]} +); + +// Evaluates a flag +const flagVal = ldClient.variation('my-flag', false);`, + }, + [IntegrationOptions.OPENFEATURE]: { + importStatement: `import { OpenFeature } from '@openfeature/web-sdk';`, + integration: 'openFeatureIntegration()', + sdkInit: `const client = OpenFeature.getClient(); +client.addHooks(new Sentry.OpenFeatureIntegrationHook()); + +// Evaluating flags will record the result on the Sentry client.
+const result = client.getBooleanValue('my-flag', false);`, + }, +}; const isAutoInstall = (params: Params) => params.platformOptions.installationMode === InstallationMode.AUTO; @@ -612,6 +642,44 @@ const profilingOnboarding: OnboardingConfig = { introduction: params => <MaybeBrowserProfilingBetaWarning {...params} />, }; +export const featureFlagOnboarding: OnboardingConfig = { + install: () => [], + configure: ({featureFlagOptions = {integration: ''}, dsn}) => [ + { + type: StepType.CONFIGURE, + description: tct( + 'Add [name] to your integrations list, and then register with your feature flag SDK.', + { + name: ( + <code>{`${FLAG_OPTIONS[featureFlagOptions.integration].integration}`}</code> + ), + } + ), + configurations: [ + { + language: 'JavaScript', + code: ` +${FLAG_OPTIONS[featureFlagOptions.integration].importStatement} + +// Register with Sentry +Sentry.init({ + dsn: "${dsn.public}", + integrations: [ + Sentry.${FLAG_OPTIONS[featureFlagOptions.integration].integration}, + ], +}); + +// Register with your feature flag SDK +${FLAG_OPTIONS[featureFlagOptions.integration].sdkInit} +`, + }, + ], + }, + ], + verify: () => [], + nextSteps: () => [], +}; + const docs: Docs = { onboarding, feedbackOnboardingNpm: feedbackOnboarding, @@ -622,6 +690,7 @@ const docs: Docs = { crashReportOnboarding, platformOptions, profilingOnboarding, + featureFlagOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/nextjs.tsx b/static/app/gettingStartedDocs/javascript/nextjs.tsx index 00e24eb868a253..f66cf459579809 100644 --- a/static/app/gettingStartedDocs/javascript/nextjs.tsx +++ b/static/app/gettingStartedDocs/javascript/nextjs.tsx @@ -27,6 +27,7 @@ import { getReplayVerifyStep, } from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding'; import TextCopyInput from 'sentry/components/textCopyInput'; +import {featureFlagOnboarding} from 'sentry/gettingStartedDocs/javascript/javascript'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import {trackAnalytics} from 'sentry/utils/analytics'; @@ -424,6 +425,7 @@ const docs: Docs = { }), performanceOnboarding, crashReportOnboarding, + featureFlagOnboarding: featureFlagOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx index ab3a3f8578e521..c3796f6848e834 100644 --- a/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx +++ b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx @@ -11,75 +11,33 @@ describe('javascript-nuxt onboarding docs', function () { renderWithOnboardingLayout(docs); // Renders main headings - expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument(); - expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument(); - expect(screen.getByRole('heading', {name: 'Upload Source Maps'})).toBeInTheDocument(); - expect(screen.getByRole('heading', {name: 'Verify'})).toBeInTheDocument(); - - // Includes 2 import statements - expect( - screen.getAllByText( - textWithMarkupMatcher(/import \* as Sentry from "@sentry\/nuxt"/) - ) - ).toHaveLength(2); - }); - - it('displays sample rates by default', () => { - renderWithOnboardingLayout(docs, { - selectedProducts: [ - ProductSolution.ERROR_MONITORING, - ProductSolution.PERFORMANCE_MONITORING, - ProductSolution.SESSION_REPLAY, - ], - }); - - expect(screen.getAllByText(textWithMarkupMatcher(/tracesSampleRate/))).toHaveLength( - 2 - ); // client and server expect( - screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/)) -
).toBeInTheDocument(); // only client + screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'}) + ).toBeInTheDocument(); + // Renders main headings expect( - screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/)) - ).toBeInTheDocument(); // only client - }); - - it('enables performance setting the tracesSampleRate to 1', () => { - renderWithOnboardingLayout(docs, { - selectedProducts: [ - ProductSolution.ERROR_MONITORING, - ProductSolution.PERFORMANCE_MONITORING, - ], - }); + screen.getByRole('heading', {name: 'Manual Configuration'}) + ).toBeInTheDocument(); + // Renders main headings + expect(screen.getByRole('heading', {name: 'Verify'})).toBeInTheDocument(); + // Includes configure statement expect( - screen.getAllByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0/)) - ).toHaveLength(2); + screen.getByText(textWithMarkupMatcher(/npx @sentry\/wizard@latest -i nuxt/)) + ).toBeInTheDocument(); }); - it('enables replay by setting replay samplerates', () => { + it('displays the verify instructions', () => { renderWithOnboardingLayout(docs, { selectedProducts: [ ProductSolution.ERROR_MONITORING, + ProductSolution.PERFORMANCE_MONITORING, ProductSolution.SESSION_REPLAY, ], }); expect( - screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate: 0\.1/)) - ).toBeInTheDocument(); - expect( - screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate: 1\.0/)) + screen.queryByText(textWithMarkupMatcher(/sentry-example-page/)) ).toBeInTheDocument(); }); - - it('enables profiling by setting profiling sample rates', () => { - renderWithOnboardingLayout(docs, { - selectedProducts: [ProductSolution.ERROR_MONITORING, ProductSolution.PROFILING], - }); - - expect( - screen.getAllByText(textWithMarkupMatcher(/profilesSampleRate: 1\.0/)) - ).toHaveLength(2); - }); }); diff --git a/static/app/gettingStartedDocs/javascript/nuxt.tsx b/static/app/gettingStartedDocs/javascript/nuxt.tsx index bc4a3d78b190a6..09d11fe6157697 100644 --- a/static/app/gettingStartedDocs/javascript/nuxt.tsx +++ b/static/app/gettingStartedDocs/javascript/nuxt.tsx @@ -1,8 +1,8 @@ import {Fragment} from 'react'; -import styled from '@emotion/styled'; -import Alert from 'sentry/components/alert'; import ExternalLink from 'sentry/components/links/externalLink'; +import List from 'sentry/components/list/'; +import ListItem from 'sentry/components/list/listItem'; import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout'; import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout'; import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage'; @@ -17,88 +17,50 @@ import { getCrashReportModalConfigDescription, getCrashReportModalIntroduction, getFeedbackConfigureDescription, + getFeedbackSDKSetupSnippet, } from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; import {getJSMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding'; import {MaybeBrowserProfilingBetaWarning} from 'sentry/components/onboarding/gettingStartedDoc/utils/profilingOnboarding'; import { - getReplayConfigOptions, getReplayConfigureDescription, + getReplaySDKSetupSnippet, + getReplayVerifyStep, } from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding'; -import {t, tct} from 'sentry/locale'; +import {featureFlagOnboarding} from 'sentry/gettingStartedDocs/javascript/javascript'; +import {t, tct, tctCode} 
from 'sentry/locale'; type Params = DocsParams; -const getNuxtModuleSnippet = () => ` -export default defineNuxtConfig({ - modules: ["@sentry/nuxt/module"], -}); -`; +const getConfigStep = ({isSelfHosted, organization, projectSlug}: Params) => { + const urlParam = isSelfHosted ? '' : '--saas'; -const getSdkClientSetupSnippet = (params: Params) => ` -import * as Sentry from "@sentry/nuxt"; - -Sentry.init({ - // If set up, you can use your runtime config here - // dsn: useRuntimeConfig().public.sentry.dsn, - dsn: "${params.dsn.public}",${ - params.isReplaySelected - ? ` - integrations: [Sentry.replayIntegration(${getReplayConfigOptions(params.replayOptions)})],` - : '' - }${ - params.isPerformanceSelected - ? ` - // Tracing - // We recommend adjusting this value in production, or using a tracesSampler for finer control. - tracesSampleRate: 1.0, // Capture 100% of the transactions - // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled - tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],` - : '' - }${ - params.isReplaySelected - ? ` - // Session Replay - replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production. - replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.` - : '' - }${ - params.isProfilingSelected - ? ` - // Set profilesSampleRate to 1.0 to profile every transaction. - // Since profilesSampleRate is relative to tracesSampleRate, - // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate - // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would - // results in 25% of transactions being profiled (0.5*0.5=0.25) - profilesSampleRate: 1.0,` - : '' - } -}); -`; - -const getSdkServerSetupSnippet = (params: Params) => ` -import * as Sentry from "@sentry/nuxt"; + return [ + { + type: StepType.INSTALL, + description: tct( + 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.', + { + wizardLink: ( + + ), + } + ), + configurations: [ + { + language: 'bash', + code: `npx @sentry/wizard@latest -i nuxt ${urlParam} --org ${organization.slug} --project ${projectSlug}`, + }, + ], + }, + ]; +}; -Sentry.init({ - dsn: "${params.dsn.public}",${ - params.isPerformanceSelected - ? ` - // Tracing - // We recommend adjusting this value in production, or using a tracesSampler for finer control. - tracesSampleRate: 1.0, // Capture 100% of the transactions` - : '' - }${ - params.isProfilingSelected - ? ` - // Set profilesSampleRate to 1.0 to profile every transaction. - // Since profilesSampleRate is relative to tracesSampleRate, - // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate - // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would - // results in 25% of transactions being profiled (0.5*0.5=0.25) - profilesSampleRate: 1.0,` - : '' - } -}); -`; +const getInstallConfig = (params: Params) => [ + { + type: StepType.INSTALL, + configurations: getConfigStep(params), + }, +]; const getVerifyNuxtSnippet = () => `