Run mypy in Github continuous integration checks (#2324)
* run mypy in CI

* more type hint fixing

* Update requirements.txt

* google.cloud import workaround

python/mypy#10360

* Revert "google.cloud import workaround"

This reverts commit e03dbc7.

* mypy google.cloud workaround

python/mypy#12985

* fix regular tests

* remove --install-types flag

* last mypy fix

* type fixes for address_reasons

* move email lists creation

* update for newly merged changes

* Fix regular tests

* Changes suggested by @jrobbins

* Catch invalid requests

* small type hint

* Change methods to properly override

* revert to previous implementation

* revert test

* add back original validate request type method

* remove unused import and rearrange methods

* remove comment string casting; add test back

* add ndb ignore

* Update test_html_rendering.html

* mypy fix

* remove merge addition
DanielRyanSmith authored Oct 24, 2022
1 parent 9a35f31 commit 49fb215
Showing 34 changed files with 149 additions and 117 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/ci.yml
@@ -41,6 +41,9 @@ jobs:
- name: lint
run: npm run lint

+- name: mypy
+run: mypy --ignore-missing-imports --exclude cs-env/ --exclude appengine_config.py .

- name: test
run: npm test

2 changes: 1 addition & 1 deletion api/accounts_api.py
@@ -15,7 +15,7 @@

import logging

-from google.cloud import ndb
+from google.cloud import ndb # type: ignore

from framework import basehandlers
from framework import permissions
9 changes: 4 additions & 5 deletions api/comments_api.py
@@ -14,7 +14,7 @@
# limitations under the License.

import logging
-from typing import Any, Optional
+from typing import Any

from framework import basehandlers
from framework import permissions
@@ -24,7 +24,6 @@


def comment_to_json_dict(comment: Activity) -> dict[str, Any]:

return {
'comment_id': comment.key.id(),
'feature_id': comment.feature_id,
@@ -56,8 +55,8 @@ def do_get(self, **kwargs) -> dict[str, list[dict[str, Any]]]:
is_admin = permissions.can_admin_site(user)

# Filter deleted comments the user can't see.
-comments = filter(
-lambda c: self._should_show_comment(c, user.email(), is_admin), comments)
+comments = list(filter(
+lambda c: self._should_show_comment(c, user.email(), is_admin), comments))

dicts = [comment_to_json_dict(c) for c in comments]
return {'comments': dicts}
@@ -110,7 +109,7 @@ def do_post(self, **kwargs) -> dict[str, str]:

def do_patch(self, **kwargs) -> dict[str, str]:
comment_id = self.get_param('commentId', required=True)
-comment: Optional[Activity] = Activity.get_by_id(comment_id)
+comment: Activity = Activity.get_by_id(comment_id)

user = self.get_current_user(required=True)
if not permissions.can_admin_site(user) and (
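A note on the `list(filter(...))` change in `do_get` above: `filter()` returns a lazy, single-use iterator, so mypy rejects it wherever a `list` is declared or expected. A minimal sketch of the difference (the data here is illustrative, not the project's):

```python
from typing import Iterator

emails = ['a@example.com', 'deleted@example.com', 'b@example.com']

# filter() yields a one-shot Iterator[str]; annotating or returning it as a
# list would be a type error, and it can only be consumed once.
lazy: Iterator[str] = filter(lambda e: not e.startswith('deleted'), emails)

# list(filter(...)) materializes the results, satisfies a list[str] annotation,
# and can be iterated as many times as needed.
visible: list[str] = list(filter(lambda e: not e.startswith('deleted'), emails))
print(visible)  # ['a@example.com', 'b@example.com']
```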
4 changes: 2 additions & 2 deletions api/features_api.py
@@ -27,7 +27,7 @@
class FeaturesAPI(basehandlers.APIHandler):
"""Features are the the main records that we track."""

-def get_one_feature(self, feature_id: int) -> dict:
+def get_one_feature(self, feature_id: int) -> dict[str, Any]:
features = core_models.Feature.get_by_ids([feature_id])
if not features:
self.abort(404, msg='Feature %r not found' % feature_id)
@@ -64,7 +64,7 @@ def do_search(self) -> dict[str, Any]:
'features': features_on_page,
}

-def do_get(self, **kwargs) -> dict:
+def do_get(self, **kwargs) -> dict[str, Any]:
"""Handle GET requests for a single feature or a search."""
feature_id = kwargs.get('feature_id', None)
if feature_id:
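The `dict` → `dict[str, Any]` changes above replace a bare `dict` return type, which mypy treats as `dict[Any, Any]`, with one that at least pins the key type. A small sketch (hypothetical function, not the project's code):

```python
from typing import Any

def feature_as_dict(feature_id: int) -> dict[str, Any]:
  # The keys are known to be strings even though the values are heterogeneous,
  # so mypy can flag mistakes like result[42] at call sites.
  return {'id': feature_id, 'name': 'example feature', 'is_released': False}

result = feature_as_dict(1)
name = result['name']   # fine: str key
# value = result[42]    # mypy: invalid index type "int"; expected "str"
```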
8 changes: 4 additions & 4 deletions framework/basehandlers.py
@@ -18,13 +18,14 @@
import logging
import os
import re
+from typing import Optional

import flask
import flask.views
import werkzeug.exceptions

import google.appengine.api
-from google.cloud import ndb
+from google.cloud import ndb # type: ignore

import settings
from framework import csp
@@ -268,8 +269,8 @@ def require_signed_in_and_xsrf_token(self):

class FlaskHandler(BaseHandler):

-TEMPLATE_PATH = None # Subclasses should define this.
-HTTP_CACHE_TYPE = None # Subclasses can use 'public' or 'private'
+TEMPLATE_PATH: Optional[str] = None # Subclasses should define this.
+HTTP_CACHE_TYPE: Optional[str] = None # Subclasses can use 'public' or 'private'
JSONIFY = False # Set to True for JSON feeds.
IS_INTERNAL_HANDLER = False # Subclasses can skip XSRF check.

@@ -362,7 +363,6 @@ def get(self, *args, **kwargs):
location = self.request.url.replace('www.', '', 1)
logging.info('Striping www and redirecting to %r', location)
return self.redirect(location)

handler_data = self.get_template_data(*args, **kwargs)
users.refresh_user_session()

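The `Optional[str]` annotations on `TEMPLATE_PATH` and `HTTP_CACHE_TYPE` above are needed because mypy infers a bare `None` class attribute as type `None` and would then reject subclasses that assign a string. A minimal sketch with hypothetical class names:

```python
from typing import Optional

class BaseView:
  # Without the annotation, mypy infers this attribute as None and flags the
  # subclass assignment below as incompatible.
  TEMPLATE_PATH: Optional[str] = None  # Subclasses should define this.

class FeatureListView(BaseView):
  TEMPLATE_PATH = 'feature_list.html'  # OK: str fits Optional[str]

def template_for(view: BaseView) -> str:
  if view.TEMPLATE_PATH is None:
    raise ValueError('subclass must set TEMPLATE_PATH')
  return view.TEMPLATE_PATH  # narrowed to str after the None check
```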
8 changes: 4 additions & 4 deletions framework/basehandlers_test.py
@@ -528,19 +528,19 @@ def test_get_headers(self):
actual)

def test_get_template_data__missing(self):
"""Every subsclass should overide get_template_data()."""
"""Every subclass should overide get_template_data()."""
self.handler = basehandlers.FlaskHandler()
with self.assertRaises(NotImplementedError):
self.handler.get_template_data()

def test_get_template_path__missing(self):
"""Subsclasses that don't define TEMPLATE_PATH trigger error."""
"""Subclasses that don't define TEMPLATE_PATH trigger error."""
self.handler = basehandlers.FlaskHandler()
with self.assertRaises(ValueError):
self.handler.get_template_path({})

def test_get_template_path__specified_in_class(self):
"""Subsclasses can define TEMPLATE_PATH."""
"""Subclasses can define TEMPLATE_PATH."""
actual = self.handler.get_template_path({})
self.assertEqual('test_template.html', actual)

@@ -551,7 +551,7 @@ def test_get_template_path__specalized_by_template_data(self):
self.assertEqual('special.html', actual)

def test_process_post_data__missing(self):
"""Subsclasses that don't override process_post_data() give a 405."""
"""Subclasses that don't override process_post_data() give a 405."""
self.handler = basehandlers.FlaskHandler()
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
self.handler.process_post_data()
2 changes: 1 addition & 1 deletion framework/cloud_tasks_helpers.py
@@ -26,7 +26,7 @@
if not settings.UNIT_TEST_MODE:
import grpc # See requirements.dev.txt.
from google.api_core import retry
-from google.cloud import tasks
+from google.cloud import tasks # type: ignore



3 changes: 1 addition & 2 deletions framework/csp.py
@@ -17,7 +17,6 @@
import copy
import logging
import os
-import six

import flask

@@ -101,7 +100,7 @@ def get_csp_header_key():
def build_policy(policy):
"""Builds the CSP policy string from the internal representation."""
csp_directives = [
-k + ' ' + ' '.join(v) for k, v in six.iteritems(policy) if v is not None
+k + ' ' + ' '.join(v) for k, v in policy.items() if v is not None
]
if REPORT_URI:
csp_directives.append('report-uri %s' % REPORT_URI)
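Dropping `six` in `build_policy` is a Python 3 cleanup: `six.iteritems(d)` only existed to bridge Python 2, and `d.items()` is the typed, idiomatic equivalent. The snippet below mirrors the comprehension above with an illustrative policy dict:

```python
policy = {
  'script-src': ["'self'", "'strict-dynamic'"],
  'report-uri': None,  # directives with no value are skipped
}

# Same shape as the comprehension in build_policy(), minus the six dependency.
csp_directives = [
  k + ' ' + ' '.join(v) for k, v in policy.items() if v is not None
]
print('; '.join(csp_directives))  # script-src 'self' 'strict-dynamic'
```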
6 changes: 3 additions & 3 deletions framework/csp_test.py
@@ -52,11 +52,11 @@ def test_get_default_policy__strict(self):
"""We can get the regular strict policy."""
policy = csp.get_default_policy(nonce='12345')
self.assertCountEqual(list(csp.DEFAULT_POLICY.keys()), list(policy.keys()))
-self.assertIn('strict-dynamic', policy['script-src'])
+self.assertIn('\'strict-dynamic\'', policy['script-src'])
self.assertIn("'nonce-12345'", policy['script-src'])

@mock.patch('framework.csp.USE_NONCE_ONLY_POLICY', True)
-def test_get_default_policy__strict(self):
+def test_get_default_policy__strict_two(self):
"""We can get the even stricter nonce-only policy."""
policy = csp.get_default_policy(nonce='12345')
self.assertCountEqual(list(csp.NONCE_ONLY_POLICY.keys()), list(policy.keys()))
@@ -71,7 +71,7 @@ def test_get_csp_header_key__enforced(self):
csp.get_csp_header_key())

@mock.patch('framework.csp.REPORT_ONLY', True)
-def test_get_csp_header_key__enforced(self):
+def test_get_csp_header_key__enforced_two(self):
"""We can get the header used when only reporting violations."""
self.assertEqual(
csp.HEADER_KEY_REPORT_ONLY,
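The `_two` renames above fix duplicated method names: in Python the second `def` with the same name silently replaces the first, so one test of each pair never ran, and mypy reports the redefinition. A tiny sketch (assuming unittest-style tests, as in this repo):

```python
import unittest

class ExampleTest(unittest.TestCase):

  def test_policy(self):
    self.assertTrue(True)

  # mypy: Name "test_policy" already defined. This definition shadows the one
  # above, so only this version is collected and run; renaming it (e.g. to
  # test_policy_two) keeps both tests alive.
  def test_policy(self):
    self.assertEqual(1 + 1, 2)
```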
2 changes: 1 addition & 1 deletion framework/secrets.py
@@ -20,7 +20,7 @@
import string
import time

-from google.cloud import ndb
+from google.cloud import ndb # type: ignore


# For random key generation
5 changes: 3 additions & 2 deletions internals/approval_defs.py
@@ -35,7 +35,7 @@
'main/third_party/blink/API_OWNERS?format=TEXT')

ApprovalFieldDef = collections.namedtuple(
-'ApprovalField',
+'ApprovalFieldDef',
'name, description, field_id, rule, approvers')

# Note: This can be requested manually through the UI, but it is not
@@ -193,7 +193,8 @@ def set_vote(
set_on=now, set_by=set_by_email)
new_vote.put()

-update_gate_approval_state(gate)
+if gate:
+update_gate_approval_state(gate)

def get_gate_by_type(feature_id: int, gate_type: int):
"""Return a single gate based on the feature and gate type."""
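Two things change in this file: the namedtuple's typename now matches the variable it is assigned to, which mypy's namedtuple checker expects, and `set_vote` only calls `update_gate_approval_state` after an `if gate:` check, since the gate lookup can return `None`. A hedged sketch of that narrowing pattern, with a stand-in `Gate` class rather than the real model:

```python
from typing import Optional

class Gate:
  state: int = 0

def find_gate(feature_id: int) -> Optional[Gate]:
  # Stand-in for an NDB query that may find nothing.
  return Gate() if feature_id > 0 else None

def update_gate_approval_state(gate: Gate) -> None:
  gate.state = 1

gate = find_gate(42)
# Passing gate straight through would hand an Optional[Gate] to a parameter
# typed Gate; the truthiness check narrows it to Gate inside the branch.
if gate:
  update_gate_approval_state(gate)
```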
13 changes: 7 additions & 6 deletions internals/core_enums.py
@@ -14,6 +14,7 @@


import collections
+from typing import Optional


WEBCOMPONENTS = 1
@@ -149,43 +150,43 @@
GATE_SHIP = 4

# Prototype stage types for every feature type.
-STAGE_TYPES_PROTOTYPE = {
+STAGE_TYPES_PROTOTYPE: dict[int, Optional[int]] = {
FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_PROTOTYPE,
FEATURE_TYPE_EXISTING_ID: STAGE_FAST_PROTOTYPE,
FEATURE_TYPE_CODE_CHANGE_ID: None,
FEATURE_TYPE_DEPRECATION_ID: None
}
# Dev trial stage types for every feature type.
-STAGE_TYPES_DEV_TRIAL = {
+STAGE_TYPES_DEV_TRIAL: dict[int, Optional[int]] = {
FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_DEV_TRIAL,
FEATURE_TYPE_EXISTING_ID: STAGE_FAST_DEV_TRIAL,
FEATURE_TYPE_CODE_CHANGE_ID: STAGE_PSA_DEV_TRIAL,
FEATURE_TYPE_DEPRECATION_ID: STAGE_DEP_DEV_TRIAL
}
# Origin trial stage types for every feature type.
-STAGE_TYPES_ORIGIN_TRIAL = {
+STAGE_TYPES_ORIGIN_TRIAL: dict[int, Optional[int]] = {
FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_ORIGIN_TRIAL,
FEATURE_TYPE_EXISTING_ID: STAGE_FAST_ORIGIN_TRIAL,
FEATURE_TYPE_CODE_CHANGE_ID: None,
FEATURE_TYPE_DEPRECATION_ID: STAGE_DEP_DEPRECATION_TRIAL
}
# Origin trial extension stage types for every feature type.
-STAGE_TYPES_EXTEND_ORIGIN_TRIAL = {
+STAGE_TYPES_EXTEND_ORIGIN_TRIAL: dict[int, Optional[int]] = {
FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_EXTEND_ORIGIN_TRIAL,
FEATURE_TYPE_EXISTING_ID: STAGE_FAST_EXTEND_ORIGIN_TRIAL,
FEATURE_TYPE_CODE_CHANGE_ID: None,
FEATURE_TYPE_DEPRECATION_ID: STAGE_DEP_EXTEND_DEPRECATION_TRIAL
}
# Shipping stage types for every feature type.
-STAGE_TYPES_SHIPPING = {
+STAGE_TYPES_SHIPPING: dict[int, Optional[int]] = {
FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_SHIPPING,
FEATURE_TYPE_EXISTING_ID: STAGE_FAST_SHIPPING,
FEATURE_TYPE_CODE_CHANGE_ID: STAGE_PSA_SHIPPING,
FEATURE_TYPE_DEPRECATION_ID: STAGE_DEP_SHIPPING
}

# Mapping of original field names to the new stage types the fields live on.
-STAGE_TYPES_BY_FIELD_MAPPING = {
+STAGE_TYPES_BY_FIELD_MAPPING: dict[str, dict[int, Optional[int]]] = {
'finch_url': STAGE_TYPES_SHIPPING,
'experiment_goals': STAGE_TYPES_ORIGIN_TRIAL,
'experiment_risks': STAGE_TYPES_ORIGIN_TRIAL,
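The annotations above give every stage-type mapping the same type, `dict[int, Optional[int]]`. The dicts that contain only ints would otherwise be inferred as `dict[int, int]`, and because dict value types are invariant, those could not be stored alongside the mixed ones in `STAGE_TYPES_BY_FIELD_MAPPING`. A minimal sketch; the constants, keys, and pairings below are illustrative stand-ins, not the real enum values:

```python
from typing import Optional

# Illustrative stand-ins for the real enum constants.
FEATURE_TYPE_INCUBATE_ID = 0
FEATURE_TYPE_CODE_CHANGE_ID = 2
STAGE_BLINK_PROTOTYPE = 110
STAGE_BLINK_DEV_TRIAL = 130

# Mixed int/None values: some feature types simply have no stage of this kind.
STAGE_TYPES_PROTOTYPE: dict[int, Optional[int]] = {
  FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_PROTOTYPE,
  FEATURE_TYPE_CODE_CHANGE_ID: None,
}

# All-int values: without the annotation this would be inferred as
# dict[int, int], which mypy will not accept where dict[int, Optional[int]]
# is expected (dict value types are invariant).
STAGE_TYPES_DEV_TRIAL: dict[int, Optional[int]] = {
  FEATURE_TYPE_INCUBATE_ID: STAGE_BLINK_DEV_TRIAL,
  FEATURE_TYPE_CODE_CHANGE_ID: STAGE_BLINK_DEV_TRIAL,
}

STAGE_TYPES_BY_FIELD_MAPPING: dict[str, dict[int, Optional[int]]] = {
  'example_field_a': STAGE_TYPES_PROTOTYPE,
  'example_field_b': STAGE_TYPES_DEV_TRIAL,
}
```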
27 changes: 14 additions & 13 deletions internals/core_models.py
@@ -22,7 +22,7 @@
import re
from typing import Any, Optional

-from google.cloud import ndb
+from google.cloud import ndb # type: ignore

from framework import rediscache
from framework import users
@@ -205,7 +205,7 @@ def format_for_template(self, version=2) -> dict[str, Any]:
self.migrate_views()
logging.info('In format_for_template for %s',
repr(self)[:settings.MAX_LOG_LINE])
-d = self.to_dict()
+d: dict[str, Any] = self.to_dict()
is_released = self.impl_status_chrome in RELEASE_IMPL_STATES
d['is_released'] = is_released

@@ -494,7 +494,8 @@ def filter_unlisted(self, feature_list: list[dict]) -> list[dict]:

@classmethod
def get_by_ids(
-self, feature_ids: list[int], update_cache: bool=False) -> list[dict]:
+self, feature_ids: list[int],
+update_cache: bool=False) -> list[dict[str, Any]]:
"""Return a list of JSON dicts for the specified features.
Because the cache may rarely have stale data, this should only be
@@ -514,7 +515,7 @@ def get_by_ids(
result_dict[feature_id] = feature

for future in futures:
-unformatted_feature = future.get_result()
+unformatted_feature: Optional[Feature] = future.get_result()
if unformatted_feature and not unformatted_feature.deleted:
feature = unformatted_feature.format_for_template()
feature['updated_display'] = (
@@ -525,7 +526,7 @@
result_dict[unformatted_feature.key.integer_id()] = feature

result_list = [
-result_dict.get(feature_id) for feature_id in feature_ids
+result_dict[feature_id] for feature_id in feature_ids
if feature_id in result_dict]
return result_list

@@ -632,7 +633,7 @@ def getSortingMilestone(feature):

@classmethod
def get_in_milestone(self, milestone: int,
-show_unlisted: bool=False) -> dict[int, list[dict[str, Any]]]:
+show_unlisted: bool=False) -> dict[str, list[dict[str, Any]]]:
"""Return {reason: [feaure_dict]} with all the reasons a feature can
be part of a milestone.
@@ -648,7 +649,7 @@ def get_in_milestone(self, milestone: int,
if cached_features_by_type:
features_by_type = cached_features_by_type
else:
-all_features: dict[int, list[Feature]] = {}
+all_features: dict[str, list[Feature]] = {}
all_features[IMPLEMENTATION_STATUS[ENABLED_BY_DEFAULT]] = []
all_features[IMPLEMENTATION_STATUS[DEPRECATED]] = []
all_features[IMPLEMENTATION_STATUS[REMOVED]] = []
@@ -782,10 +783,11 @@

def crbug_number(self) -> Optional[str]:
if not self.bug_url:
-return
+return None
m = re.search(r'[\/|?id=]([0-9]+)$', self.bug_url)
if m:
return m.group(1)
+return None

def new_crbug_url(self) -> str:
url = 'https://bugs.chromium.org/p/chromium/issues/entry'
@@ -1132,7 +1134,7 @@ def __init__(self, *args, **kwargs):

@classmethod
def get_feature_entry(self, feature_id: int, update_cache: bool=False
-) -> FeatureEntry:
+) -> Optional[FeatureEntry]:
KEY = feature_cache_key(FeatureEntry.DEFAULT_CACHE_KEY, feature_id)
feature = rediscache.get(KEY)

@@ -1174,7 +1176,7 @@ def get_by_ids(self, entry_ids: list[int], update_cache: bool=False
procesing a POST to edit data. For editing use case, load the
data from NDB directly.
"""
-result_dict = {}
+result_dict: dict[int, int] = {}
futures = []

for fe_id in entry_ids:
@@ -1192,9 +1194,8 @@ def get_by_ids(self, entry_ids: list[int], update_cache: bool=False
rediscache.set(store_key, entry)
result_dict[entry.key.integer_id()] = entry

-result_list = [
-result_dict.get(fe_id) for fe_id in entry_ids
-if fe_id in result_dict]
+result_list = [result_dict[fe_id] for fe_id in entry_ids
+if fe_id in result_dict]
return result_list

# Note: get_in_milestone will be in a new file legacy_queries.py.
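The `result_dict.get(...)` → `result_dict[...]` changes in this file are a typical mypy-driven tweak: `.get()` is typed as returning an Optional value, so the comprehension's type would be a list of optionals even though the `if fe_id in result_dict` guard already excludes misses. Plain indexing keeps the element type exact. Sketch with illustrative data:

```python
result_dict: dict[int, str] = {1: 'feature one', 3: 'feature three'}
entry_ids = [1, 2, 3]

# .get() returns Optional[str], so mypy types this as list[Optional[str]]
# even though the membership test guarantees there are no None entries.
maybe = [result_dict.get(fe_id) for fe_id in entry_ids
         if fe_id in result_dict]

# Indexing returns str, so this is list[str], matching the declared return type.
result_list = [result_dict[fe_id] for fe_id in entry_ids
               if fe_id in result_dict]
print(result_list)  # ['feature one', 'feature three']
```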
3 changes: 2 additions & 1 deletion internals/data_backup.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
+from typing import Any

from googleapiclient.discovery import build
from googleapiclient.discovery_cache.base import Cache
@@ -20,7 +21,7 @@
from framework import basehandlers

class MemoryCache(Cache):
-_CACHE = {}
+_CACHE: dict[Any, Any] = {}

def get(self, url):
return MemoryCache._CACHE.get(url)
(Diffs for the remaining changed files are not shown.)
