diff --git a/lib/pbench/cli/server/user_management.py b/lib/pbench/cli/server/user_management.py index f3f6f15cc8..22df4b499c 100644 --- a/lib/pbench/cli/server/user_management.py +++ b/lib/pbench/cli/server/user_management.py @@ -4,15 +4,12 @@ from pbench.cli import pass_cli_context from pbench.cli.server import config_setup from pbench.cli.server.options import common_options -from pbench.server.database.models.users import Roles, User +from pbench.server.database.models.users import User -USER_LIST_ROW_FORMAT = "{0:15}\t{1:15}\t{2:15}\t{3:15}\t{4:20}" -USER_LIST_HEADER_ROW = USER_LIST_ROW_FORMAT.format( - "Username", "First Name", "Last Name", "Registered On", "Email" -) +USER_LIST_ROW_FORMAT = "{0:15}\t{1:36}" +USER_LIST_HEADER_ROW = USER_LIST_ROW_FORMAT.format("Username", "OIDC ID") -# User create CLI @click.group("user_group") @click.version_option() @pass_cli_context @@ -22,94 +19,6 @@ def user_command_cli(context): pass -@user_command_cli.command() -@pass_cli_context -@click.option( - "--username", - prompt=True, - required=True, - help="pbench server account username (will prompt if unspecified)", -) -@click.option( - "--password", - prompt=True, - hide_input=True, - required=True, - help="pbench server account password (will prompt if unspecified)", -) -@click.option( - "--email", - prompt=True, - required=True, - help="pbench server account email (will prompt if unspecified)", -) -@click.option( - "--first-name", - required=False, - help="pbench server account first name (will prompt if unspecified)", -) -@click.option( - "--last-name", - required=False, - help="pbench server account last name (will prompt if unspecified)", -) -@click.option( - "--role", - type=click.Choice([role.name for role in Roles], case_sensitive=False), - required=False, - help="Optional role of the user such as Admin", -) -@common_options -def user_create( - context: object, - username: str, - password: str, - email: str, - first_name: str, - last_name: str, - role: str, -) -> None: - try: - config_setup(context) - user = User( - username=username, - password=password, - first_name=first_name, - last_name=last_name, - email=email, - role=role if role else "", - ) - user.add() - if user.is_admin(): - click.echo(f"Admin user {username} registered") - else: - click.echo(f"User {username} registered") - rv = 0 - except Exception as exc: - click.echo(exc, err=True) - rv = 2 if isinstance(exc, BadConfig) else 1 - - click.get_current_context().exit(rv) - - -# User delete CLI -@user_command_cli.command() -@common_options -@click.argument("username") -@pass_cli_context -def user_delete(context: object, username: str) -> None: - try: - # Delete the the user with specified username - config_setup(context) - User.delete(username=username) - rv = 0 - except Exception as exc: - click.echo(exc, err=True) - rv = 2 if isinstance(exc, BadConfig) else 1 - - click.get_current_context().exit(rv) - - # Users list CLI @user_command_cli.command() @common_options @@ -126,10 +35,7 @@ def user_list(context: object) -> None: click.echo( USER_LIST_ROW_FORMAT.format( user.username, - user.first_name, - user.last_name, - user.registered_on.strftime("%Y-%m-%d"), - user.email, + user.id, ) ) @@ -139,80 +45,3 @@ def user_list(context: object) -> None: rv = 2 if isinstance(exc, BadConfig) else 1 click.get_current_context().exit(rv) - - -# User update CLI -@user_command_cli.command() -@common_options -@click.argument("updateuser") -@click.option( - "--username", - required=False, - help="Specify the new username", -) -@click.option( - 
"--email", - required=False, - help="Specify the new email", -) -@click.option( - "--first-name", - required=False, - help="Specify the new first name", -) -@click.option( - "--last-name", - required=False, - help="Specify the new last name", -) -@click.option( - "--role", - required=False, - type=click.Choice([role.name for role in Roles], case_sensitive=False), - help="Specify the new role", -) -@pass_cli_context -def user_update( - context: object, - updateuser: str, - username: str, - first_name: str, - last_name: str, - email: str, - role: str, -) -> None: - try: - config_setup(context) - # Query the user - user = User.query(username=updateuser) - - if user is None: - click.echo(f"User {updateuser} doesn't exist") - rv = 1 - else: - dict_to_update = {} - if username: - dict_to_update["username"] = username - - if first_name: - dict_to_update["first_name"] = first_name - - if last_name: - dict_to_update["last_name"] = last_name - - if email: - dict_to_update["email"] = email - - if role: - dict_to_update["role"] = role - - # Update the user - user.update(**dict_to_update) - - click.echo(f"User {updateuser} updated") - rv = 0 - except Exception as exc: - click.echo(exc, err=True) - rv = 2 if isinstance(exc, BadConfig) else 1 - - click.get_current_context().exit(rv) diff --git a/lib/pbench/client/__init__.py b/lib/pbench/client/__init__.py index 014a72ea78..2911a80250 100644 --- a/lib/pbench/client/__init__.py +++ b/lib/pbench/client/__init__.py @@ -49,13 +49,9 @@ class API(Enum): DATASETS_SEARCH = "datasets_search" DATASETS_VALUES = "datasets_values" ENDPOINTS = "endpoints" - LOGIN = "login" - LOGOUT = "logout" - REGISTER = "register" SERVER_AUDIT = "server_audit" SERVER_SETTINGS = "server_settings" UPLOAD = "upload" - USER = "user" class PbenchServerClient: @@ -205,7 +201,7 @@ def put( api: API, uri_params: Optional[JSONOBJECT] = None, *, - json: Optional[dict[str, str]] = None, + json: Optional[JSONOBJECT] = None, headers: Optional[dict[str, str]] = None, raise_error: bool = True, **kwargs, @@ -240,7 +236,7 @@ def post( api: API, uri_params: Optional[JSONOBJECT] = None, *, - json: Optional[dict[str, str]] = None, + json: Optional[JSONOBJECT] = None, headers: Optional[dict[str, str]] = None, raise_error: bool = True, **kwargs, @@ -443,6 +439,16 @@ def get_list(self, **kwargs) -> Iterator[Dataset]: break json = self.get(uri=next_url).json() + def get_user(self, username: str, add_auth_header: bool = True) -> JSONOBJECT: + """ """ + if add_auth_header: + return self.get( + api=API.USER, uri_params={"target_username": username} + ).json() + response = self.session.get(self._uri(API.USER, {"target_username": username})) + response.raise_for_status() + return response.json() + def get_metadata(self, dataset_id: str, metadata: list[str]) -> JSONOBJECT: """Return requested metadata for a specified dataset. diff --git a/lib/pbench/client/oidc_admin.py b/lib/pbench/client/oidc_admin.py index d21a6cb0d9..fb47a77b83 100644 --- a/lib/pbench/client/oidc_admin.py +++ b/lib/pbench/client/oidc_admin.py @@ -102,17 +102,16 @@ def user_login(self, client_id: str, username: str, password: str) -> dict: data = { "client_id": client_id, "grant_type": "password", - "scope": "profile email", + "scope": "openid profile email", "username": username, "password": password, } return self.post(path=url_path, data=data).json() - def get_user(self, username: str, token: str) -> dict: + def get_user(self, token: str) -> dict: """Get the OIDC user representation dict. 
Args: - username: username to query token: access_token string to validate Returns: @@ -132,10 +131,9 @@ def get_user(self, username: str, token: str) -> dict: } """ response = self.get( - f"admin/realms/{self.OIDC_REALM}/users", + f"/realms/{self.OIDC_REALM}/protocol/openid-connect/userinfo", headers={"Authorization": f"Bearer {token}"}, - username=username, ) if response.status_code == HTTPStatus.OK: - return response.json()[0] + return response.json() return {} diff --git a/lib/pbench/server/api/__init__.py b/lib/pbench/server/api/__init__.py index a15840b088..12b3c2358c 100644 --- a/lib/pbench/server/api/__init__.py +++ b/lib/pbench/server/api/__init__.py @@ -36,7 +36,6 @@ from pbench.server.api.resources.server_audit import ServerAudit from pbench.server.api.resources.server_settings import ServerSettings from pbench.server.api.resources.upload_api import Upload -from pbench.server.api.resources.users_api import Login, Logout, RegisterUser, UserAPI import pbench.server.auth.auth as Auth from pbench.server.database import init_db from pbench.server.database.database import Database @@ -131,24 +130,6 @@ def register_endpoints(api: Api, app: Flask, config: PbenchServerConfig): endpoint="endpoints", resource_class_args=(config,), ) - api.add_resource( - Login, - f"{base_uri}/login", - endpoint="login", - resource_class_args=(config,), - ) - api.add_resource( - Logout, - f"{base_uri}/logout", - endpoint="logout", - resource_class_args=(config,), - ) - api.add_resource( - RegisterUser, - f"{base_uri}/register", - endpoint="register", - resource_class_args=(config,), - ) api.add_resource( ServerAudit, f"{base_uri}/server/audit", @@ -163,11 +144,6 @@ def register_endpoints(api: Api, app: Flask, config: PbenchServerConfig): endpoint="server_settings", resource_class_args=(config,), ) - api.add_resource( - UserAPI, - f"{base_uri}/user/", - endpoint="user", - ) api.add_resource( Upload, f"{base_uri}/upload/", diff --git a/lib/pbench/server/api/resources/__init__.py b/lib/pbench/server/api/resources/__init__.py index e1a3d92f96..ac267d2979 100644 --- a/lib/pbench/server/api/resources/__init__.py +++ b/lib/pbench/server/api/resources/__init__.py @@ -402,7 +402,7 @@ def convert_username(value: Union[str, None], _) -> Union[str, None]: if not user: raise ConversionError(value, "username", http_status=HTTPStatus.NOT_FOUND) - return str(user.id) + return user.id def convert_dataset(value: str, _) -> Dataset: @@ -1579,11 +1579,11 @@ def _get_dataset_metadata( metadata = {} for i in requested_items: native_key = Metadata.get_native_key(i) - user_id = None + user: Optional[User] = None if native_key == Metadata.USER: - user_id = Auth.get_current_user_id() + user = Auth.token_auth.current_user() try: - metadata[i] = Metadata.getvalue(dataset=dataset, key=i, user_id=user_id) + metadata[i] = Metadata.getvalue(dataset=dataset, key=i, user=user) except MetadataError: metadata[i] = None @@ -1610,11 +1610,11 @@ def _set_dataset_metadata( fail: dict[str, str] = {} for k, v in metadata.items(): native_key = Metadata.get_native_key(k) - user_id = None + user: Optional[User] = None if native_key == Metadata.USER: - user_id = Auth.get_current_user_id() + user = Auth.token_auth.current_user() try: - Metadata.setvalue(key=k, value=v, dataset=dataset, user_id=user_id) + Metadata.setvalue(key=k, value=v, dataset=dataset, user=user) except MetadataError as e: fail[k] = str(e) return fail diff --git a/lib/pbench/server/api/resources/datasets_metadata.py b/lib/pbench/server/api/resources/datasets_metadata.py index 
fcce2c418e..385e7e4061 100644 --- a/lib/pbench/server/api/resources/datasets_metadata.py +++ b/lib/pbench/server/api/resources/datasets_metadata.py @@ -23,6 +23,7 @@ Metadata, MetadataBadValue, MetadataError, + User, ) diff --git a/lib/pbench/server/api/resources/upload_api.py b/lib/pbench/server/api/resources/upload_api.py index 59d3eaef7b..af737ea0cb 100644 --- a/lib/pbench/server/api/resources/upload_api.py +++ b/lib/pbench/server/api/resources/upload_api.py @@ -182,8 +182,9 @@ def _put(self, args: ApiParams, request: Request, context: ApiContext) -> Respon try: try: - user_id = Auth.token_auth.current_user().id - username = Auth.token_auth.current_user().username + authorized_user = Auth.token_auth.current_user() + user_id = authorized_user.id + username = authorized_user.username except Exception: username = None user_id = None @@ -272,7 +273,7 @@ def _put(self, args: ApiParams, request: Request, context: ApiContext) -> Respon # Create a tracking dataset object; it'll begin in UPLOADING state try: dataset = Dataset( - owner_id=user_id, + owner=authorized_user, name=dataset_name, resource_id=md5sum, access=access, diff --git a/lib/pbench/server/api/resources/users_api.py b/lib/pbench/server/api/resources/users_api.py index 68d4d4d9fe..e69de29bb2 100644 --- a/lib/pbench/server/api/resources/users_api.py +++ b/lib/pbench/server/api/resources/users_api.py @@ -1,546 +0,0 @@ -from datetime import datetime, timedelta, timezone -from http import HTTPStatus -from typing import NamedTuple - -from flask import current_app, jsonify, make_response, request -from flask_bcrypt import check_password_hash -from flask_restful import abort, Resource -from sqlalchemy.exc import IntegrityError, SQLAlchemyError - -import pbench.server.auth.auth as Auth -from pbench.server.database.database import Database -from pbench.server.database.models.auth_tokens import AuthToken -from pbench.server.database.models.server_settings import ServerSetting -from pbench.server.database.models.users import User - - -class RegisterUser(Resource): - """ - Abstracted pbench API for registering a new user - """ - - def __init__(self, config): - self.server_config = config - - def post(self): - """ - Post request for registering a new user. 
- This requires a JSON data with required user fields - { - "username": "username", - "password": "password", - "first_name": first_name, - "last_name": "last_name", - "email": "user@domain.com" - } - - Required headers include - - Content-Type: application/json - Accept: application/json - - :return: - Success: 201 with empty payload - Failure: , - response_object = { - "message": "failure message" - } - To get the auth token user has to perform the login action - """ - disabled = ServerSetting.get_disabled() - if disabled: - abort(HTTPStatus.SERVICE_UNAVAILABLE, **disabled) - - # get the post data - user_data = request.get_json() - if not user_data: - current_app.logger.warning("Invalid json object: {}", request.url) - abort(HTTPStatus.BAD_REQUEST, message="Invalid json object in request") - - username = user_data.get("username") - if not username: - current_app.logger.warning("Missing username field") - abort(HTTPStatus.BAD_REQUEST, message="Missing username field") - username = username.lower() - if User.is_admin_username(username): - current_app.logger.warning("User tried to register with admin username") - abort( - HTTPStatus.BAD_REQUEST, - message="Please choose another username", - ) - - # check if provided username already exists - try: - user = User.query(username=user_data.get("username")) - except Exception: - current_app.logger.exception("Exception while querying username") - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - if user: - current_app.logger.warning( - "A user tried to re-register. Username: {}", user.username - ) - abort(HTTPStatus.FORBIDDEN, message="Provided username is already in use.") - - password = user_data.get("password") - if not password: - current_app.logger.warning("Missing password field") - abort(HTTPStatus.BAD_REQUEST, message="Missing password field") - - email_id = user_data.get("email") - if not email_id: - current_app.logger.warning("Missing email field") - abort(HTTPStatus.BAD_REQUEST, message="Missing email field") - # check if provided email already exists - try: - user = User.query(email=email_id) - except Exception: - current_app.logger.exception("Exception while querying user email") - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - if user: - current_app.logger.warning( - "A user tried to re-register. 
Email: {}", user.email - ) - abort(HTTPStatus.FORBIDDEN, message="Provided email is already in use") - - first_name = user_data.get("first_name") - if not first_name: - current_app.logger.warning("Missing first_name field") - abort(HTTPStatus.BAD_REQUEST, message="Missing first_name field") - - last_name = user_data.get("last_name") - if not last_name: - current_app.logger.warning("Missing last_name field") - abort(HTTPStatus.BAD_REQUEST, message="Missing last_name field") - - try: - user = User( - username=username, - password=password, - first_name=first_name, - last_name=last_name, - email=email_id, - ) - - # insert the user - user.add() - current_app.logger.info( - "New user registered, username: {}, email: {}", username, email_id - ) - return "", HTTPStatus.CREATED - except Exception: - current_app.logger.exception("Exception while registering a user") - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - - -class Login(Resource): - """ - Pbench API for User Login or generating an auth token - """ - - def __init__(self, config): - self.server_config = config - self.token_expire_duration = self.server_config.get( - "pbench-server", "token_expiration_duration" - ) - - def post(self): - """ - Post request for logging in user. - The user is allowed to re-login multiple times and each time a new - valid auth token will be returned. - - This requires a JSON data with required user metadata fields - { - "username": "username", - "password": "password", - } - - Required headers include - - Content-Type: application/json - Accept: application/json - - :return: JSON Payload - Success: 200, - response_object = { - "auth_token": "" - "username": - } - Failure: , - response_object = { - "message": "failure message" - } - """ - # get the post data - post_data = request.get_json() - if not post_data: - current_app.logger.warning("Invalid json object: {}", request.url) - abort(HTTPStatus.BAD_REQUEST, message="Invalid json object in request") - - username = post_data.get("username") - if not username: - current_app.logger.warning("Username not provided during the login process") - abort(HTTPStatus.BAD_REQUEST, message="Please provide a valid username") - - password = post_data.get("password") - if not password: - current_app.logger.warning("Password not provided during the login process") - abort(HTTPStatus.BAD_REQUEST, message="Please provide a valid password") - - try: - # fetch the user data - user = User.query(username=username) - except Exception: - current_app.logger.exception("Exception occurred during user login") - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - - if not user: - current_app.logger.warning( - "No user found in the db for Username: {} while login", username - ) - abort(HTTPStatus.UNAUTHORIZED, message="Bad login") - - # Validate the password - if not check_password_hash(user.password, password): - current_app.logger.warning( - "Wrong password for user: {} during login", username - ) - abort(HTTPStatus.UNAUTHORIZED, message="Bad login") - - auth_token, expiration = Auth.encode_auth_token( - time_delta=timedelta(minutes=int(self.token_expire_duration)), - user_id=user.id, - ) - - # Add the new auth token to the database for later access - try: - token = AuthToken(token=auth_token, expiration=expiration) - user.add_token(token) - except IntegrityError: - current_app.logger.warning( - "Duplicate auth token got created, user might have tried to re-login immediately" - ) - abort(HTTPStatus.CONFLICT, message="Login collision; please wait and retry") 
- except SQLAlchemyError as e: - current_app.logger.error( - "SQLAlchemy Exception while logging in a user {}", type(e) - ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - except Exception: - current_app.logger.exception("Exception while logging in a user") - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - else: - current_app.logger.debug( - "New auth token registered for user {}", user.username - ) - - # We take the opportunity to remove any expired tokens for ANY user - # since we don't have a background reaper task dedicated to this kind - # of an operation. - try: - Database.db_session.query(AuthToken).filter( - AuthToken.expiration <= datetime.now(timezone.utc) - ).delete() - Database.db_session.commit() - except Exception as exc: - Database.db_session.rollback() - current_app.logger.error( - "Error encountered removing expired tokens during login for user {}: {}", - user.username, - exc, - ) - - response_object = { - "auth_token": auth_token, - "username": username, - } - return make_response(jsonify(response_object), HTTPStatus.OK) - - -class Logout(Resource): - """ - Pbench API for User logout and deleting an auth token - """ - - def __init__(self, config): - self.server_config = config - - def post(self): - """ - post request for logging out a user for the current auth token. - This requires a Pbench authentication auth token in the header field - - Required headers include - Authorization: Bearer - - :return: - Success: 200 with empty payload - Failure: , - response_object = { - "message": "failure message" - } - """ - auth_token = Auth.get_auth_token() - state = Auth.verify_internal_token(auth_token) - - # If the token is invalid, it won't be in the database; valid but - # expired tokens may have been removed from the database by other - # requests; otherwise, the token should be in the database. If the - # token is valid and found in the database, it must be associated with - # a user (assertion handled by database schema), so we can simply remove - # it from the database. - - if state == Auth.TokenState.INVALID: - current_app.logger.debug("User logout with invalid token: {}", auth_token) - else: - current_app.logger.debug("User logout with {} token: {}", state, auth_token) - try: - AuthToken.delete(auth_token) - except Exception: - current_app.logger.exception( - "Exception occurred deleting auth token {!r}", - auth_token, - ) - abort( - HTTPStatus.INTERNAL_SERVER_ERROR, - message="INTERNAL ERROR", - ) - - return "", HTTPStatus.OK - - -class UserAPI(Resource): - """ - Abstracted pbench API to get user data - """ - - TargetUser = NamedTuple( - "TargetUser", - [("target_user", User), ("http_status", HTTPStatus), ("http_message", str)], - ) - - def get_valid_target_user( - self, target_username: str, request_type: str - ) -> "UserAPI.TargetUser": - """ - Helper function to determine whether the API call is permitted for the target username - Right now it is only permitted for an admin user and the target user itself. 
- This returns a target User on success or None on failure; in the case of failure, - also returns the corresponding HTTP status code and message string - """ - current_user = Auth.token_auth.current_user() - if current_user.username == target_username: - return UserAPI.TargetUser( - target_user=current_user, http_status=HTTPStatus.OK, http_message="" - ) - if current_user.is_admin(): - target_user = User.query(username=target_username) - if target_user: - return UserAPI.TargetUser( - target_user=target_user, http_status=HTTPStatus.OK, http_message="" - ) - - current_app.logger.warning( - "User {} requested {} operation but user {} is not found.", - current_user.username, - request_type, - target_username, - ) - return UserAPI.TargetUser( - target_user=None, - http_status=HTTPStatus.NOT_FOUND, - http_message=f"User {target_username} not found", - ) - - current_app.logger.warning( - "User {} is not authorized to {} user {}.", - current_user.username, - request_type, - target_username, - ) - return UserAPI.TargetUser( - target_user=None, - http_status=HTTPStatus.FORBIDDEN, - http_message=f"Not authorized to access user {target_username}", - ) - - @Auth.token_auth.login_required() - def get(self, target_username): - """ - Get request for getting user data. - This requires a Pbench auth token in the header field - - Required headers include - - Content-Type: application/json - Accept: application/json - Authorization: Bearer Pbench_auth_token (user received upon login) - - :return: JSON Payload - Success: 200, - response_object = { - "username": , - "first_name": , - "last_name": , - "registered_on": , - } - Failure: , - response_object = { - "message": "failure message" - } - """ - disabled = ServerSetting.get_disabled(readonly=True) - if disabled: - abort(HTTPStatus.SERVICE_UNAVAILABLE, **disabled) - - result = self.get_valid_target_user(target_username, "GET") - if not result.target_user: - abort(result.http_status, message=result.http_message) - response_object = result.target_user.get_json() - return make_response(jsonify(response_object), HTTPStatus.OK) - - @Auth.token_auth.login_required() - def put(self, target_username): - """ - PUT request for updating user data. - This requires a Pbench auth token in the header field - - This requires a JSON data with required user registration fields that needs an update - Example Json: - { - "first_name": "new_name", - "password": "password", - ... - } - - Required headers include - - Content-Type: application/json - Accept: application/json - Authorization: Bearer Pbench_auth_token (user received upon login) - - :return: JSON Payload - Success: 200, - response_object = { - "username": , - "first_name": , - "last_name": , - "registered_on": , - } - Failure: , - response_object = { - "message": "failure message" - } - """ - disabled = ServerSetting.get_disabled() - if disabled: - abort(HTTPStatus.SERVICE_UNAVAILABLE, **disabled) - - user_payload = request.get_json() - if not user_payload: - current_app.logger.warning("Invalid json object: {}", request.url) - abort(HTTPStatus.BAD_REQUEST, message="Invalid json object in request") - - result = self.get_valid_target_user(target_username, "PUT") - if not result.target_user: - abort(result.http_status, message=result.http_message) - - # Check if the user payload contain fields that are either protected or - # are not present in the user db. If any key in the payload does not match - # with the column name we will abort the update request. 
- non_existent = set(user_payload.keys()).difference( - set(User.__table__.columns.keys()) - ) - if non_existent: - current_app.logger.warning( - "User trying to update fields that are not present in the user database. Fields: {}", - non_existent, - ) - abort( - HTTPStatus.BAD_REQUEST, - message="Invalid fields in update request payload", - ) - # Only admin user will be allowed to change other user's role. However, - # Admin users will not be able to change their admin role, - # This is done to prevent last admin user from de-admining him/herself - protected_db_fields = User.get_protected() - if ( - not Auth.token_auth.current_user().is_admin() - or Auth.token_auth.current_user() == result.target_user - ): - protected_db_fields.append("role") - - protected = set(user_payload.keys()).intersection(set(protected_db_fields)) - for field in protected: - if getattr(result.target_user, field) != user_payload[field]: - current_app.logger.warning( - "User trying to update the non-updatable fields. {}: {}", - field, - user_payload[field], - ) - abort(HTTPStatus.FORBIDDEN, message="Invalid update request payload") - try: - result.target_user.update(**user_payload) - except Exception: - current_app.logger.exception( - "Exception occurred during updating user object" - ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - - response_object = result.target_user.get_json() - return make_response(jsonify(response_object), HTTPStatus.OK) - - @Auth.token_auth.login_required() - def delete(self, target_username): - """ - Delete request for deleting a user from database. - This requires a Pbench auth token in the header field - - Required headers include - - Content-Type: application/json - Accept: application/json - Authorization: Bearer Pbench_auth_token (user received upon login) - - :return: - Success: 200 with empty payload - Failure: , - response_object = { - "message": "failure message" - } - """ - disabled = ServerSetting.get_disabled() - if disabled: - abort(HTTPStatus.SERVICE_UNAVAILABLE, **disabled) - - result = self.get_valid_target_user(target_username, "DELETE") - if not result.target_user: - abort(result.http_status, message=result.http_message) - # Do not allow admin user to get self deleted via API - if ( - result.target_user.is_admin() - and Auth.token_auth.current_user() == result.target_user - ): - current_app.logger.warning( - "Admin user is not allowed to self delete via API call. 
Username: {}", - target_username, - ) - abort(HTTPStatus.FORBIDDEN, message="Not authorized to delete user") - - # If target user is a valid and not an admin proceed to delete - try: - User.delete(target_username) - current_app.logger.info( - "User entry deleted for user with username: {}, by user: {}", - target_username, - Auth.token_auth.current_user().username, - ) - except Exception: - current_app.logger.exception( - "Exception occurred while deleting a user {}", - target_username, - ) - abort(HTTPStatus.INTERNAL_SERVER_ERROR, message="INTERNAL ERROR") - - return "", HTTPStatus.OK diff --git a/lib/pbench/server/auth/__init__.py b/lib/pbench/server/auth/__init__.py index c8292f26f3..7e2856d5e6 100644 --- a/lib/pbench/server/auth/__init__.py +++ b/lib/pbench/server/auth/__init__.py @@ -1,7 +1,6 @@ """OpenID Connect (OIDC) Client object definition.""" from configparser import NoOptionError, NoSectionError -from dataclasses import dataclass from http import HTTPStatus import logging from typing import Any, Optional @@ -152,54 +151,6 @@ def post( return self._method("POST", path, data, json, headers=headers, **kwargs) -@dataclass -class InternalUser: - """Internal user class for storing user related fields fetched - from OIDC token decode. - - Note: Class attributes are duck-typed from the SQL User object, - and they need to match with the respective sql entry! - """ - - id: str - username: str - email: str - first_name: Optional[str] = None - last_name: Optional[str] = None - roles: Optional[list[str]] = None - - def __str__(self) -> str: - return f"User, id: {self.id}, username: {self.username}" - - def is_admin(self): - return self.roles and ("ADMIN" in self.roles) - - @classmethod - def create(cls, client_id: str, token_payload: dict) -> "InternalUser": - """Helper method to return the Internal User object. - - Args: - client_id : authorized client id string - token_payload : Dict representation of decoded token - - Returns: - InternalUser object - """ - audiences = token_payload.get("resource_access", {}) - try: - roles = audiences[client_id].get("roles", []) - except KeyError: - roles = [] - return cls( - id=token_payload["sub"], - username=token_payload.get("preferred_username"), - email=token_payload.get("email"), - first_name=token_payload.get("given_name"), - last_name=token_payload.get("family_name"), - roles=roles, - ) - - class OpenIDClient: """OpenID Connect client object""" diff --git a/lib/pbench/server/auth/auth.py b/lib/pbench/server/auth/auth.py index a2ed2720a5..79cd2a7477 100644 --- a/lib/pbench/server/auth/auth.py +++ b/lib/pbench/server/auth/auth.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta, timezone import enum from http import HTTPStatus -from typing import Optional, Tuple, Union +from typing import Optional, Tuple from flask import current_app, Flask, request from flask_httpauth import HTTPTokenAuth @@ -9,7 +9,7 @@ import jwt from pbench.server import PbenchServerConfig -from pbench.server.auth import InternalUser, OpenIDClient, OpenIDTokenInvalid +from pbench.server.auth import OpenIDClient, OpenIDTokenInvalid from pbench.server.database.models.auth_tokens import AuthToken from pbench.server.database.models.users import User @@ -124,7 +124,7 @@ def get_auth_token() -> str: @token_auth.verify_token -def verify_auth(auth_token: str) -> Optional[Union[User, InternalUser]]: +def verify_auth(auth_token: str) -> Optional[User]: """Validates the auth token of the current request. 
If an OpenID Connect client is configured, then actual token verification @@ -135,8 +135,7 @@ def verify_auth(auth_token: str) -> Optional[Union[User, InternalUser]]: auth_token : authorization token string Returns: - None if the token is not valid, a `User` object when the token is - an internally generated one, and an `InternalUser` object when the + None if the token is not valid, a `User` object when the token is validated using the OpenID Connect client. """ if not auth_token: @@ -209,35 +208,43 @@ def verify_auth_internal(auth_token_s: str) -> Optional[User]: return user -def verify_auth_oidc(auth_token: str) -> Optional[InternalUser]: +def verify_auth_oidc(auth_token: str) -> Optional[User]: """Verify a token provided to the Pbench server which was obtained from a third party identity provider. + Note: Upon token introspection if we get a valid token, we import the + available user information from the token into our internal User database. + Args: auth_token : Token to authenticate Returns: - InternalUser object if the verification succeeds, None on failure. + User object if the verification succeeds, None on failure. """ + user = None try: token_payload = oidc_client.token_introspect(token=auth_token) except OpenIDTokenInvalid: - token_payload = None + pass except Exception: current_app.logger.exception( "Unexpected exception occurred while verifying the auth token {}", auth_token, ) - token_payload = None - return ( - None - if token_payload is None - else InternalUser.create( - # FIXME - `client_id` is the value pulled from the Pbench Server - # "openid-connect" "client" field in the configuration file. This - # needs to be an ID from the OpenID Connect response payload (online - # case) or decoded token (offline case). - client_id=oidc_client.client_id, - token_payload=token_payload, - ) - ) + pass + else: + # Extract what we want to cache from the access token + user_id = token_payload["sub"] + username = token_payload.get("preferred_username", user_id) + audiences = token_payload.get("resource_access", {}) + pb_aud = audiences.get(oidc_client.client_id, {}) + roles = pb_aud.get("roles", []) + + # Create or update the user in our cache + user = User.query(id=user_id) + if not user: + user = User(id=user_id, username=username, roles=roles) + user.add() + else: + user.update(username=username, roles=roles) + return user diff --git a/lib/pbench/server/database/alembic/env.py b/lib/pbench/server/database/alembic/env.py index 3142264ea3..771ddf26d4 100644 --- a/lib/pbench/server/database/alembic/env.py +++ b/lib/pbench/server/database/alembic/env.py @@ -59,7 +59,9 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) + context.configure( + connection=connection, target_metadata=target_metadata, compare_type=True + ) with context.begin_transaction(): context.run_migrations() diff --git a/lib/pbench/server/database/alembic/versions/f628657bed56_user_table_update_oidc.py b/lib/pbench/server/database/alembic/versions/f628657bed56_user_table_update_oidc.py new file mode 100644 index 0000000000..99fcbcfc66 --- /dev/null +++ b/lib/pbench/server/database/alembic/versions/f628657bed56_user_table_update_oidc.py @@ -0,0 +1,117 @@ +"""Update User table to only keep username, oidc_id, roles of the user + +Revision ID: f628657bed56 +Revises: fa12f45a2a5a +Create Date: 2023-02-26 23:24:16.650879 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import 
postgresql + +# revision identifiers, used by Alembic. +revision = "f628657bed56" +down_revision = "fa12f45a2a5a" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column("users", sa.Column("_roles", sa.String(length=255), nullable=True)) + op.execute("ALTER TABLE users DROP CONSTRAINT users_pkey CASCADE") + op.alter_column( + "users", + "id", + existing_type=sa.INTEGER(), + type_=sa.String(length=255), + existing_nullable=False, + ) + op.alter_column( + "users", "username", existing_type=sa.VARCHAR(length=255), nullable=False + ) + op.create_primary_key("user_primary", "users", ["id"]) + op.drop_constraint("users_email_key", "users", type_="unique") + op.drop_column("users", "role") + op.drop_column("users", "password") + op.drop_column("users", "first_name") + op.drop_column("users", "last_name") + op.drop_column("users", "email") + op.drop_column("users", "registered_on") + op.drop_column("active_tokens", "user_id") + op.add_column("dataset_metadata", sa.Column("user_ref", sa.String(), nullable=True)) + op.create_foreign_key(None, "dataset_metadata", "users", ["user_ref"], ["id"]) + op.drop_column("dataset_metadata", "user_id") + op.create_foreign_key(None, "datasets", "users", ["owner_id"], ["id"]) + # ### end Alembic commands ### + + +def downgrade() -> None: + op.execute("ALTER TABLE users DROP CONSTRAINT user_primary CASCADE") + op.add_column( + "users", + sa.Column( + "registered_on", postgresql.TIMESTAMP(), autoincrement=False, nullable=False + ), + ) + op.add_column( + "users", + sa.Column("email", sa.VARCHAR(length=255), autoincrement=False, nullable=False), + ) + op.add_column( + "users", + sa.Column( + "last_name", sa.VARCHAR(length=255), autoincrement=False, nullable=False + ), + ) + op.add_column( + "users", + sa.Column( + "first_name", sa.VARCHAR(length=255), autoincrement=False, nullable=False + ), + ) + op.add_column( + "users", + sa.Column("password", postgresql.BYTEA(), autoincrement=False, nullable=False), + ) + op.add_column( + "users", + sa.Column( + "role", + postgresql.ENUM("ADMIN", name="roles"), + autoincrement=False, + nullable=True, + ), + ) + op.create_unique_constraint("users_email_key", "users", ["email"]) + op.alter_column( + "users", "username", existing_type=sa.VARCHAR(length=255), nullable=False + ) + op.alter_column( + "users", + "id", + existing_type=sa.String(length=255), + type_=sa.INTEGER(), + existing_nullable=False, + ) + op.create_primary_key("user_primary", "users", ["id"]) + op.drop_column("users", "_roles") + op.add_column( + "dataset_metadata", + sa.Column( + "user_id", sa.VARCHAR(length=255), autoincrement=False, nullable=True + ), + ) + op.drop_column("dataset_metadata", "user_ref") + op.add_column( + "active_tokens", + sa.Column("user_id", sa.INTEGER(), autoincrement=False, nullable=False), + ) + op.create_foreign_key( + "active_tokens_user_id_fkey", + "active_tokens", + "users", + ["user_id"], + ["id"], + ondelete="CASCADE", + ) + # ### end Alembic commands ### diff --git a/lib/pbench/server/database/models/active_tokens.py b/lib/pbench/server/database/models/active_tokens.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lib/pbench/server/database/models/datasets.py b/lib/pbench/server/database/models/datasets.py index 6efc32c8a4..0ae0d88ec6 100644 --- a/lib/pbench/server/database/models/datasets.py +++ b/lib/pbench/server/database/models/datasets.py @@ -16,6 +16,7 @@ OPTION_DATASET_LIFETIME, ServerSetting, ) +from pbench.server.database.models.users import User class DatasetError(Exception): 
@@ -277,6 +278,7 @@ class Dataset(Database.Base): metadatas: A sequence of Metadata objects linked to this dataset operations: A sequence of component Operation objects linked to this dataset + users: Owner's user table entry linked in this dataset """ __tablename__ = "datasets" @@ -302,8 +304,11 @@ class Dataset(Database.Base): # Dataset name name = Column(String(1024), unique=False, nullable=False) - # OIDC UUID of the owning user - owner_id = Column(String(255), nullable=False) + # ID of the owning user + owner_id = Column(String, ForeignKey("users.id"), nullable=False) + + # Indirect reference to the owning User record + owner = relationship("User") # This is the MD5 hash of the dataset tarball, which we use as the unique # dataset resource ID throughout the Pbench server. @@ -420,7 +425,7 @@ def as_dict(self) -> Dict[str, Any]: return { "access": self.access, "name": self.name, - "owner_id": self.owner_id, + "owner": self.owner.username, "uploaded": self.uploaded.isoformat(), "metalog": metadata_log, "operations": { @@ -435,7 +440,7 @@ def __str__(self) -> str: Returns: string: Representation of the dataset """ - return f"({self.owner_id})|{self.name}" + return f"({self.owner.username})|{self.name}" def add(self): """Add the Dataset object to the database.""" @@ -676,9 +681,10 @@ class Metadata(Database.Base): key = Column(String(255), unique=False, nullable=False, index=True) value = Column(JSON, unique=False, nullable=True) dataset_ref = Column(Integer, ForeignKey("datasets.id"), nullable=False) + user_ref = Column(String, ForeignKey("users.id"), nullable=True) dataset = relationship("Dataset", back_populates="metadatas") - user_id = Column(String(255), nullable=True) + user = relationship("User", back_populates="dataset_metadata", single_parent=True) @validates("key") def validate_key(self, _, value: Any) -> str: @@ -777,7 +783,7 @@ def is_key_path(key: str, valid: List[str], metalog_key_ok: bool = False) -> boo @staticmethod def getvalue( - dataset: Dataset, key: str, user_id: Optional[str] = None + dataset: Dataset, key: str, user: Optional[User] = None ) -> Optional[JSON]: """Returns the value of the specified metadata key. @@ -838,7 +844,7 @@ def getvalue( value = dataset.as_dict() else: try: - meta = Metadata.get(dataset, native_key, user_id) + meta = Metadata.get(dataset, native_key, user) except MetadataNotFound: return None value = meta.value @@ -933,7 +939,7 @@ def validate(dataset: Optional[Dataset], key: str, value: Any) -> Any: return v @staticmethod - def setvalue(dataset: Dataset, key: str, value: Any, user_id: Optional[str] = None): + def setvalue(dataset: Dataset, key: str, value: Any, user: Optional[User] = None): """Set a metadata value. This method supports hierarchical dotted paths like "dashboard.seen" @@ -972,7 +978,7 @@ def setvalue(dataset: Dataset, key: str, value: Any, user_id: Optional[str] = No return try: - meta = Metadata.get(dataset, native_key, user_id) + meta = Metadata.get(dataset, native_key, user) # SQLAlchemy determines whether to perform an `update` based on the # Python object reference. 
We make a copy here to ensure that it @@ -1010,17 +1016,17 @@ def setvalue(dataset: Dataset, key: str, value: Any, user_id: Optional[str] = No meta.update() else: Metadata.create( - dataset=dataset, key=native_key, value=meta_value, user_id=user_id + dataset=dataset, key=native_key, value=meta_value, user=user ) @staticmethod - def _query(dataset: Dataset, key: str, user_id: Optional[str]) -> Query: + def _query(dataset: Dataset, key: str, user: Optional[User]) -> Query: return Database.db_session.query(Metadata).filter_by( - dataset=dataset, key=key, user_id=user_id + dataset=dataset, key=key, user=user ) @staticmethod - def get(dataset: Dataset, key: str, user_id: Optional[str] = None) -> "Metadata": + def get(dataset: Dataset, key: str, user: Optional[User] = None) -> "Metadata": """Fetch a Metadata (row) from the database by key name. Args: @@ -1035,7 +1041,7 @@ def get(dataset: Dataset, key: str, user_id: Optional[str] = None) -> "Metadata" The Metadata model object """ try: - meta = __class__._query(dataset, key, user_id).first() + meta = __class__._query(dataset, key, user).first() except SQLAlchemyError as e: Metadata.logger.error("Can't get {}>>{} from DB: {}", dataset, key, str(e)) raise MetadataSqlError("getting", dataset, key) from e @@ -1045,7 +1051,7 @@ def get(dataset: Dataset, key: str, user_id: Optional[str] = None) -> "Metadata" return meta @staticmethod - def remove(dataset: Dataset, key: str, user_id: Optional[str] = None): + def remove(dataset: Dataset, key: str, user: Optional[User] = None): """Remove a metadata key from the dataset. Args: @@ -1056,7 +1062,7 @@ def remove(dataset: Dataset, key: str, user_id: Optional[str] = None): DatasetSqlError : Something went wrong """ try: - __class__._query(dataset, key, user_id).delete() + __class__._query(dataset, key, user).delete() Database.db_session.commit() except SQLAlchemyError as e: Metadata.logger.error( @@ -1073,7 +1079,7 @@ def add(self, dataset: Dataset): raise DatasetBadParameterType(dataset, Dataset) try: - Metadata.get(dataset, self.key, self.user_id) + Metadata.get(dataset, self.key, self.user) except MetadataNotFound: pass else: diff --git a/lib/pbench/server/database/models/users.py b/lib/pbench/server/database/models/users.py index 3ded0bda6a..2008326f02 100644 --- a/lib/pbench/server/database/models/users.py +++ b/lib/pbench/server/database/models/users.py @@ -1,58 +1,123 @@ -import datetime import enum from typing import Optional -from flask_bcrypt import generate_password_hash -from sqlalchemy import Column, DateTime, Enum, Integer, LargeBinary, String -from sqlalchemy.orm import relationship, validates -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy import Column, String +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import relationship +from pbench.server import JSONOBJECT from pbench.server.database.database import Database -from pbench.server.database.models.auth_tokens import AuthToken class Roles(enum.Enum): ADMIN = 1 +class UserError(Exception): + """A base class for errors reported by the user class. + + It is never raised directly, but may be used in "except" clauses. + """ + + +class UserSqlError(UserError): + """SQLAlchemy errors reported through User operations. + + The exception will identify the operation being performed and the config + key; the cause will specify the original SQLAlchemy exception. 
+ """ + + def __init__(self, operation: str, params: JSONOBJECT, cause: str): + self.operation = operation + self.params = params + self.cause = cause + + def __str__(self) -> str: + return f"Error {self.operation} {self.params!r}: {self.cause}" + + +class UserDuplicate(UserError): + """Attempt to commit a duplicate unique value.""" + + def __init__(self, user: "User", cause: str): + self.user = user + self.cause = cause + + def __str__(self) -> str: + return f"Duplicate user setting in {self.user.get_json()}: {self.cause}" + + +class UserNullKey(UserError): + """Attempt to commit a User row with an empty required column.""" + + def __init__(self, user: "User", cause: str): + self.user = user + self.cause = cause + + def __str__(self) -> str: + return f"Missing required key in {self.user.get_json()}: {self.cause}" + + class User(Database.Base): """User Model for storing user related details.""" __tablename__ = "users" - id = Column(Integer, primary_key=True, autoincrement=True) + id = Column(String(255), primary_key=True) username = Column(String(255), unique=True, nullable=False) - first_name = Column(String(255), unique=False, nullable=False) - last_name = Column(String(255), unique=False, nullable=False) - password = Column(LargeBinary(128), nullable=False) - registered_on = Column(DateTime, nullable=False, default=datetime.datetime.now()) - email = Column(String(255), unique=True, nullable=False) - role = Column(Enum(Roles), unique=False, nullable=True) - auth_tokens = relationship("AuthToken", back_populates="user") + dataset_metadata = relationship( + "Metadata", back_populates="user", cascade="all, delete-orphan" + ) + _roles = Column(String(255), unique=False, nullable=True) + + @property + def roles(self): + if self._roles: + return self._roles.split(";") + else: + return [] + + @roles.setter + def roles(self, value): + try: + self._roles = ";".join(value) + except Exception as e: + raise UserSqlError("Setting role", value, str(e)) from e def __str__(self): return f"User, id: {self.id}, username: {self.username}" - def get_json(self): - return { - "username": self.username, - "email": self.email, - "first_name": self.first_name, - "last_name": self.last_name, - "registered_on": self.registered_on, - } + def get_json(self) -> JSONOBJECT: + """Return a JSON object for this User object. - @staticmethod - def get_protected() -> list[str]: - """Return protected column names that are not allowed for external updates. + Returns: + A JSONOBJECT with all the object fields mapped to appropriate names. + """ + return {"username": self.username, "id": self.id, "roles": self.roles} + + def _decode(self, exception: IntegrityError) -> Exception: + """Decode a SQLAlchemy IntegrityError to look for a recognizable UNIQUE + or NOT NULL constraint violation. - `auth_tokens` is already protected from external updates via PUT api since - it is of type sqlalchemy relationship ORM package and not a sqlalchemy column. + Return the original exception if it doesn't match. + + Args: + exception : An IntegrityError to decode + + Returns: + a more specific exception, or the original if decoding fails """ - return ["registered_on", "id"] + # Postgres engine returns (code, message) but sqlite3 engine only + # returns (message); so always take the last element. 
+ cause = exception.orig.args[-1] + if "UNIQUE constraint" in cause: + return UserDuplicate(self, cause) + elif "NOT NULL constraint" in cause: + return UserNullKey(self, cause) + return exception @staticmethod - def query(id=None, username=None, email=None) -> Optional["User"]: + def query(id: str = None, username: str = None) -> Optional["User"]: """Find a user using one of the provided arguments. The first argument which is not None is used in the query. The order @@ -61,12 +126,11 @@ def query(id=None, username=None, email=None) -> Optional["User"]: Returns: A User object if a user is found, None otherwise. """ + dbsq = Database.db_session.query(User) if id: - user = Database.db_session.query(User).filter_by(id=id).first() + user = dbsq.filter_by(id=id).first() elif username: - user = Database.db_session.query(User).filter_by(username=username).first() - elif email: - user = Database.db_session.query(User).filter_by(email=email).first() + user = dbsq.filter_by(username=username).first() else: user = None return user @@ -80,34 +144,11 @@ def add(self): try: Database.db_session.add(self) Database.db_session.commit() - except Exception: + except Exception as e: Database.db_session.rollback() - raise - - @validates("role") - def evaluate_role(self, key: str, value: str) -> Optional[str]: - try: - return Roles[value.upper()] - except KeyError: - return None - - @validates("password") - def evaluate_password(self, key: str, value: str) -> str: - return generate_password_hash(value) - - def add_token(self, auth_token: AuthToken): - """Add the given token to the database - - Args: - token : An AuthToken object add for this user - """ - try: - self.auth_tokens.append(auth_token) - Database.db_session.add(auth_token) - Database.db_session.commit() - except Exception: - Database.db_session.rollback() - raise + if isinstance(e, IntegrityError): + raise self._decode(e) from e + raise UserSqlError("adding", self.get_json(), str(e)) from e def update(self, **kwargs): """Update the current user object with given keyword arguments.""" @@ -115,26 +156,20 @@ def update(self, **kwargs): for key, value in kwargs.items(): setattr(self, key, value) Database.db_session.commit() - except Exception: + except Exception as e: Database.db_session.rollback() - raise - - @staticmethod - def delete(username: str): - """Delete the user with a given username, except admin. + if isinstance(e, IntegrityError): + raise self._decode(e) from e + raise UserSqlError("Updating", self.get_json(), str(e)) from e - Args: - username : the username to delete - """ - user_query = Database.db_session.query(User).filter_by(username=username) - if user_query.count() == 0: - raise NoResultFound(f"User {username} does not exist") + def delete(self): + """Delete the user with a given username, except admin.""" try: - user_query.delete() + Database.db_session.delete(self) Database.db_session.commit() - except Exception: + except Exception as e: Database.db_session.rollback() - raise + raise UserSqlError("deleting", self.get_json(), str(e)) from e def is_admin(self) -> bool: """This method checks whether the given user has an admin role. @@ -146,14 +181,4 @@ def is_admin(self) -> bool: Returns: True if the user's role is ADMIN, False otherwise. """ - return self.role is Roles.ADMIN - - @staticmethod - def is_admin_username(username: str) -> bool: - """Determine if the given user name is an admin user. - - Returns: - True if the user is an admin; False otherwise. 
- """ - admins = ["admin"] - return username in admins + return Roles.ADMIN.name in self.roles diff --git a/lib/pbench/test/functional/server/conftest.py b/lib/pbench/test/functional/server/conftest.py index 060f7b257b..618bd9bd49 100644 --- a/lib/pbench/test/functional/server/conftest.py +++ b/lib/pbench/test/functional/server/conftest.py @@ -13,7 +13,7 @@ LAST_NAME: str = "User" -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def server_client(): """ Used by Pbench Server functional tests to connect to a server. @@ -31,7 +31,7 @@ def server_client(): return client -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def oidc_admin(server_client: PbenchServerClient): """ Used by Pbench Server functional tests to get admin access @@ -40,7 +40,7 @@ def oidc_admin(server_client: PbenchServerClient): return OIDCAdmin(server_url=server_client.endpoints["openid"]["server"]) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def register_test_user(oidc_admin: OIDCAdmin): """Create a test user for functional tests.""" response = oidc_admin.create_new_user( diff --git a/lib/pbench/test/unit/server/auth/test_auth.py b/lib/pbench/test/unit/server/auth/test_auth.py index 4559df6dac..17f042e7c6 100644 --- a/lib/pbench/test/unit/server/auth/test_auth.py +++ b/lib/pbench/test/unit/server/auth/test_auth.py @@ -1,7 +1,7 @@ import configparser from dataclasses import dataclass from http import HTTPStatus -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any, Dict, Optional, Tuple from flask import current_app, Flask import jwt @@ -13,7 +13,6 @@ import pbench.server.auth from pbench.server.auth import ( Connection, - InternalUser, OpenIDClient, OpenIDClientError, OpenIDTokenInvalid, @@ -191,79 +190,12 @@ def test_post(self, fake_method, conn): assert args["kwargs"] == {"headers": None, "five": "six"} -class TestInternalUser: - """Verify the behavior of the InternalUser class""" - - def test_str(self): - user = InternalUser(id="X", username="Y", email="Z") - assert str(user) == "User, id: X, username: Y" - - def test_is_admin(self): - user = InternalUser(id="X", username="Y", email="Z") - assert not user.is_admin() - user = InternalUser(id="X", username="Y", email="Z", roles=["ADMIN"]) - assert user.is_admin() - - def test_create_missing_sub(self): - with pytest.raises(KeyError): - InternalUser.create("us", {}) - - def test_create_just_sub(self): - user = InternalUser.create("us", {"sub": "them"}) - assert user.id == "them" - assert user.username is None - assert user.email is None - assert user.first_name is None - assert user.last_name is None - assert user.roles == [] - - def test_create_full(self): - user = InternalUser.create( - "us", - { - "sub": "them", - "preferred_username": "userA", - "email": "userA@hostB.net", - "given_name": "Agiven", - "family_name": "Family", - }, - ) - assert user.id == "them" - assert user.username == "userA" - assert user.email == "userA@hostB.net" - assert user.first_name == "Agiven" - assert user.last_name == "Family" - assert user.roles == [] - - def test_create_w_roles(self): - # Verify not in audience list - # FIXME - this is weird as coded, why would we get a payload where no - # other audience is listed? 
- user = InternalUser.create( - "us", - {"sub": "them", "resource_access": {"not-us": {}}}, - ) - assert user.id == "them" - assert user.username is None - assert user.email is None - assert user.first_name is None - assert user.last_name is None - assert user.roles == [] - - user = InternalUser.create( - "us", - {"sub": "them", "resource_access": {"us": {"roles": ["roleA", "roleB"]}}}, - ) - assert user.id == "them" - assert user.username is None - assert user.email is None - assert user.first_name is None - assert user.last_name is None - assert user.roles == ["roleA", "roleB"] - - def gen_rsa_token( - audience: str, private_key: str, exp: str = "99999999999" + audience: str, + private_key: str, + exp: str = "99999999999", + username: str = "dummy", + oidc_client_roles: Optional[list[str]] = None, ) -> Tuple[str, Dict[str, str]]: """Helper function for generating an RSA token using the given private key. @@ -272,8 +204,20 @@ def gen_rsa_token( private_key : The private key to use for encoding exp : Optional expiration Epoch time stamp to use, defaults to Wednesday November 16th, 5138 at 9:47:39 AM UTC + username: username value to encode in the token + oidc_client_roles: Any OIDC client roles to include in the token """ - payload = {"iat": 1659476706, "exp": exp, "sub": "12345", "aud": audience} + payload = { + "iat": 1659476706, + "exp": exp, + "sub": "12345", + "aud": audience, + "preferred_username": username, + } + if oidc_client_roles: + payload["resource_access"] = { + audience: {"roles": oidc_client_roles}, + } return jwt.encode(payload, key=private_key, algorithm="RS256"), payload @@ -553,7 +497,7 @@ def record_abort(code: int, message: str = ""): def test_verify_auth_exc(self, monkeypatch, make_logger): """Verify exception handling originating from verify_auth_internal""" - def vai_exc(token_auth: str) -> Optional[Union[User, InternalUser]]: + def vai_exc(token_auth: str) -> Optional[User]: raise Exception("Some failure") monkeypatch.setattr(Auth, "verify_auth_internal", vai_exc) @@ -606,7 +550,37 @@ def delete(*args, **kwargs): user = Auth.verify_auth(pbench_drb_token_invalid) assert user is None - def test_verify_auth_oidc_offline(self, monkeypatch, rsa_keys, make_logger): + @pytest.mark.parametrize("roles", [["ROLE"], ["ROLE1", "ROLE2"], [], None]) + def test_verify_auth_oidc(self, monkeypatch, rsa_keys, make_logger, roles): + """Verify OIDC token offline verification success path""" + client_id = "us" + if roles is None: + token, expected_payload = gen_rsa_token(client_id, rsa_keys["private_key"]) + else: + token, expected_payload = gen_rsa_token( + client_id, rsa_keys["private_key"], oidc_client_roles=roles + ) + + # Mock the Connection object and generate an OpenIDClient object, + # installing it as Auth module's OIDC client. 
+ config = mock_connection( + monkeypatch, client_id, public_key=rsa_keys["public_key"] + ) + oidc_client = OpenIDClient.construct_oidc_client(config) + monkeypatch.setattr(Auth, "oidc_client", oidc_client) + + app = Flask("test-verify-auth-oidc-offline") + app.logger = make_logger + with app.app_context(): + user = Auth.verify_auth(token) + + assert user.id == "12345" + if roles is not None: + assert user.roles == roles + else: + assert user.roles == [] + + def test_verify_auth_oidc_user_update(self, monkeypatch, rsa_keys, make_logger): """Verify OIDC token offline verification success path""" client_id = "us" token, expected_payload = gen_rsa_token(client_id, rsa_keys["private_key"]) @@ -625,8 +599,22 @@ def test_verify_auth_oidc_offline(self, monkeypatch, rsa_keys, make_logger): user = Auth.verify_auth(token) assert user.id == "12345" + assert user.roles == [] + + # Generate a new token with a role for the same user + token, expected_payload = gen_rsa_token( + client_id, + rsa_keys["private_key"], + username="new_dummy", + oidc_client_roles=["ROLE"], + ) + with app.app_context(): + user = Auth.verify_auth(token) + assert user.id == "12345" + assert user.roles == ["ROLE"] + assert user.username == "new_dummy" - def test_verify_auth_oidc_offline_invalid(self, monkeypatch, rsa_keys, make_logger): + def test_verify_auth_oidc_invalid(self, monkeypatch, rsa_keys, make_logger): """Verify OIDC token offline verification via Auth.verify_auth() fails gracefully with an invalid token """ diff --git a/lib/pbench/test/unit/server/conftest.py b/lib/pbench/test/unit/server/conftest.py index 8f132f1531..0ead6d8d8e 100644 --- a/lib/pbench/test/unit/server/conftest.py +++ b/lib/pbench/test/unit/server/conftest.py @@ -260,12 +260,8 @@ def create_user(client) -> User: client : Fixture to ensure we have a database """ user = User( - email="test@example.com", id=TEST_USER_ID, - password=generic_password, username="test", - first_name="Test", - last_name="Account", ) user.add() return user @@ -279,13 +275,9 @@ def create_admin_user(client) -> User: client : Fixture to ensure we have a database """ user = User( - email=admin_email, id=ADMIN_USER_ID, - password=generic_password, username=admin_username, - first_name="Admin", - last_name="Account", - role="ADMIN", + roles=["ADMIN"], ) user.add() return user @@ -299,12 +291,8 @@ def create_drb_user(client): client : Fixture to ensure we have a database """ drb = User( - email="drb@example.com", id=DRB_USER_ID, - password=generic_password, username="drb", - first_name="Authorized", - last_name="User", ) drb.add() return drb @@ -366,14 +354,14 @@ def attach_dataset(create_drb_user, create_user): # for one Dataset and letting it default for the other. 
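The dataset fixtures below switch from passing owner_id strings to passing the User object itself, leaning on the ORM relationship to fill in the foreign key. A generic SQLAlchemy sketch of that behaviour, with made-up table and column names rather than the actual pbench models (assumes SQLAlchemy 1.4+):

    from sqlalchemy import Column, ForeignKey, String, create_engine
    from sqlalchemy.orm import declarative_base, relationship, sessionmaker

    Base = declarative_base()


    class User(Base):
        __tablename__ = "users"
        id = Column(String, primary_key=True)
        username = Column(String, unique=True)


    class Dataset(Base):
        __tablename__ = "datasets"
        resource_id = Column(String, primary_key=True)
        name = Column(String)
        owner_id = Column(String, ForeignKey("users.id"))
        owner = relationship("User")  # lets callers write Dataset(owner=user)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    user = User(id="3", username="drb")
    ds = Dataset(resource_id="random_md5", name="fio", owner=user)
    session.add(ds)            # the related User is cascaded in as well
    session.commit()
    assert ds.owner_id == "3"  # FK populated from the relationship at flush time
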
with freeze_time("1970-01-01 00:42:00"): Dataset( - owner_id=str(create_drb_user.id), + owner=create_drb_user, uploaded=datetime.datetime(2022, 1, 1), name="drb", access="private", resource_id="random_md5_string1", ).add() Dataset( - owner_id=str(create_user.id), + owner=create_user, name="test", access="private", resource_id="random_md5_string2", @@ -414,38 +402,38 @@ def more_datasets( """ with freeze_time("1978-06-26 08:00:00"): Dataset( - owner_id=str(create_drb_user.id), + owner=create_drb_user, name="fio_1", access="public", resource_id="random_md5_string3", ).add() Dataset( - owner_id=str(create_user.id), + owner=create_user, uploaded=datetime.datetime(2022, 1, 1), name="fio_2", access="public", resource_id="random_md5_string4", ).add() Dataset( - owner_id=str(create_user.id), + owner=create_user, name="uperf_1", access="private", resource_id="random_md5_string5", ).add() Dataset( - owner_id=str(create_user.id), + owner=create_user, name="uperf_2", access="private", resource_id="random_md5_string6", ).add() Dataset( - owner_id=str(create_user.id), + owner=create_user, name="uperf_3", access="private", resource_id="random_md5_string7", ).add() Dataset( - owner_id=str(create_user.id), + owner=create_user, name="uperf_4", access="private", resource_id="random_md5_string8", @@ -897,11 +885,11 @@ def generate_token( "scope": "openid profile email", "sid": "1988612e-774d-43b8-8d4a-bbc05ee55edb", "email_verified": True, - "name": user.first_name + " " + user.last_name, + "name": "first_name last_name", "preferred_username": username, - "given_name": user.first_name, - "family_name": user.last_name, - "email": user.email, + "given_name": "first_name", + "family_name": "last_name", + "email": "dummy@example.com", } if pbench_client_roles: payload["resource_access"].update({client_id: {"roles": pbench_client_roles}}) diff --git a/lib/pbench/test/unit/server/database/__init__.py b/lib/pbench/test/unit/server/database/__init__.py index 3d6b6be66d..6afb0ffc74 100644 --- a/lib/pbench/test/unit/server/database/__init__.py +++ b/lib/pbench/test/unit/server/database/__init__.py @@ -220,6 +220,10 @@ def filter(self, *criteria) -> "FakeQuery": self.session.filters.append(", ".join(filters)) return self + def count(self) -> int: + """Returns the number of results filter query returned""" + return int(len(self.session.filters)) + def order_by(self, column: Column) -> "FakeQuery": """Sort the currently selected records by a specified column""" self.selected.sort(key=lambda o: getattr(o, column.key)) @@ -250,6 +254,7 @@ def __init__(self, cls): self.id = 1 self.cls = cls self.added: list[Database.Base] = [] + self.removed: list[Database.Base] = [] self.known: dict[int, Database.Base] = {} self.committed: dict[int, FakeRow] = {} self.rolledback = 0 @@ -265,6 +270,10 @@ def reset_context(self): self.raise_on_commit = None __class__.throw_query = False + def remove(self): + """Mocks the db session remove method""" + self.reset_context() + def query(self, *entities, **kwargs) -> FakeQuery: """Perform a mocked query on the session, setting up the query context and returning it @@ -290,6 +299,14 @@ def add(self, instance: Database.Base): """ self.added.append(instance) + def delete(self, instance: Database.Base): + """Delete a DB object from a list for testing + + Args: + instance: A DB object + """ + self.removed.append(instance) + def commit(self): """Mock a commit operation on the DB session. If the 'raise_on_commit' property has been set, "fail" by raising the exception. 
Otherwise, @@ -303,7 +320,7 @@ def commit(self): for k, object in self.known.items(): self.committed[k] = FakeRow.clone(object) for a in self.added: - a.id = self.id + a.id = self.id if not a.id else a.id self.id += 1 for c in a.__table__._columns: if c.default: @@ -314,7 +331,10 @@ def commit(self): setattr(a, c.name, default.arg(None)) self.known[a.id] = a self.committed[a.id] = FakeRow.clone(a) + for r in self.removed: + del self.committed[r.id] self.added = [] + self.removed = [] def rollback(self): """Just record that rollback was called, since we always raise an error @@ -374,10 +394,11 @@ def check_session( # state and 'rollback' clears the list. We don't ever expect to see # anything on this list. assert not self.added + assert not self.removed # Check that the 'committed' list (which stands in for the actual DB # table) contains the expected rows. - assert committed is None or sorted(self.committed.values()) == sorted(committed) + assert committed is None or all(i in self.committed.values() for i in committed) # Check whether we've rolled back transaction(s). assert self.rolledback == rolledback diff --git a/lib/pbench/test/unit/server/database/test_datasets_db.py b/lib/pbench/test/unit/server/database/test_datasets_db.py index fa8564fe5e..fe8445622b 100644 --- a/lib/pbench/test/unit/server/database/test_datasets_db.py +++ b/lib/pbench/test/unit/server/database/test_datasets_db.py @@ -2,8 +2,6 @@ import pytest from pbench.server.database.models.datasets import Dataset, DatasetNotFound -from pbench.server.database.models.users import User -from pbench.test.unit.server import DRB_USER_ID class TestDatasets: @@ -11,17 +9,17 @@ def test_construct(self, db_session, create_user): """Test dataset contructor""" user = create_user with freeze_time("1970-01-01"): - ds = Dataset(owner_id=str(user.id), name="fio", resource_id="f00b0ad") + ds = Dataset(owner=user, name="fio", resource_id="f00b0ad") ds.add() - assert ds.owner_id == str(user.id) + assert ds.owner.get_json() == user.get_json() assert ds.name == "fio" assert ds.resource_id == "f00b0ad" assert ds.id is not None - assert f"({user.id})|fio" == str(ds) + assert f"({user.username})|fio" == str(ds) assert ds.as_dict() == { "access": "private", "name": "fio", - "owner_id": str(user.id), + "owner": str(user.username), "uploaded": "1970-01-01T00:00:00+00:00", "metalog": None, "operations": {}, @@ -32,9 +30,9 @@ def test_dataset_survives_user(self, db_session, create_user): user is removed. 
""" user = create_user - ds = Dataset(owner_id=str(user.id), name="fio", resource_id="deadbeef") + ds = Dataset(owner=user, name="fio", resource_id="deadbeef") ds.add() - User.delete(username=user.username) + user.delete() ds1 = Dataset.query(resource_id="deadbeef") assert ds1 == ds @@ -47,7 +45,7 @@ def test_dataset_metadata_log(self, db_session, create_user, provide_metadata): assert ds1.as_dict() == { "access": "private", "name": "drb", - "owner_id": DRB_USER_ID, + "owner": "drb", "uploaded": "2022-01-01T00:00:00+00:00", "metalog": { "pbench": { @@ -63,21 +61,19 @@ def test_dataset_metadata_log(self, db_session, create_user, provide_metadata): def test_query_name(self, db_session, create_user): """Test that we can find a dataset by name alone""" - ds1 = Dataset(owner_id=str(create_user.id), resource_id="deed1e", name="fio") + ds1 = Dataset(owner=create_user, resource_id="deed1e", name="fio") ds1.add() ds2 = Dataset.query(name="fio") assert ds2.name == "fio" - assert ds2.owner_id == ds1.owner_id + assert ds2.owner == ds1.owner assert ds2.name == ds1.name assert ds2.resource_id == ds1.resource_id assert ds2.id == ds1.id def test_delete(self, db_session, create_user): """Test that we can delete a dataset""" - ds1 = Dataset( - owner_id=str(create_user.id), name="foobar", resource_id="f00dea7" - ) + ds1 = Dataset(owner=create_user, name="foobar", resource_id="f00dea7") ds1.add() # we can find it diff --git a/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py b/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py index 0680072828..01f9a22821 100644 --- a/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py +++ b/lib/pbench/test/unit/server/database/test_datasets_metadata_db.py @@ -35,7 +35,7 @@ def test_metadata(self, more_datasets): assert m.dataset_ref == m1.dataset_ref # Check the str() - assert "(3)|drb>>global" == str(m) + assert "(drb)|drb>>global" == str(m) # Try to get a metadata key that doesn't exist with pytest.raises(MetadataNotFound) as exc: @@ -120,7 +120,7 @@ def test_dataset_full(self, provide_metadata, create_drb_user): assert metadata == { "access": "private", "name": "drb", - "owner_id": str(create_drb_user.id), + "owner": create_drb_user.username, "uploaded": "2022-01-01T00:00:00+00:00", "metalog": { "pbench": { @@ -183,44 +183,44 @@ def test_user_metadata(self, attach_dataset): """Various tests on user-mapped Metadata keys""" # See if we can create a metadata row ds = Dataset.query(name="drb") - user1 = str(User.query(username="drb").id) + user1 = User.query(username="drb") metadata_db_query = Database.db_session.query(Metadata) assert ds.metadatas == [] - t = Metadata.create(key="user", value=True, dataset=ds, user_id=user1) + t = Metadata.create(key="user", value=True, dataset=ds, user=user1) assert t is not None assert ds.metadatas == [t] - assert Database.db_session.query(Metadata).filter_by(user_id=user1).all() == [t] + assert Database.db_session.query(Metadata).filter_by(user=user1).all() == [t] - user2 = str(User.query(username="test").id) - d = Metadata.create(key="user", value=False, dataset=ds, user_id=user2) + user2 = User.query(username="test") + d = Metadata.create(key="user", value=False, dataset=ds, user=user2) assert d is not None assert ds.metadatas == [t, d] - assert metadata_db_query.filter_by(user_id=user1).all() == [t] - assert metadata_db_query.filter_by(user_id=user2).all() == [d] + assert metadata_db_query.filter_by(user=user1).all() == [t] + assert metadata_db_query.filter_by(user=user2).all() == [d] g = 
Metadata.create(key="user", value="text", dataset=ds) assert g is not None assert ds.metadatas == [t, d, g] - assert metadata_db_query.filter_by(user_id=user1).all() == [t] - assert metadata_db_query.filter_by(user_id=user2).all() == [d] + assert metadata_db_query.filter_by(user=user1).all() == [t] + assert metadata_db_query.filter_by(user=user2).all() == [d] assert Metadata.get(key="user", dataset=ds).value == "text" - assert Metadata.get(key="user", dataset=ds, user_id=user1).value is True - assert Metadata.get(key="user", dataset=ds, user_id=user2).value is False + assert Metadata.get(key="user", dataset=ds, user=user1).value is True + assert Metadata.get(key="user", dataset=ds, user=user2).value is False - Metadata.remove(key="user", dataset=ds, user_id=user1) - assert metadata_db_query.filter_by(user_id=user1).all() == [] - assert metadata_db_query.filter_by(user_id=user2).all() == [d] + Metadata.remove(key="user", dataset=ds, user=user1) + assert metadata_db_query.filter_by(user=user1).all() == [] + assert metadata_db_query.filter_by(user=user2).all() == [d] assert ds.metadatas == [d, g] Metadata.remove(key="user", dataset=ds) - assert metadata_db_query.filter_by(user_id=user1).all() == [] - assert metadata_db_query.filter_by(user_id=user2).all() == [d] + assert metadata_db_query.filter_by(user=user1).all() == [] + assert metadata_db_query.filter_by(user=user2).all() == [d] assert ds.metadatas == [d] - Metadata.remove(key="user", dataset=ds, user_id=user2) - assert metadata_db_query.filter_by(user_id=user1).all() == [] - assert metadata_db_query.filter_by(user_id=user2).all() == [] + Metadata.remove(key="user", dataset=ds, user=user2) + assert metadata_db_query.filter_by(user=user1).all() == [] + assert metadata_db_query.filter_by(user=user2).all() == [] assert ds.metadatas == [] # Peek under the carpet to look for orphaned metadata objects linked @@ -229,9 +229,9 @@ def test_user_metadata(self, attach_dataset): assert metadata is None metadata = metadata_db_query.filter( or_( - Metadata.user_id == user1, - Metadata.user_id == user2, - Metadata.user_id is None, + Metadata.user == user1, + Metadata.user == user2, + Metadata.user is None, ) ).first() assert metadata is None @@ -273,7 +273,7 @@ def test_get_bad_path(self, attach_dataset): assert exc.value.element == "contact" assert ( str(exc.value) - == "Key 'contact' value for 'global.contact.email' in (3)|drb is not a JSON object" + == "Key 'contact' value for 'global.contact.email' in (drb)|drb is not a JSON object" ) def test_set_bad_path(self, attach_dataset): @@ -289,7 +289,7 @@ def test_set_bad_path(self, attach_dataset): assert exc.value.element == "contact" assert ( str(exc.value) - == "Key 'contact' value for 'global.contact.email' in (3)|drb is not a JSON object" + == "Key 'contact' value for 'global.contact.email' in (drb)|drb is not a JSON object" ) def test_get_outer_path(self, attach_dataset): @@ -352,19 +352,19 @@ def test_setgetvalue_user(self, attach_dataset): None, as we use this column only for the "user" key value.) 
""" ds = Dataset.query(name="drb") - user1 = str(User.query(username="drb").id) - user2 = str(User.query(username="test").id) + user1 = User.query(username="drb") + user2 = User.query(username="test") Metadata.setvalue(dataset=ds, key="user.contact", value="Barney") - Metadata.setvalue(dataset=ds, key="user.contact", value="Fred", user_id=user2) - Metadata.setvalue(dataset=ds, key="user.contact", value="Wilma", user_id=user1) + Metadata.setvalue(dataset=ds, key="user.contact", value="Fred", user=user2) + Metadata.setvalue(dataset=ds, key="user.contact", value="Wilma", user=user1) - assert Metadata.getvalue(dataset=ds, user_id=None, key="user") == { + assert Metadata.getvalue(dataset=ds, user=None, key="user") == { "contact": "Barney" } - assert Metadata.getvalue(dataset=ds, user_id=user2, key="user") == { + assert Metadata.getvalue(dataset=ds, user=user2, key="user") == { "contact": "Fred" } - assert Metadata.getvalue(dataset=ds, user_id=user1, key="user") == { + assert Metadata.getvalue(dataset=ds, user=user1, key="user") == { "contact": "Wilma" } @@ -400,7 +400,7 @@ def test_mutable_dataset_name_bad(self, attach_dataset, value): Metadata.setvalue(ds, "dataset.name", value) assert ( str(exc.value) - == f"Metadata key 'dataset.name' value {value!r} for dataset (3)|drb must be a UTF-8 string of 1 to 1024 characters" + == f"Metadata key 'dataset.name' value {value!r} for dataset (drb)|drb must be a UTF-8 string of 1 to 1024 characters" ) assert Metadata.getvalue(ds, "dataset.name") == name @@ -415,11 +415,11 @@ def test_mutable_server(self, server_config, attach_dataset): [ ( "Not a date", - "Metadata key 'server.deletion' value 'Not a date' for dataset (3)|drb must be a date/time", + "Metadata key 'server.deletion' value 'Not a date' for dataset (drb)|drb must be a date/time", ), ( "2040-12-25", - "Metadata key 'server.deletion' value '2040-12-25' for dataset (3)|drb must be a date/time before 2031-12-30", + "Metadata key 'server.deletion' value '2040-12-25' for dataset (drb)|drb must be a date/time before 2031-12-30", ), ], ) diff --git a/lib/pbench/test/unit/server/database/test_users_db.py b/lib/pbench/test/unit/server/database/test_users_db.py new file mode 100644 index 0000000000..29a06d1697 --- /dev/null +++ b/lib/pbench/test/unit/server/database/test_users_db.py @@ -0,0 +1,126 @@ +import uuid + +import pytest +from sqlalchemy.exc import IntegrityError + +from pbench.server.database.database import Database +from pbench.server.database.models.datasets import Dataset, DatasetNotFound +from pbench.server.database.models.users import User, UserDuplicate, UserSqlError +from pbench.test.unit.server.database import FakeDBOrig, FakeRow, FakeSession + + +class TestUsers: + session = None + + @pytest.fixture() + def fake_db(self, monkeypatch, server_config): + """ + Fixture to mock a DB session for testing. + + We patch the SQLAlchemy db_session to our fake session. We also store a + server configuration object directly on the Database.Base (normally + done during DB initialization) because that can't be monkeypatched. 
+ """ + __class__.session = FakeSession(User) + monkeypatch.setattr(Database, "db_session", __class__.session) + Database.Base.config = server_config + yield monkeypatch + + @staticmethod + def add_dummy_user(fake_db): + dummy_user = User( + id=str(uuid.uuid4()), + username="dummy", + ) + dummy_user.add() + return dummy_user + + def test_construct(self, fake_db): + """Test User db contructor""" + user = self.add_dummy_user(fake_db) + assert user.username == "dummy" + + expected_commits = [FakeRow.clone(user)] + self.session.check_session(queries=0, committed=expected_commits) + self.session.reset_context() + + def test_is_admin(self, fake_db): + uuid4 = str(uuid.uuid4()) + user = User(id=uuid4, username="dummy_admin", roles=["ADMIN"]) + user.add() + assert user.is_admin() + assert user.id == uuid4 + user1 = User(id="1", username="non_admin") + user1.add() + assert not user1.is_admin() + + expected_commits = [FakeRow.clone(user), FakeRow.clone(user1)] + self.session.check_session(queries=0, committed=expected_commits) + self.session.reset_context() + + def test_user_survives_dataset_real_session(self, db_session, create_user): + """The User isn't automatically removed when the referenced + dataset is deleted. + """ + user = create_user + ds = Dataset(owner=user, name="fio", resource_id="deadbeef") + ds.add() + ds.delete() + with pytest.raises(DatasetNotFound): + Dataset.query(resource_id=ds.resource_id) + assert User.query(username=user.username) + + def test_construct_duplicate(self, fake_db): + """Test handling of User record duplicate value error""" + self.session.raise_on_commit = IntegrityError( + statement="", params="", orig=FakeDBOrig("UNIQUE constraint") + ) + with pytest.raises( + UserDuplicate, + match="Duplicate user setting in {'username': 'dummy', 'id': .*?, 'roles': \\[]}: UNIQUE constraint", + ): + self.add_dummy_user(fake_db) + self.session.check_session(rolledback=1) + self.session.reset_context() + + def test_user_update(self, fake_db): + """Test updating user roles""" + + data = {"roles": ["NEW_ROLE"]} + TestUsers.add_dummy_user(fake_db) + + user = User.query(username="dummy") + user.update(**data) + assert user.roles == ["NEW_ROLE"] + assert user._roles == "NEW_ROLE" + + expected_commits = [FakeRow.clone(user)] + self.session.check_session( + queries=1, committed=expected_commits, filters=["username=dummy"] + ) + self.session.reset_context() + + def test_user_delete(self, fake_db): + """Test deleting the user from the User table""" + self.add_dummy_user(fake_db) + user = User.query(username="dummy") + expected_commits = [FakeRow.clone(user)] + self.session.check_session( + queries=1, filters=["username=dummy"], committed=expected_commits + ) + assert user.username == "dummy" + user.delete() + self.session.check_session(queries=0, filters=[], committed=[]) + + def test_delete_exception(self, fake_db): + """Test exception raised during the delete operation""" + user = User( + id="1", + username="dummy", + ) + with pytest.raises( + UserSqlError, + match=r"deleting", + ): + user.delete() + self.session.reset_context() diff --git a/lib/pbench/test/unit/server/query_apis/test_datasets_update.py b/lib/pbench/test/unit/server/query_apis/test_datasets_update.py index da685be333..73f3e3b85d 100644 --- a/lib/pbench/test/unit/server/query_apis/test_datasets_update.py +++ b/lib/pbench/test/unit/server/query_apis/test_datasets_update.py @@ -222,9 +222,7 @@ def test_query_owner_publish( query_json["access"] = access if owner: query_json["owner"] = owner - assert_id = ( - 
str(create_drb_user.id) if owner == "drb" else str(create_user.id) - ) + assert_id = create_drb_user.id if owner == "drb" else create_user.id is_admin = build_auth_header["header_param"] == HeaderTypes.VALID_ADMIN if not HeaderTypes.is_valid(build_auth_header["header_param"]): diff --git a/lib/pbench/test/unit/server/test_datasets_metadata.py b/lib/pbench/test/unit/server/test_datasets_metadata.py index b584522164..16c4c638c7 100644 --- a/lib/pbench/test/unit/server/test_datasets_metadata.py +++ b/lib/pbench/test/unit/server/test_datasets_metadata.py @@ -113,7 +113,7 @@ def test_get2(self, query_get_as): "dataset": { "access": "private", "name": "drb", - "owner_id": "3", + "owner": "drb", "uploaded": "2022-01-01T00:00:00+00:00", "metalog": { "pbench": { @@ -418,7 +418,7 @@ def test_put_invalid_name(self, query_get_as, query_put_as): json = put.json assert json["message"] == "at least one specified metadata key is invalid" assert json["errors"] == [ - "Metadata key 'dataset.name' value 1 for dataset (3)|drb must be a UTF-8 string of 1 to 1024 characters" + "Metadata key 'dataset.name' value 1 for dataset (drb)|drb must be a UTF-8 string of 1 to 1024 characters" ] # verify that the values didn't change @@ -455,7 +455,7 @@ def test_put_invalid_deletion(self, query_get_as, query_put_as): json = put.json assert json["message"] == "at least one specified metadata key is invalid" assert json["errors"] == [ - "Metadata key 'server.deletion' value '1800-25-55' for dataset (3)|drb must be a date/time" + "Metadata key 'server.deletion' value '1800-25-55' for dataset (drb)|drb must be a date/time" ] # verify that the values didn't change @@ -500,7 +500,7 @@ def test_put_set_errors(self, capinternal, monkeypatch, query_get_as, query_put_ "errors": { "global.dashboard.nested.dummy": "Key 'nested' value for " "'global.dashboard.nested.dummy' " - "in (3)|drb is not a JSON object" + "in (drb)|drb is not a JSON object" }, "metadata": { "global.dashboard.nested.dummy": None, diff --git a/lib/pbench/test/unit/server/test_endpoint_configure.py b/lib/pbench/test/unit/server/test_endpoint_configure.py index a03ad32e53..20ed29242b 100644 --- a/lib/pbench/test/unit/server/test_endpoint_configure.py +++ b/lib/pbench/test/unit/server/test_endpoint_configure.py @@ -54,13 +54,9 @@ def check_config(self, client, server_config, host, my_headers={}): "datasets_search": f"{uri}/datasets/search", "datasets_values": f"{uri}/datasets/values", "endpoints": f"{uri}/endpoints", - "login": f"{uri}/login", - "logout": f"{uri}/logout", - "register": f"{uri}/register", "server_audit": f"{uri}/server/audit", "server_settings": f"{uri}/server/settings", "upload": f"{uri}/upload", - "user": f"{uri}/user", }, "uri": { "datasets": { @@ -114,9 +110,6 @@ def check_config(self, client, server_config, host, my_headers={}): }, }, "endpoints": {"template": f"{uri}/endpoints", "params": {}}, - "login": {"template": f"{uri}/login", "params": {}}, - "logout": {"template": f"{uri}/logout", "params": {}}, - "register": {"template": f"{uri}/register", "params": {}}, "server_audit": {"template": f"{uri}/server/audit", "params": {}}, "server_settings": { "template": f"{uri}/server/settings/{{key}}", @@ -126,10 +119,6 @@ def check_config(self, client, server_config, host, my_headers={}): "template": f"{uri}/upload/{{filename}}", "params": {"filename": {"type": "string"}}, }, - "user": { - "template": f"{uri}/user/{{target_username}}", - "params": {"target_username": {"type": "string"}}, - }, }, } diff --git a/lib/pbench/test/unit/server/test_schema.py 
b/lib/pbench/test/unit/server/test_schema.py index df8668b796..20babe2d79 100644 --- a/lib/pbench/test/unit/server/test_schema.py +++ b/lib/pbench/test/unit/server/test_schema.py @@ -125,12 +125,8 @@ def test_successful_conversions( self, client, monkeypatch, current_user_drb, ptype, kwd, value, expected ): user = User( - id=1, + id="1", username="drb", - password="password", - first_name="first_name", - last_name="last_name", - email="test@test.com", ) def ok(username: str) -> User: @@ -175,12 +171,8 @@ def test_unauthenticated_username(self, monkeypatch, current_user_none): client is unauthenticated even when the username exists. """ user = User( - id=1, username="drb", - password="password", - first_name="first_name", - last_name="last_name", - email="test@test.com", + id="1", ) def ok(username: str) -> User: diff --git a/lib/pbench/test/unit/server/test_user_management_cli.py b/lib/pbench/test/unit/server/test_user_management_cli.py index 295315c9ac..e4fd717943 100644 --- a/lib/pbench/test/unit/server/test_user_management_cli.py +++ b/lib/pbench/test/unit/server/test_user_management_cli.py @@ -1,5 +1,3 @@ -import datetime - from click.testing import CliRunner import pytest @@ -10,11 +8,7 @@ def create_user(): user = User( username=TestUserManagement.USER_TEXT, - password=TestUserManagement.PSWD_TEXT, - first_name=TestUserManagement.FIRST_NAME_TEXT, - last_name=TestUserManagement.LAST_NAME_TEXT, - email=TestUserManagement.EMAIL_TEXT, - registered_on=TestUserManagement.USER_CREATE_TIMESTAMP, + id=TestUserManagement.OIDC_ID_TEXT, ) return user @@ -24,14 +18,6 @@ def mock_valid_list(): return [user] -def mock_valid_delete(**kwargs): - return - - -def mock_valid_query(**kwargs): - return create_user() - - @pytest.fixture(autouse=True) def server_config_env(on_disk_server_config, monkeypatch): """Provide a pbench server configuration environment variable for all user @@ -42,112 +28,16 @@ def server_config_env(on_disk_server_config, monkeypatch): class TestUserManagement: - USER_SWITCH = "--username" - PSWD_SWITCH = "--password" - PSWD_PROMPT = "Password: " - EMAIL_SWITCH = "--email" - FIRST_NAME_SWITCH = "--first-name" - LAST_NAME_SWITCH = "--last-name" - ROLE_SWITCH = "--role" USER_TEXT = "test_user" - PSWD_TEXT = "password" - EMAIL_TEXT = "test@domain.com" - FIRST_NAME_TEXT = "First" - LAST_NAME_TEXT = "Last" - USER_CREATE_TIMESTAMP = datetime.datetime.now() + OIDC_ID_TEXT = "12345" @staticmethod def test_help(): runner = CliRunner(mix_stderr=False) - result = runner.invoke(cli.user_create, ["--help"]) + result = runner.invoke(cli.user_list, ["--help"]) assert result.exit_code == 0, result.stderr assert str(result.stdout).startswith("Usage:") - @staticmethod - def test_valid_user_registration_with_password_input(server_config): - runner = CliRunner(mix_stderr=False) - result = runner.invoke( - cli.user_create, - args=[ - TestUserManagement.USER_SWITCH, - TestUserManagement.USER_TEXT, - TestUserManagement.EMAIL_SWITCH, - TestUserManagement.EMAIL_TEXT, - TestUserManagement.FIRST_NAME_SWITCH, - TestUserManagement.FIRST_NAME_TEXT, - TestUserManagement.LAST_NAME_SWITCH, - TestUserManagement.LAST_NAME_TEXT, - ], - input=f"{TestUserManagement.PSWD_TEXT}\n", - ) - assert result.exit_code == 0, result.stderr - assert ( - result.stdout - == f"{TestUserManagement.PSWD_PROMPT}\n" + "User test_user registered\n" - ) - - @staticmethod - def test_admin_user_creation(server_config): - runner = CliRunner(mix_stderr=False) - result = runner.invoke( - cli.user_create, - args=[ - 
TestUserManagement.USER_SWITCH, - TestUserManagement.USER_TEXT, - TestUserManagement.PSWD_SWITCH, - TestUserManagement.PSWD_TEXT, - TestUserManagement.EMAIL_SWITCH, - TestUserManagement.EMAIL_TEXT, - TestUserManagement.FIRST_NAME_SWITCH, - TestUserManagement.FIRST_NAME_TEXT, - TestUserManagement.LAST_NAME_SWITCH, - TestUserManagement.LAST_NAME_TEXT, - TestUserManagement.ROLE_SWITCH, - "ADMIN", - ], - ) - assert result.exit_code == 0, result.stderr - assert result.stdout == "Admin user test_user registered\n" - - @staticmethod - def test_user_creation_with_invalid_role(server_config): - runner = CliRunner(mix_stderr=False) - result = runner.invoke( - cli.user_create, - args=[ - TestUserManagement.USER_SWITCH, - TestUserManagement.USER_TEXT, - TestUserManagement.PSWD_SWITCH, - TestUserManagement.PSWD_TEXT, - TestUserManagement.EMAIL_SWITCH, - TestUserManagement.EMAIL_TEXT, - TestUserManagement.FIRST_NAME_SWITCH, - TestUserManagement.FIRST_NAME_TEXT, - TestUserManagement.LAST_NAME_SWITCH, - TestUserManagement.LAST_NAME_TEXT, - TestUserManagement.ROLE_SWITCH, - "ADMN", - ], - ) - assert result.exit_code == 2, result.stderr - assert result.stderr.find("Invalid value for '--role'") > -1 - - @staticmethod - def test_valid_user_delete(monkeypatch, server_config): - monkeypatch.setattr(User, "delete", mock_valid_delete) - runner = CliRunner(mix_stderr=False) - - result = runner.invoke(cli.user_delete, args=[TestUserManagement.USER_TEXT]) - assert result.exit_code == 0, result.stderr - - @staticmethod - def test_invalid_user_delete(server_config): - runner = CliRunner(mix_stderr=False) - # Give username that doesn't exists to delete - result = runner.invoke(cli.user_delete, args=["wrong_username"]) - assert result.exit_code == 1 - assert result.stderr == "User wrong_username does not exist\n" - @staticmethod def test_valid_user_list(monkeypatch, server_config): monkeypatch.setattr(User, "query_all", mock_valid_list) @@ -162,71 +52,7 @@ def test_valid_user_list(monkeypatch, server_config): == cli.USER_LIST_HEADER_ROW + "\n" + cli.USER_LIST_ROW_FORMAT.format( - TestUserManagement.USER_TEXT, - TestUserManagement.FIRST_NAME_TEXT, - TestUserManagement.LAST_NAME_TEXT, - TestUserManagement.USER_CREATE_TIMESTAMP.strftime("%Y-%m-%d"), - TestUserManagement.EMAIL_TEXT, + TestUserManagement.USER_TEXT, TestUserManagement.OIDC_ID_TEXT ) + "\n" ) - - @staticmethod - @pytest.mark.parametrize( - "switch, value", - [ - (USER_SWITCH, "new_test"), - (EMAIL_SWITCH, "new_test@domain.com"), - (ROLE_SWITCH, "ADMIN"), - (FIRST_NAME_SWITCH, "newfirst"), - (LAST_NAME_SWITCH, "newlast"), - (LAST_NAME_SWITCH, "newuser"), - ], - ) - def test_valid_user_update(monkeypatch, server_config, switch, value): - user = create_user() - - def mock_valid_update(**kwargs): - for key, value in kwargs.items(): - setattr(user, key, value) - return - - monkeypatch.setattr(User, "query", mock_valid_query) - monkeypatch.setattr(user, "update", mock_valid_update) - - runner = CliRunner(mix_stderr=False) - result = runner.invoke( - cli.user_update, - args=[TestUserManagement.USER_TEXT, switch, value], - ) - assert result.exit_code == 0, result.stderr - assert result.stdout == "User test_user updated\n" - - @staticmethod - def test_invalid_role_update(server_config): - runner = CliRunner(mix_stderr=False) - - # Update with invalid role for the user - result = runner.invoke( - cli.user_update, - args=["test_user", TestUserManagement.ROLE_SWITCH, "ADMN"], - ) - assert result.exit_code == 2 - assert result.stderr.find("Invalid value for '--role'") > -1 
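The tests deleted in this hunk, and the surviving user_list test, all lean on the same Click CliRunner pattern: invoke the command in-process and assert on exit_code, stdout, and stderr. A toy, self-contained example of that pattern (the greet command is invented for illustration and is not part of pbench):

    import click
    from click.testing import CliRunner


    @click.command()
    @click.option("--name", required=True, help="who to greet")
    def greet(name):
        """Toy command used only to demonstrate the testing pattern."""
        click.echo(f"Hello {name}")


    def test_greet():
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(greet, ["--name", "test_user"])
        assert result.exit_code == 0, result.stderr
        assert result.stdout == "Hello test_user\n"
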
- - @staticmethod - def test_invalid_user_update(server_config): - runner = CliRunner(mix_stderr=False) - - # Update with non-existent username - result = runner.invoke( - cli.user_update, - args=[ - TestUserManagement.USER_TEXT, - TestUserManagement.EMAIL_SWITCH, - "new_test@domain.com", - ], - ) - assert result.exit_code == 1 - assert result.stdout == f"User {TestUserManagement.USER_TEXT} doesn't exist\n" - assert not result.stderr_bytes
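
The new test_users_db.py above relies on the same technique as the other database unit tests: swap the module-level SQLAlchemy session for a recording fake via monkeypatch, then assert on what the code under test added, deleted, and committed. A stripped-down illustration of that pattern in which every name is invented:

    class FakeSession:
        """Record what the code under test does instead of talking to a real DB."""

        def __init__(self):
            self.added = []
            self.removed = []
            self.committed = []

        def add(self, obj):
            self.added.append(obj)

        def delete(self, obj):
            self.removed.append(obj)

        def commit(self):
            self.committed.extend(self.added)
            self.added.clear()


    class Repo:
        """Stand-in for a class holding a module-level db_session."""

        db_session = None

        @classmethod
        def save(cls, obj):
            cls.db_session.add(obj)
            cls.db_session.commit()


    def test_save(monkeypatch):
        fake = FakeSession()
        monkeypatch.setattr(Repo, "db_session", fake)
        Repo.save("row")
        assert fake.committed == ["row"]
        assert fake.added == []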