diff --git a/build.gradle b/build.gradle index 99ddb470..55d7bc5e 100644 --- a/build.gradle +++ b/build.gradle @@ -29,16 +29,17 @@ dependencies { implementation 'org.springframework.boot:spring-boot-starter-data-jpa' implementation 'org.springframework.boot:spring-boot-starter-web' implementation 'org.springframework.boot:spring-boot-starter-validation' - testImplementation 'org.projectlombok:lombok' - testImplementation 'org.projectlombok:lombok' - testCompileOnly 'org.projectlombok:lombok' - testAnnotationProcessor 'org.projectlombok:lombok' developmentOnly 'org.springframework.boot:spring-boot-devtools' - compileOnly 'org.projectlombok:lombok' - annotationProcessor 'org.projectlombok:lombok' testImplementation 'org.springframework.boot:spring-boot-starter-test' testRuntimeOnly 'org.junit.platform:junit-platform-launcher' + // Lombok + compileOnly 'org.projectlombok:lombok' + annotationProcessor 'org.projectlombok:lombok' + testCompileOnly 'org.projectlombok:lombok' + testImplementation 'org.projectlombok:lombok' + testAnnotationProcessor 'org.projectlombok:lombok' + // 인증사 관련 의존성 implementation 'javax.servlet:jstl:1.2' implementation "org.apache.tomcat.embed:tomcat-embed-jasper" @@ -69,7 +70,6 @@ dependencies { testImplementation 'org.testcontainers:junit-jupiter:1.19.3' testImplementation 'org.testcontainers:mysql:1.20.0' - // security implementation 'org.springframework.boot:spring-boot-starter-security' implementation 'org.springframework.boot:spring-boot-starter-oauth2-client' @@ -103,11 +103,6 @@ dependencies { runtimeOnly 'com.h2database:h2' - testImplementation 'org.springframework.boot:spring-boot-testcontainers:3.3.5' - testImplementation 'org.testcontainers:testcontainers:1.19.3' - testImplementation 'org.testcontainers:junit-jupiter:1.19.3' - testImplementation 'org.testcoscntainers:mysql:1.20.0' - annotationProcessor "org.springframework.boot:spring-boot-configuration-processor" implementation 'org.apache.commons:commons-pool2:2.12.1' diff --git a/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserPersistenceProcessor.java b/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserPersistenceProcessor.java new file mode 100644 index 00000000..3e823364 --- /dev/null +++ b/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserPersistenceProcessor.java @@ -0,0 +1,47 @@ +package life.mosu.mosuserver.application.oauth; + +import life.mosu.mosuserver.domain.user.entity.AuthProvider; +import life.mosu.mosuserver.domain.user.entity.UserJpaEntity; +import life.mosu.mosuserver.domain.user.entity.UserRole; +import life.mosu.mosuserver.domain.user.repository.UserJpaRepository; +import life.mosu.mosuserver.global.processor.StepProcessor; +import lombok.RequiredArgsConstructor; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +@Component +@RequiredArgsConstructor +public class OAuthUserPersistenceProcessor implements StepProcessor { + + private final UserJpaRepository userRepository; + + @Override + @Transactional + public UserJpaEntity process(final OAuthUserInfo info) { + return userRepository.findByLoginId(info.email()) + .map(existingUser -> { + existingUser.updateOAuthUser( + info.gender(), + info.name(), + info.phoneNumber(), + info.birthDay(), + info.marketingAgreed()); + return existingUser; + }) + .orElseGet(() -> { + final UserJpaEntity newUser = UserJpaEntity.builder() + .loginId(info.email()) + .gender(info.gender()) + .name(info.name()) + .birth(info.birthDay()) + 
.phoneNumber(info.phoneNumber()) + .userRole(UserRole.ROLE_PENDING) + .provider(AuthProvider.KAKAO) + .agreedToTermsOfService(true) + .agreedToPrivacyPolicy(true) + .agreedToMarketing(info.marketingAgreed()) + .build(); + return userRepository.save(newUser); + }); + } +} diff --git a/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserService.java b/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserService.java index f67b968d..3c0ddf67 100644 --- a/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserService.java +++ b/src/main/java/life/mosu/mosuserver/application/oauth/OAuthUserService.java @@ -1,15 +1,10 @@ package life.mosu.mosuserver.application.oauth; -import java.time.LocalDate; import java.util.Collections; import java.util.List; import java.util.Map; -import life.mosu.mosuserver.domain.profile.entity.Gender; import life.mosu.mosuserver.domain.profile.repository.ProfileJpaRepository; -import life.mosu.mosuserver.domain.user.entity.AuthProvider; import life.mosu.mosuserver.domain.user.entity.UserJpaEntity; -import life.mosu.mosuserver.domain.user.entity.UserRole; -import life.mosu.mosuserver.domain.user.repository.UserJpaRepository; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.ParameterizedTypeReference; @@ -25,7 +20,7 @@ @RequiredArgsConstructor public class OAuthUserService extends DefaultOAuth2UserService { - private final UserJpaRepository userRepository; + private final OAuthUserPersistenceProcessor oAuthUserPersistenceProcessor; private final ProfileJpaRepository profileRepository; private final WebClient webClient; @@ -44,12 +39,15 @@ public OAuth2User loadUser(final OAuth2UserRequest userRequest) agreedToMarketing = termsList.stream() .filter(term -> term instanceof Map) .map(term -> (Map) term) - .filter(termMap -> "terms_03".equals(termMap.get("tag"))) + .filter(termMap -> + "terms_03".equals(termMap.get("tag"))) .findFirst() .map(termMap -> (Boolean) termMap.get("agreed")) .orElse(false); } + log.info("동의 여부{}", agreedToMarketing); + final String registrationId = userRequest.getClientRegistration().getRegistrationId(); final String userNameAttributeName = userRequest.getClientRegistration() .getProviderDetails() @@ -59,7 +57,7 @@ public OAuth2User loadUser(final OAuth2UserRequest userRequest) final OAuthUserInfo userInfo = OAuthUserInfo.of(OAuthProvider.from(registrationId), oAuth2UserAttributes, agreedToMarketing); - final UserJpaEntity oAuthUser = updateOrWrite(userInfo); + final UserJpaEntity oAuthUser = oAuthUserPersistenceProcessor.process(userInfo); Boolean isProfileRegistered = profileRepository.existsByUserId(oAuthUser.getId()); @@ -67,35 +65,6 @@ public OAuth2User loadUser(final OAuth2UserRequest userRequest) isProfileRegistered); } - private UserJpaEntity updateOrWrite(final OAuthUserInfo info) { - return userRepository.findByLoginId(info.email()) - .map(existingUser -> { - existingUser.updateOAuthUser( - info.gender(), - info.name(), - info.phoneNumber(), - info.birthDay() != null ? info.birthDay() : LocalDate.of(1900, 1, 1)); - return existingUser; - }) - .orElseGet(() -> { - final UserJpaEntity newUser = UserJpaEntity.builder() - .loginId(info.email() != null ? info.email() : "NA") - .gender(info.gender() != null ? info.gender() : Gender.PENDING) - .name(info.name() != null ? info.name() : "NA") - .birth(info.birthDay() != null ? info.birthDay() - : LocalDate.EPOCH) - .phoneNumber(info.phoneNumber() != null ? 
info.phoneNumber() - : "010-0000-0000") - .userRole(UserRole.ROLE_PENDING) - .provider(AuthProvider.KAKAO) - .agreedToTermsOfService(true) - .agreedToPrivacyPolicy(true) - .agreedToMarketing(info.marketingAgreed()) - .build(); - return userRepository.save(newUser); - }); - } - private Map getServiceTerms(String accessToken) { String url = "https://kapi.kakao.com/v2/user/service_terms"; diff --git a/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/BlockedIp.java b/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/BlockedIp.java index 0c2d59c4..77d54a97 100644 --- a/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/BlockedIp.java +++ b/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/BlockedIp.java @@ -6,15 +6,18 @@ @Getter public class BlockedIp { + private final TimePenalty penaltyLevel; public BlockedIp(TimePenalty penaltyLevel) { this.penaltyLevel = penaltyLevel; } - public Duration getTtl(){ - return penaltyLevel.getDuration(); + public static BlockedIp init() { + return new BlockedIp(TimePenalty.LEVEL_0); } - + public Duration getTtl() { + return penaltyLevel.getDuration(); + } } diff --git a/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/RequestCounter.java b/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/RequestCounter.java index 7e852423..c05c272b 100644 --- a/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/RequestCounter.java +++ b/src/main/java/life/mosu/mosuserver/domain/caffeine/dto/RequestCounter.java @@ -1,12 +1,16 @@ package life.mosu.mosuserver.domain.caffeine.dto; -import lombok.Getter; +import java.util.concurrent.atomic.AtomicInteger; -@Getter public class RequestCounter { - private int count = 0; - public void increment() { - count++; + private final AtomicInteger count = new AtomicInteger(); + + public int incrementAndGet() { + return count.incrementAndGet(); + } + + public int getCount() { + return count.get(); } } diff --git a/src/main/java/life/mosu/mosuserver/domain/user/entity/UserJpaEntity.java b/src/main/java/life/mosu/mosuserver/domain/user/entity/UserJpaEntity.java index 280b4a13..a936bcc2 100644 --- a/src/main/java/life/mosu/mosuserver/domain/user/entity/UserJpaEntity.java +++ b/src/main/java/life/mosu/mosuserver/domain/user/entity/UserJpaEntity.java @@ -93,12 +93,14 @@ public void updateOAuthUser( Gender gender, String name, String phoneNumber, - LocalDate birth + LocalDate birth, + boolean agreedToMarketing ) { this.gender = gender; this.name = name; this.phoneNumber = phoneNumber; this.birth = birth; + this.agreedToMarketing = agreedToMarketing; } public void updateUserInfo( diff --git a/src/main/java/life/mosu/mosuserver/global/config/CaffeineCacheConfig.java b/src/main/java/life/mosu/mosuserver/global/config/CaffeineCacheConfig.java index 627b9545..79d43652 100644 --- a/src/main/java/life/mosu/mosuserver/global/config/CaffeineCacheConfig.java +++ b/src/main/java/life/mosu/mosuserver/global/config/CaffeineCacheConfig.java @@ -5,8 +5,8 @@ import com.github.benmanes.caffeine.cache.Expiry; import com.github.benmanes.caffeine.cache.LoadingCache; import java.util.concurrent.TimeUnit; -import life.mosu.mosuserver.domain.caffeine.dto.BlockedIpHistory; import life.mosu.mosuserver.domain.caffeine.dto.BlockedIp; +import life.mosu.mosuserver.domain.caffeine.dto.BlockedIpHistory; import life.mosu.mosuserver.domain.caffeine.dto.RequestCounter; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -41,12 +41,14 @@ public long expireAfterCreate(String key, BlockedIp 
value, long currentTime) { } @Override - public long expireAfterUpdate(String key, BlockedIp value, long currentTime, long currentDuration) { - return currentDuration; + public long expireAfterUpdate(String key, BlockedIp value, long currentTime, + long currentDuration) { + return value.getTtl().toNanos(); } @Override - public long expireAfterRead(String key, BlockedIp value, long currentTime, long currentDuration) { + public long expireAfterRead(String key, BlockedIp value, long currentTime, + long currentDuration) { return currentDuration; } }) diff --git a/src/main/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilter.java b/src/main/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilter.java index 49f1f3bb..053d2e47 100644 --- a/src/main/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilter.java +++ b/src/main/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilter.java @@ -9,13 +9,12 @@ import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; import java.io.IOException; +import life.mosu.mosuserver.domain.caffeine.dto.BlockedIp; +import life.mosu.mosuserver.domain.caffeine.dto.BlockedIpHistory; +import life.mosu.mosuserver.domain.caffeine.dto.RequestCounter; import life.mosu.mosuserver.global.config.IpRateLimitingProperties; import life.mosu.mosuserver.global.exception.CustomRuntimeException; import life.mosu.mosuserver.global.exception.ErrorCode; -import life.mosu.mosuserver.domain.caffeine.dto.BlockedIpHistory; -import life.mosu.mosuserver.domain.caffeine.dto.BlockedIp; - -import life.mosu.mosuserver.domain.caffeine.dto.RequestCounter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; @@ -31,59 +30,60 @@ public class IpRateLimitingFilter extends OncePerRequestFilter { private final Cache blockedHistoryCache; private final LoadingCache blockedIpCache; - @Override - protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, - FilterChain filterChain) - throws ServletException, IOException { + protected void doFilterInternal( + HttpServletRequest request, + HttpServletResponse response, + FilterChain filterChain + ) throws ServletException, IOException { if (!ipRateLimitingProperties.isEnabled()) { - log.info("IpRateLimitingFilter disabled"); + log.debug("IpRateLimitingFilter disabled"); filterChain.doFilter(request, response); return; } String ip = getClientIp(request); - isAlreadyBlocked(ip); RequestCounter counter = ipRequestCountsCache.get(ip, k -> new RequestCounter()); + int after = counter.incrementAndGet(); + int max = ipRateLimitingProperties.getMaxRequestsPerMinute(); - synchronized (counter) { - counter.increment(); - - if (isOverPerMaxRequest(counter)) { - log.warn("차단된 IP: {}, 요청 횟수: {}", ip, counter.getCount()); - handleBlockedIp(ip); - } + if (after > max) { + handleBlockedIp(ip); } - log.debug("IP: {}, 요청 횟수 증가 후: {}", ip, counter.getCount()); + log.debug("IP: {}, 요청 횟수 증가 후: {}", ip, after); log.debug("Cache stats: {}", ipRequestCountsCache.stats()); filterChain.doFilter(request, response); } - private boolean isOverPerMaxRequest(RequestCounter counter) { - return counter.getCount() >= ipRateLimitingProperties.getMaxRequestsPerMinute(); - } - private void handleBlockedIp(String ip) { - BlockedIpHistory history = blockedHistoryCache.get(ip, k -> new BlockedIpHistory(ip)); - TimePenalty nextPenaltyLevel = history.getPenaltyLevel().nextLevel(); - history.updateHistory(nextPenaltyLevel); + BlockedIp existing 
= blockedIpCache.asMap().putIfAbsent(ip, BlockedIp.init()); + if (existing != null) { + log.warn("이미 차단된 IP: {}, 차단 레벨: {}", ip, existing.getPenaltyLevel()); + throw new CustomRuntimeException(ErrorCode.TOO_MANY_REQUESTS); + } + + TimePenalty level = blockedHistoryCache.asMap().compute(ip, (k, history) -> { + BlockedIpHistory h = (history == null) ? new BlockedIpHistory(ip) : history; + TimePenalty next = h.getPenaltyLevel().nextLevel(); + h.updateHistory(next); + return h; + }).getPenaltyLevel(); - blockedIpCache.invalidate(ip); - blockedIpCache.put(ip, new BlockedIp(nextPenaltyLevel)); - log.warn("IP 차단: {}, 차단 레벨: {})", ip, nextPenaltyLevel); + blockedIpCache.asMap().computeIfPresent(ip, (k, v) -> new BlockedIp(level)); throw new CustomRuntimeException(ErrorCode.TOO_MANY_REQUESTS); } - private void isAlreadyBlocked(String requestedIp) { - if(blockedIpCache.getIfPresent(requestedIp) != null){ - log.warn("이미 차단된 IP: {}", requestedIp); + private void isAlreadyBlocked(String ip) { + BlockedIp blockedIp = blockedIpCache.getIfPresent(ip); + if (blockedIp != null) { + log.warn("이미 차단된 IP: {}, 차단 레벨: {}", ip, blockedIp.getPenaltyLevel()); throw new CustomRuntimeException(ErrorCode.TOO_MANY_REQUESTS); } } -} +} \ No newline at end of file diff --git a/src/main/java/life/mosu/mosuserver/global/filter/TimePenalty.java b/src/main/java/life/mosu/mosuserver/global/filter/TimePenalty.java index 9a36da15..12770172 100644 --- a/src/main/java/life/mosu/mosuserver/global/filter/TimePenalty.java +++ b/src/main/java/life/mosu/mosuserver/global/filter/TimePenalty.java @@ -7,7 +7,7 @@ @Getter @RequiredArgsConstructor public enum TimePenalty { - LEVEL_0(0, Duration.ZERO), + LEVEL_0(0, Duration.ofSeconds(10)), LEVEL_1(1, Duration.ofMinutes(1)), LEVEL_2(2, Duration.ofMinutes(5)), LEVEL_3(3, Duration.ofMinutes(30)), diff --git a/src/main/resources/db/migration/V1__init.sql b/src/main/resources/db/migration/V1__init.sql new file mode 100644 index 00000000..5b9166af --- /dev/null +++ b/src/main/resources/db/migration/V1__init.sql @@ -0,0 +1,383 @@ +CREATE TABLE application +( + application_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + deleted BIT(1) NULL, + user_id BIGINT NULL, + parent_phone_number VARCHAR(255) NULL, + application_status VARCHAR(255) NOT NULL, + agreed_to_notices BIT(1) NULL, + agreed_to_refund_policy BIT(1) NULL, + CONSTRAINT pk_application PRIMARY KEY (application_id) +); + +CREATE TABLE application_failure_log +( + application_failure_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + application_id BIGINT NOT NULL, + user_id BIGINT NOT NULL, + reason VARCHAR(255) NOT NULL, + snapshot TEXT NULL, + CONSTRAINT pk_application_failure_log PRIMARY KEY (application_failure_id) +); + +CREATE TABLE banner +( + deleted BIT(1) NOT NULL, + id BIGINT AUTO_INCREMENT NOT NULL, + file_name VARCHAR(255) NULL, + s3key VARCHAR(255) NULL, + visibility VARCHAR(255) NULL, + created_at datetime NULL, + updated_at datetime NULL, + title VARCHAR(255) NULL, + dead_line datetime NULL, + banner_link VARCHAR(255) NULL, + CONSTRAINT pk_banner PRIMARY KEY (id) +); + +CREATE TABLE blocked_ip_history_log +( + id BIGINT AUTO_INCREMENT NOT NULL, + ip VARCHAR(255) NULL, + penalty_level VARCHAR(255) NULL, + blocked_at datetime NULL, + CONSTRAINT pk_blocked_ip_history_log PRIMARY KEY (id) +); + +CREATE TABLE event +( + deleted BIT(1) NOT NULL, + event_id BIGINT AUTO_INCREMENT NOT NULL, + file_name VARCHAR(255) NULL, + s3key VARCHAR(255) NULL, 
+ visibility VARCHAR(255) NULL, + created_at datetime NULL, + updated_at datetime NULL, + event_title VARCHAR(255) NOT NULL, + event_link VARCHAR(255) NULL, + start_date date NULL, + end_date date NULL, + CONSTRAINT pk_event PRIMARY KEY (event_id) +); + +CREATE TABLE exam +( + deleted BIT(1) NOT NULL, + id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + school_name VARCHAR(255) NULL, + area VARCHAR(255) NULL, + capacity INT NULL, + deadline_time datetime NULL, + exam_date date NOT NULL, + lunch_name VARCHAR(255) NULL, + lunch_price INT NULL, + exam_status VARCHAR(255) NOT NULL, + zipcode VARCHAR(255) NULL, + street VARCHAR(255) NULL, + detail VARCHAR(255) NULL, + CONSTRAINT pk_exam PRIMARY KEY (id) +); + +CREATE TABLE exam_application +( + id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + deleted BIT(1) NULL, + application_id BIGINT NULL, + user_id BIGINT NULL, + exam_id BIGINT NULL, + lunch_checked BIT(1) NULL, + exam_number VARCHAR(255) NULL, + CONSTRAINT pk_exam_application PRIMARY KEY (id) +); + +CREATE TABLE exam_subject +( + id BIGINT AUTO_INCREMENT NOT NULL, + exam_application_id BIGINT NULL, + subject VARCHAR(255) NULL, + CONSTRAINT pk_exam_subject PRIMARY KEY (id) +); + +CREATE TABLE exam_ticket_image +( + exam_ticket_image_id BIGINT AUTO_INCREMENT NOT NULL, + file_name VARCHAR(255) NULL, + s3key VARCHAR(255) NULL, + visibility VARCHAR(255) NULL, + application_id BIGINT NOT NULL, + CONSTRAINT pk_exam_ticket_image PRIMARY KEY (exam_ticket_image_id) +); + +CREATE TABLE faq +( + deleted BIT(1) NOT NULL, + faq_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + question VARCHAR(500) NOT NULL, + answer VARCHAR(255) NOT NULL, + author VARCHAR(255) NOT NULL, + user_id BIGINT NOT NULL, + CONSTRAINT pk_faq PRIMARY KEY (faq_id) +); + +CREATE TABLE file_move_fail_log +( + id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + faq_id BIGINT NULL, + s3key VARCHAR(255) NULL, + destination_folder SMALLINT NULL, + CONSTRAINT pk_filemovefaillog PRIMARY KEY (id) +); + +CREATE TABLE form +( + form_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + exam_date date NOT NULL, + org_name VARCHAR(255) NULL, + password VARCHAR(255) NULL, + user_name VARCHAR(255) NULL, + gender VARCHAR(255) NULL, + birth date NULL, + phone_number VARCHAR(255) NULL, + subjects VARCHAR(255) NULL, + subjects2 VARCHAR(255) NULL, + lunch BIT(1) NULL, + area VARCHAR(255) NULL, + school_name VARCHAR(255) NULL, + file_name VARCHAR(255) NULL, + s3_key VARCHAR(255) NULL, + CONSTRAINT pk_form PRIMARY KEY (form_id) +); + +CREATE TABLE inquiry +( + deleted BIT(1) NOT NULL, + inquiry_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + title VARCHAR(300) NOT NULL, + content VARCHAR(1000) NOT NULL, + user_id BIGINT NOT NULL, + author VARCHAR(255) NULL, + status VARCHAR(255) NULL, + CONSTRAINT pk_inquiry PRIMARY KEY (inquiry_id) +); + +CREATE TABLE inquiry_answer +( + deleted BIT(1) NOT NULL, + inquiry_answer_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + title VARCHAR(300) NOT NULL, + content VARCHAR(1000) NOT NULL, + inquiry_id BIGINT NOT NULL, + author VARCHAR(255) NOT NULL, + user_id BIGINT NOT NULL, + CONSTRAINT pk_inquiry_answer PRIMARY KEY (inquiry_answer_id) +); + +CREATE TABLE inquiry_answer_attachment +( + inquiry_answer_attachment_id BIGINT 
AUTO_INCREMENT NOT NULL, + file_name VARCHAR(255) NULL, + s3key VARCHAR(255) NULL, + visibility VARCHAR(255) NULL, + inquiry_answer_id BIGINT NOT NULL, + CONSTRAINT pk_inquiry_answer_attachment PRIMARY KEY (inquiry_answer_attachment_id) +); + +CREATE TABLE inquiry_attachment +( + inquiry_attachment_id BIGINT AUTO_INCREMENT NOT NULL, + file_name VARCHAR(255) NULL, + s3key VARCHAR(255) NULL, + visibility VARCHAR(255) NULL, + inquiry_id BIGINT NOT NULL, + CONSTRAINT pk_inquiry_attachment PRIMARY KEY (inquiry_attachment_id) +); + +CREATE TABLE notice +( + deleted BIT(1) NOT NULL, + notice_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + title VARCHAR(255) NOT NULL, + content VARCHAR(3000) NOT NULL, + user_id BIGINT NOT NULL, + author VARCHAR(255) NOT NULL, + CONSTRAINT pk_notice PRIMARY KEY (notice_id) +); + +CREATE TABLE notice_attachment +( + notice_attachment_id BIGINT AUTO_INCREMENT NOT NULL, + file_name VARCHAR(255) NULL, + s3key VARCHAR(255) NULL, + visibility VARCHAR(255) NULL, + notice_id BIGINT NOT NULL, + CONSTRAINT pk_notice_attachment PRIMARY KEY (notice_attachment_id) +); + +CREATE TABLE notify +( + deleted BIT(1) NOT NULL, + id BIGINT AUTO_INCREMENT NOT NULL, + notify_custom_key VARCHAR(255) NOT NULL, + notify_type VARCHAR(255) NOT NULL, + notify_result_code VARCHAR(255) NOT NULL, + CONSTRAINT pk_notify PRIMARY KEY (id) +); + +CREATE TABLE payment +( + payment_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + deleted BIT(1) NULL, + exam_application_id BIGINT NULL, + application_id BIGINT NULL, + payment_key VARCHAR(255) NULL, + order_id VARCHAR(255) NOT NULL, + status VARCHAR(255) NOT NULL, + method VARCHAR(255) NULL, + total_amount INT NULL, + supplied_amount INT NULL, + vat_amount INT NULL, + balance_amount INT NULL, + tax_free_amount INT NULL, + CONSTRAINT pk_payment PRIMARY KEY (payment_id) +); + +CREATE TABLE payment_failure_log +( + payment_failure_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + payment_id BIGINT NOT NULL, + exam_application_id BIGINT NOT NULL, + application_id BIGINT NULL, + reason VARCHAR(255) NOT NULL, + snapshot TEXT NULL, + CONSTRAINT pk_payment_failure_log PRIMARY KEY (payment_failure_id) +); + +CREATE TABLE profile +( + deleted BIT(1) NOT NULL, + profile_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + user_id BIGINT NOT NULL, + user_name VARCHAR(255) NOT NULL, + gender VARCHAR(255) NOT NULL, + birth date NOT NULL, + phone_number VARCHAR(255) NOT NULL, + email VARCHAR(255) NULL, + education VARCHAR(255) NULL, + recommender_phone_number VARCHAR(255) NULL, + grade VARCHAR(255) NULL, + school_name VARCHAR(255) NULL, + zipcode VARCHAR(255) NULL, + street VARCHAR(255) NULL, + CONSTRAINT pk_profile PRIMARY KEY (profile_id) +); + +CREATE TABLE recommendation +( + deleted BIT(1) NOT NULL, + recommendation_id BIGINT AUTO_INCREMENT NOT NULL, + user_id BIGINT NULL, + recommeded_name VARCHAR(255) NULL, + recommeded_phone_number VARCHAR(255) NULL, + bank VARCHAR(255) NULL, + account_number VARCHAR(255) NULL, + CONSTRAINT pk_recommendation PRIMARY KEY (recommendation_id) +); + +CREATE TABLE refund +( + refund_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + deleted BIT(1) NULL, + transaction_key VARCHAR(255) NOT NULL, + exam_application_id BIGINT NULL, + reason VARCHAR(255) NOT NULL, + refund_status VARCHAR(255) NULL, + refunded_amount INT NULL, + 
refundable_amount INT NULL, + CONSTRAINT pk_refund PRIMARY KEY (refund_id) +); + +CREATE TABLE refund_failure_log +( + refund_failure_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + refund_id BIGINT NOT NULL, + exam_application_id BIGINT NOT NULL, + reason VARCHAR(255) NOT NULL, + snapshot TEXT NULL, + CONSTRAINT pk_refund_failure_log PRIMARY KEY (refund_failure_id) +); + +CREATE TABLE user +( + deleted BIT(1) NOT NULL, + user_id BIGINT AUTO_INCREMENT NOT NULL, + created_at datetime NULL, + updated_at datetime NULL, + login_id VARCHAR(50) NULL, + password VARCHAR(255) NULL, + gender VARCHAR(255) NULL, + name VARCHAR(255) NULL, + birth date NULL, + phone_number VARCHAR(255) NULL, + customer_key VARCHAR(255) NULL, + agreed_to_terms_of_service BIT(1) NULL, + agreed_to_privacy_policy BIT(1) NULL, + agreed_to_marketing BIT(1) NULL, + user_role VARCHAR(50) NOT NULL, + provider VARCHAR(255) NULL, + CONSTRAINT pk_user PRIMARY KEY (user_id) +); + +CREATE TABLE virtual_account_log +( + deleted BIT(1) NOT NULL, + virtual_account_log_id BIGINT AUTO_INCREMENT NOT NULL, + application_id BIGINT NULL, + order_id VARCHAR(255) NULL, + account_number VARCHAR(255) NULL, + bank_name VARCHAR(255) NULL, + customer_name VARCHAR(255) NULL, + customer_email VARCHAR(255) NULL, + deposit_status SMALLINT NULL, + CONSTRAINT pk_virtual_account_log PRIMARY KEY (virtual_account_log_id) +); + +ALTER TABLE profile + ADD CONSTRAINT uc_25d92281884ae5fdff0d3ec10 UNIQUE (user_id); + +ALTER TABLE notify + ADD CONSTRAINT uc_notify_notify_custom_key UNIQUE (notify_custom_key); + +ALTER TABLE user + ADD CONSTRAINT uc_user_login UNIQUE (login_id); + +CREATE INDEX idx_status_created_at ON payment (status, created_at); \ No newline at end of file diff --git a/src/main/resources/security-config.yml b/src/main/resources/security-config.yml index 523cb69c..41ed8fac 100644 --- a/src/main/resources/security-config.yml +++ b/src/main/resources/security-config.yml @@ -16,7 +16,7 @@ spring: - birthday - birthyear - phone_number - service-terms: terms_03 + service-terms: terms_01,terms_02,terms_03 client-name: kakao provider: kakao: diff --git a/src/test/java/life/mosu/mosuserver/application/oauth/OAuthUserServiceTest.java b/src/test/java/life/mosu/mosuserver/application/oauth/OAuthUserServiceTest.java new file mode 100644 index 00000000..e38ce290 --- /dev/null +++ b/src/test/java/life/mosu/mosuserver/application/oauth/OAuthUserServiceTest.java @@ -0,0 +1,44 @@ +package life.mosu.mosuserver.application.oauth; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +@DisplayName("카카오_추가기능_응답_테스트") +class OAuthUserServiceTest { + + @Test + @DisplayName("Service Terms 응답에 마케팅 동의(terms_03)가 없는 경우 false로 파싱한다") + void 마케팅_동의_여부를_파싱한다() { + Map term1 = Map.of( + "tag", "terms_02", + "required", true, + "agreed", true + ); + Map term2 = Map.of( + "tag", "terms_01", + "required", true, + "agreed", true + ); + + Map serviceTermsAttributes = new HashMap<>(); + serviceTermsAttributes.put("id", 4342056184L); + serviceTermsAttributes.put("service_terms", List.of(term1, term2)); + + boolean agreedToMarketing = false; + if (serviceTermsAttributes.get("service_terms") instanceof List termsList) { + agreedToMarketing = termsList.stream() + .filter(term -> term instanceof Map) + .map(term -> (Map) term) + .filter(termMap -> "terms_03".equals(termMap.get("tag"))) + .findFirst() 
+ .map(termMap -> (Boolean) termMap.get("agreed")) + .orElse(false); + } + + Assertions.assertFalse(agreedToMarketing); + } +} \ No newline at end of file diff --git a/src/test/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilterMultiUserIsolationTest.java b/src/test/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilterMultiUserIsolationTest.java new file mode 100644 index 00000000..679ec31b --- /dev/null +++ b/src/test/java/life/mosu/mosuserver/global/filter/IpRateLimitingFilterMultiUserIsolationTest.java @@ -0,0 +1,494 @@ +package life.mosu.mosuserver.global.filter; + +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.awaitility.Awaitility.await; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.Expiry; +import com.github.benmanes.caffeine.cache.LoadingCache; +import jakarta.servlet.FilterChain; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import java.time.Duration; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import life.mosu.mosuserver.domain.caffeine.dto.BlockedIp; +import life.mosu.mosuserver.domain.caffeine.dto.BlockedIpHistory; +import life.mosu.mosuserver.domain.caffeine.dto.RequestCounter; +import life.mosu.mosuserver.global.config.IpRateLimitingProperties; +import life.mosu.mosuserver.global.exception.CustomRuntimeException; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +@Slf4j +public class IpRateLimitingFilterMultiUserIsolationTest { + + private IpRateLimitingFilter filter; + private IpRateLimitingProperties properties; + private Cache ipRequestCountsCache; + private Cache blockedHistoryCache; + private LoadingCache blockedIpCache; + + @BeforeEach + public void setup() { + properties = Mockito.mock(IpRateLimitingProperties.class); + Mockito.when(properties.isEnabled()).thenReturn(true); + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(100); + + ipRequestCountsCache = Caffeine.newBuilder() + .maximumSize(10_000) + .expireAfterWrite(Duration.ofMinutes(1)) + .build(); + + blockedHistoryCache = Caffeine.newBuilder() + .maximumSize(10_000) + .expireAfterWrite(Duration.ofHours(1)) + .build(); + + blockedIpCache = Caffeine.newBuilder() + .expireAfter(new Expiry() { + @Override + public long expireAfterCreate(String key, BlockedIp value, long currentTime) { + return value.getTtl().toNanos(); + } + + @Override + public long expireAfterUpdate(String key, BlockedIp value, long currentTime, + long currentDuration) { + return currentDuration; + } + + @Override + public long expireAfterRead(String key, BlockedIp value, long currentTime, + long currentDuration) { + return currentDuration; + } + }) + .build(key -> null); + + filter = new IpRateLimitingFilter(properties, ipRequestCountsCache, blockedHistoryCache, + blockedIpCache); + } + + @Test + public void 임계치_이하에서는_요청_모두_통과하고_카운터증가_체인호출됨() throws InterruptedException { + int userCount = 500; + int requestsPerUser = 50; + int 
repeatCount = 10; + + // IP 리스트 500개 생성 + List ips = IntStream.range(1, userCount + 1) + .mapToObj(i -> "192.168." + (i / 255) + "." + (i % 255)) + .collect(Collectors.toList()); + + // 각 IP별 누적 카운트 저장용 배열 (500명 × 누적 합) + long[] cumulativeCounts = new long[userCount]; + + for (int run = 1; run <= repeatCount; run++) { + // 캐시 초기화 (필요시) + ipRequestCountsCache.invalidateAll(); + blockedHistoryCache.invalidateAll(); + blockedIpCache.invalidateAll(); + + ExecutorService executor = Executors.newFixedThreadPool(100); + CountDownLatch latch = new CountDownLatch(userCount * requestsPerUser); + + for (String ip : ips) { + for (int i = 0; i < requestsPerUser; i++) { + executor.submit(() -> { + try { + HttpServletRequest request = new MockHttpServletRequest() { + @Override + public String getRemoteAddr() { + return ip; + } + }; + HttpServletResponse response = new MockHttpServletResponse(); + FilterChain filterChain = (req, res) -> { + }; + + filter.doFilterInternal(request, response, filterChain); + + } catch (CustomRuntimeException e) { + // 차단 예외 무시 + } catch (Exception e) { + e.printStackTrace(); + } finally { + latch.countDown(); + } + }); + } + } + + latch.await(); + executor.shutdown(); + + for (int idx = 0; idx < userCount; idx++) { + String ip = ips.get(idx); + RequestCounter counter = ipRequestCountsCache.get(ip, k -> new RequestCounter()); + long count = counter.getCount(); + cumulativeCounts[idx] += count; + log.debug("[Run {}] IP: {}, 카운트: {}", run, ip, count); + + assertThat(count) + .withFailMessage("Run %d: IP %s 카운트가 %d이어야 합니다, 실제: %d", run, ip, + requestsPerUser, count) + .isEqualTo(requestsPerUser); + } + } + + // 평균 카운트 계산 및 출력 + for (int idx = 0; idx < userCount; idx++) { + double avg = cumulativeCounts[idx] / (double) repeatCount; + log.info("IP: {}, 평균 카운트 ({}회 반복): {}", ips.get(idx), repeatCount, avg); + } + } + + @Test + void 임계치_이하에서는_카운터정확_체인_모두_호출됨() throws Exception { + // (기존 임계치_초과시_즉시차단되고_체인호출안됨_차단캐시에저장됨 본문 그대로) + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(1000); + String ip = "10.0.0.1"; + int requests = 200; + AtomicInteger chainCalls = new AtomicInteger(); + FilterChain chain = (req, res) -> chainCalls.incrementAndGet(); + for (int i = 0; i < requests; i++) { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + } + RequestCounter counter = ipRequestCountsCache.get(ip, k -> new RequestCounter()); + assertThat(counter.getCount()).isEqualTo(requests); + assertThat(chainCalls.get()).isEqualTo(requests); + } + + + @Test + void 이미_차단된_IP는_카운터와무관하게_즉시차단_체인호출안됨() throws Exception { + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(10); + String ip = "10.0.0.2"; + AtomicInteger chainCalls = new AtomicInteger(); + FilterChain chain = (req, res) -> chainCalls.incrementAndGet(); + + for (int i = 0; i < 10; i++) { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + } + + boolean blockedThrown = false; + try { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + } catch (CustomRuntimeException e) { + blockedThrown = true; + } + + assertThat(blockedThrown).isTrue(); + assertThat(chainCalls.get()).isEqualTo(10); + BlockedIp blocked = blockedIpCache.getIfPresent(ip); + assertThat(blocked).isNotNull(); + } + + + @Test + void 차단상태에서는_카운터증가없고_체인호출안됨() throws Exception { + String ip = "10.0.0.3"; + BlockedIp mockedBlocked = Mockito.mock(BlockedIp.class); + Mockito.when(mockedBlocked.getTtl()).thenReturn(Duration.ofSeconds(30)); + blockedIpCache.put(ip, mockedBlocked); + 
ipRequestCountsCache.put(ip, new RequestCounter()); + ipRequestCountsCache.get(ip, k -> new RequestCounter()).incrementAndGet(); + AtomicInteger chainCalls = new AtomicInteger(); + FilterChain chain = (req, res) -> chainCalls.incrementAndGet(); + boolean blockedThrown = false; + try { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + } catch (CustomRuntimeException e) { + blockedThrown = true; + } + assertThat(blockedThrown).isTrue(); + assertThat(chainCalls.get()).isEqualTo(0); + assertThat(ipRequestCountsCache.get(ip, k -> new RequestCounter()).getCount()).isEqualTo(1); + } + + @Test + void 카운터_윈도우_만료후_초기화된다() throws Exception { + Cache shortLivedCount = Caffeine.newBuilder() + .maximumSize(10_000) + .expireAfterWrite(Duration.ofSeconds(2)) + .build(); + Cache history = Caffeine.newBuilder() + .maximumSize(10_000) + .expireAfterWrite(Duration.ofHours(1)) + .build(); + LoadingCache blocked = Caffeine.newBuilder() + .expireAfter(new Expiry() { + public long expireAfterCreate(String key, BlockedIp value, long currentTime) { + return value.getTtl().toNanos(); + } + + public long expireAfterUpdate(String key, BlockedIp value, long currentTime, + long currentDuration) { + return currentDuration; + } + + public long expireAfterRead(String key, BlockedIp value, long currentTime, + long currentDuration) { + return currentDuration; + } + }) + .build(k -> null); + + IpRateLimitingFilter localFilter = new IpRateLimitingFilter(properties, shortLivedCount, + history, blocked); + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(1000); + + String ip = "10.0.0.4"; + FilterChain chain = (req, res) -> { + }; + + localFilter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + localFilter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + assertThat(shortLivedCount.get(ip, k -> new RequestCounter()).getCount()).isEqualTo(2); + + await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { + RequestCounter c = shortLivedCount.getIfPresent(ip); + assertThat(c).isNull(); + }); + + localFilter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + assertThat(shortLivedCount.get(ip, k -> new RequestCounter()).getCount()).isEqualTo(1); + } + + @Test + void 차단_TTL_만료후_요청_통과된다() throws Exception { + LoadingCache localBlocked = Caffeine.newBuilder() + .expireAfter(new Expiry() { + public long expireAfterCreate(String key, BlockedIp value, long currentTime) { + return value.getTtl().toNanos(); + } + + public long expireAfterUpdate(String key, BlockedIp value, long currentTime, + long currentDuration) { + return currentDuration; + } + + public long expireAfterRead(String key, BlockedIp value, long currentTime, + long currentDuration) { + return currentDuration; + } + }) + .build(k -> null); + + Cache counts = Caffeine.newBuilder() + .maximumSize(10_000) + .expireAfterWrite(Duration.ofMinutes(1)) + .build(); + Cache history = Caffeine.newBuilder() + .maximumSize(10_000) + .expireAfterWrite(Duration.ofHours(1)) + .build(); + IpRateLimitingFilter localFilter = new IpRateLimitingFilter(properties, counts, history, + localBlocked); + + String ip = "10.0.0.5"; + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(1000); + + BlockedIp mockedBlocked = Mockito.mock(BlockedIp.class); + Mockito.when(mockedBlocked.getTtl()).thenReturn(Duration.ofSeconds(2)); + + localBlocked.put(ip, mockedBlocked); + + boolean blockedThrown = false; + try { + localFilter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), + (req, 
res) -> { + }); + } catch (CustomRuntimeException e) { + blockedThrown = true; + } + assertThat(blockedThrown).isTrue(); + + await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> { + BlockedIp b = localBlocked.getIfPresent(ip); + assertThat(b).isNull(); + }); + + localFilter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), (req, res) -> { + }); + } + + @Test + void 차단은_IP_간_독립적으로_동작한다() throws Exception { + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(5); + String bad = "10.0.0.6"; + String good = "10.0.0.7"; + AtomicInteger goodCalls = new AtomicInteger(); + + for (int i = 0; i < 5; i++) { + filter.doFilterInternal(reqForIp(bad), new MockHttpServletResponse(), (req, res) -> { + }); + } + boolean blocked = false; + try { + filter.doFilterInternal(reqForIp(bad), new MockHttpServletResponse(), (req, res) -> { + }); + } catch (CustomRuntimeException e) { + blocked = true; + } + assertThat(blocked).isEqualTo(true); + + filter.doFilterInternal(reqForIp(good), new MockHttpServletResponse(), + (req, res) -> goodCalls.incrementAndGet()); + assertThat(goodCalls.get()).isEqualTo(1); + } + + @Test + void 단일IP_고경합에서도_카운터와_체인호출이_정확하다() throws Exception { + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(100_000); + String ip = "10.0.0.8"; + int threads = 200; + int perThread = 50; + ExecutorService exec = Executors.newFixedThreadPool(50); + CountDownLatch latch = new CountDownLatch(threads * perThread); + AtomicInteger chainCalls = new AtomicInteger(); + + FilterChain chain = (req, res) -> chainCalls.incrementAndGet(); + + for (int t = 0; t < threads; t++) { + exec.submit(() -> { + try { + for (int i = 0; i < perThread; i++) { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + for (int i = 0; i < perThread; i++) { + latch.countDown(); + } + } + }); + } + latch.await(); + exec.shutdown(); + + long expected = (long) threads * perThread; + RequestCounter counter = ipRequestCountsCache.get(ip, k -> new RequestCounter()); + assertThat(counter.getCount()).isEqualTo(expected); + assertThat(chainCalls.get()).isEqualTo(expected); + } + + @Test + void 차단중에는_모든요청이_차단되고_체인호출없다() throws Exception { + Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(1); + String ip = "10.0.0.9"; + + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), (req, res) -> { + }); + + boolean secondBlocked = false; + try { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), (req, res) -> { + }); + } catch (CustomRuntimeException e) { + secondBlocked = true; + } + assertThat(secondBlocked).isEqualTo(true); + + AtomicInteger chainCalls = new AtomicInteger(); + FilterChain chain = (req, res) -> chainCalls.incrementAndGet(); + int concurrent = 100; + ExecutorService exec = Executors.newFixedThreadPool(20); + CountDownLatch latch = new CountDownLatch(concurrent); + for (int i = 0; i < concurrent; i++) { + exec.submit(() -> { + try { + try { + filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), chain); + } catch (CustomRuntimeException ignored) { + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + latch.countDown(); + } + }); + } + latch.await(); + exec.shutdown(); + + assertThat(chainCalls.get()).isEqualTo(0); + } + + private HttpServletRequest reqForIp(String ip) { + return new MockHttpServletRequest() { + @Override + public String getRemoteAddr() { + return ip; + } + }; + } + + @Test + void 
다중스레드_동시차단_시도에도_차단이력은_한단계만_상승한다() throws Exception {
+        // Threshold of 1: only the first request is allowed; every request after it is blocked
+        Mockito.when(properties.getMaxRequestsPerMinute()).thenReturn(1);
+        String ip = "10.0.0.200";
+
+        // 1) First request: allowed, so the counter becomes 1
+        filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(), (req, res) -> {
+        });
+
+        // 2) Fire many requests at once so handleBlockedIp is invoked concurrently
+        int concurrent = 200;
+        CountDownLatch latch = new CountDownLatch(concurrent);
+        ExecutorService exec = Executors.newFixedThreadPool(40);
+
+        for (int i = 0; i < concurrent; i++) {
+            exec.submit(() -> {
+                try {
+                    try {
+                        filter.doFilterInternal(reqForIp(ip), new MockHttpServletResponse(),
+                                (req, res) -> {
+                                });
+                    } catch (CustomRuntimeException ignored) {
+                        // The block exception is ignored (expected behavior)
+                    }
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                } finally {
+                    latch.countDown();
+                }
+            });
+        }
+
+        latch.await();
+        exec.shutdown();
+
+        // 3) Verify
+        // - blockedIpCache must contain an entry (the IP is blocked)
+        // - the BlockedIpHistory penalty level must have risen by exactly one step
+        //   (even with concurrent updates, compute applies the change atomically, so LEVEL_1 is expected)
+
+        BlockedIp blocked = blockedIpCache.getIfPresent(ip);
+        assertThat(blocked).isNotNull();
+
+        BlockedIpHistory history = blockedHistoryCache.getIfPresent(ip);
+        assertThat(history).isNotNull();
+
+        // Assuming the initial level was LEVEL_0, expect LEVEL_1 after the escalation
+        log.info("history: {}", history);
+        assertThat(history.getPenaltyLevel()).isEqualTo(TimePenalty.LEVEL_1);
+
+    }
+}
+
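Note: the new handleBlockedIp logic and the concurrency test above both rely on Caffeine exposing its entries as a ConcurrentMap. Below is a minimal, hedged sketch of that pattern only — PenaltyEscalationSketch, Penalty, and History are simplified stand-in names for illustration, not the project's actual classes.

// Sketch of the putIfAbsent/compute escalation pattern, with stand-in types.
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import java.time.Duration;

public class PenaltyEscalationSketch {

    enum Penalty {
        LEVEL_0, LEVEL_1, LEVEL_2;

        Penalty next() {
            return values()[Math.min(ordinal() + 1, values().length - 1)];
        }
    }

    record History(Penalty level) { }

    private final Cache<String, Penalty> blocked = Caffeine.newBuilder()
            .expireAfterWrite(Duration.ofMinutes(5))
            .build();

    private final Cache<String, History> history = Caffeine.newBuilder()
            .expireAfterWrite(Duration.ofHours(1))
            .build();

    void block(String ip) {
        // Only the first thread to cross the threshold sees null here; every other
        // concurrent caller finds the existing entry and returns without touching history.
        if (blocked.asMap().putIfAbsent(ip, Penalty.LEVEL_0) != null) {
            return;
        }
        // compute runs atomically per key, so even under contention the recorded
        // penalty level advances exactly one step per blocking event.
        Penalty next = history.asMap()
                .compute(ip, (k, h) -> new History(h == null ? Penalty.LEVEL_1 : h.level().next()))
                .level();
        // Re-writing the blocked entry lets an Expiry based on the value pick up the new TTL.
        blocked.asMap().computeIfPresent(ip, (k, v) -> next);
    }
}

The design point this illustrates: putIfAbsent decides a single "winner" per IP, and compute serializes the history update, which is why the test can assert the level rises to exactly LEVEL_1 even with 200 concurrent threads.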