-
Notifications
You must be signed in to change notification settings - Fork 0
Add issueStat query and openIssueCount field
#175
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -35,6 +35,12 @@ query Issues( | |
| login | ||
| } | ||
| } | ||
| # TODO: #181 | ||
| assignees(first: 10) { | ||
|
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. #173 (comment) 에서와 동일한 쟁점이 여기에도 있는 것 같습니다. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 특정 갯수로 제한하지 않고, 전부 다 가져올 수 있는 generic하면서 programmatic한 방식을 도입하고, 이를 여러개의 PR에서 공통적으로 활용하는 것이 좋지 않을까 합니다.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 페이지네이션 전략이 다양할 수 있어서 고민이 좀 필요할 것 같네요. 이슈를 따로 생성하고 한꺼번에 적용하는 게 좋을 것 같습니다. 당장 생각나는 방법은
등이 생각나네요. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 요청한 개수와 응답 받은 개수 비교해서 추가 요청하기 방식을 취하면, 초과할 가능성이 높은 필드만 추가로 확인해서 요청하기 방식은 무조건 커버가 될 것으로 보입니다. 전자가 더 나은 방식이라고 생각합니다. 이슈를 생성해주실 수 있을까요? 한편, pagination이 결부되는 GraphQL field들은 이 PR에서 제거하고 리뷰&머지 하거나, 아니면 이 PR 자체를 pending 해두어야 할 것 같습니다. 아니면 이 부분을 TODO 로 주석에 표기해두고, 기술부채가 남지않도록 이를 챙기면 될 것 같습니다. 이 PR 진행방식은 저는 어떤 것이든 무관하다고 생각합니다. Jake께서 github-dashboard에서의 주요 2개 프로젝트 고려해서 정하면 될 것 같다고 생각합니다.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. TODO 주석 추가했습니다. |
||
| nodes { | ||
| login | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -3,10 +3,16 @@ use std::fmt; | |
| use anyhow::Context as AnyhowContext; | ||
| use async_graphql::{ | ||
| connection::{query, Connection, EmptyFields}, | ||
| Context, Object, Result, SimpleObject, | ||
| scalar, Context, Object, Result, SimpleObject, | ||
| }; | ||
|
|
||
| use crate::database::{self, Database, TryFromKeyValue}; | ||
| use crate::{ | ||
| database::{self, Database, TryFromKeyValue}, | ||
| github::{issues::IssueState, GitHubIssue}, | ||
| graphql::DateTimeUtc, | ||
| }; | ||
|
|
||
| scalar!(IssueState); | ||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Custom scalar 로 표현됩니다. Remote enum 이라는 게 있긴한데, 아래의 이유로 적용 불가능합니다.
|
||
|
|
||
| #[derive(SimpleObject)] | ||
| pub(crate) struct Issue { | ||
|
|
@@ -15,19 +21,32 @@ pub(crate) struct Issue { | |
| pub(crate) number: i32, | ||
| pub(crate) title: String, | ||
| pub(crate) author: String, | ||
| pub(crate) created_at: DateTimeUtc, | ||
| pub(crate) state: IssueState, | ||
| pub(crate) assignees: Vec<String>, | ||
| } | ||
|
|
||
| impl TryFromKeyValue for Issue { | ||
| fn try_from_key_value(key: &[u8], value: &[u8]) -> anyhow::Result<Self> { | ||
| let (owner, repo, number) = database::parse_key(key) | ||
| .with_context(|| format!("invalid key in database: {key:02x?}"))?; | ||
| let (title, author, _) = bincode::deserialize::<(String, String, Option<String>)>(value)?; | ||
| let GitHubIssue { | ||
| title, | ||
| author, | ||
| created_at, | ||
| state, | ||
| assignees, | ||
| .. | ||
| } = bincode::deserialize::<GitHubIssue>(value)?; | ||
| let issue = Issue { | ||
| title, | ||
| author, | ||
| owner, | ||
| repo, | ||
| number: i32::try_from(number).unwrap_or(i32::MAX), | ||
| created_at: DateTimeUtc(created_at), | ||
| state, | ||
| assignees, | ||
| }; | ||
| Ok(issue) | ||
| } | ||
|
|
@@ -69,6 +88,15 @@ impl IssueQuery { | |
| mod tests { | ||
| use crate::{github::GitHubIssue, graphql::TestSchema}; | ||
|
|
||
| fn create_issues(n: usize) -> Vec<GitHubIssue> { | ||
| (1..=n) | ||
| .map(|i| GitHubIssue { | ||
| number: i64::try_from(i).unwrap(), | ||
| ..Default::default() | ||
| }) | ||
| .collect() | ||
| } | ||
|
|
||
| #[tokio::test] | ||
| async fn issues_empty() { | ||
| let schema = TestSchema::new(); | ||
|
|
@@ -89,26 +117,7 @@ mod tests { | |
| #[tokio::test] | ||
| async fn issues_first() { | ||
| let schema = TestSchema::new(); | ||
| let issues = vec![ | ||
| GitHubIssue { | ||
| number: 1, | ||
| title: "issue 1".to_string(), | ||
| author: "author 1".to_string(), | ||
| closed_at: None, | ||
| }, | ||
| GitHubIssue { | ||
| number: 2, | ||
| title: "issue 2".to_string(), | ||
| author: "author 2".to_string(), | ||
| closed_at: None, | ||
| }, | ||
| GitHubIssue { | ||
| number: 3, | ||
| title: "issue 3".to_string(), | ||
| author: "author 3".to_string(), | ||
| closed_at: None, | ||
| }, | ||
| ]; | ||
| let issues = create_issues(3); | ||
| schema.db.insert_issues(issues, "owner", "name").unwrap(); | ||
|
|
||
| let query = r" | ||
|
|
@@ -148,26 +157,7 @@ mod tests { | |
| #[tokio::test] | ||
| async fn issues_last() { | ||
| let schema = TestSchema::new(); | ||
| let issues = vec![ | ||
| GitHubIssue { | ||
| number: 1, | ||
| title: "issue 1".to_string(), | ||
| author: "author 1".to_string(), | ||
| closed_at: None, | ||
| }, | ||
| GitHubIssue { | ||
| number: 2, | ||
| title: "issue 2".to_string(), | ||
| author: "author 2".to_string(), | ||
| closed_at: None, | ||
| }, | ||
| GitHubIssue { | ||
| number: 3, | ||
| title: "issue 3".to_string(), | ||
| author: "author 3".to_string(), | ||
| closed_at: None, | ||
| }, | ||
| ]; | ||
| let issues = create_issues(3); | ||
| schema.db.insert_issues(issues, "owner", "name").unwrap(); | ||
|
|
||
| let query = r" | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.