refac: Migrate all datasets db operation to drizzle on NextJs #197

Merged
69 changes: 0 additions & 69 deletions app-server/src/db/datasets.rs
@@ -4,56 +4,6 @@ use uuid::Uuid;

use crate::datasets::Dataset;

pub async fn create_dataset(pool: &PgPool, name: &String, project_id: Uuid) -> Result<Dataset> {
let dataset = sqlx::query_as::<_, Dataset>(
"INSERT INTO datasets (name, project_id)
VALUES ($1, $2)
RETURNING id, created_at, name, project_id, indexed_on
",
)
.bind(name)
.bind(project_id)
.fetch_one(pool)
.await?;

Ok(dataset)
}

pub async fn get_datasets(
pool: &PgPool,
project_id: Uuid,
limit: i64,
offset: i64,
) -> Result<Vec<Dataset>> {
let datasets = sqlx::query_as::<_, Dataset>(
"SELECT
id,
created_at,
name,
project_id,
indexed_on
FROM datasets WHERE project_id = $1
ORDER BY created_at DESC
LIMIT $2 OFFSET $3",
)
.bind(project_id)
.bind(limit)
.bind(offset)
.fetch_all(pool)
.await?;

Ok(datasets)
}

pub async fn count_datasets(pool: &PgPool, project_id: Uuid) -> Result<i64> {
let count = sqlx::query_scalar::<_, i64>("SELECT COUNT(*) FROM datasets WHERE project_id = $1")
.bind(project_id)
.fetch_one(pool)
.await?;

Ok(count)
}

pub async fn get_dataset(pool: &PgPool, project_id: Uuid, dataset_id: Uuid) -> Result<Dataset> {
let dataset = sqlx::query_as::<_, Dataset>(
"SELECT id, created_at, name, project_id, indexed_on FROM datasets WHERE id = $1 AND project_id = $2",
@@ -66,25 +16,6 @@ pub async fn get_dataset(pool: &PgPool, project_id: Uuid, dataset_id: Uuid) -> Result<Dataset> {
dataset.context("Dataset with such id and project_id not found")
}

pub async fn rename_dataset(
pool: &PgPool,
id: Uuid,
project_id: Uuid,
new_name: &String,
) -> Result<Dataset> {
let dataset = sqlx::query_as::<_, Dataset>(
"UPDATE datasets SET name = $3 WHERE id = $1 AND project_id = $2
RETURNING id, created_at, name, project_id, indexed_on",
)
.bind(id)
.bind(project_id)
.bind(new_name)
.fetch_optional(pool)
.await?;

dataset.context("Dataset with such id and project_id not found")
}

pub async fn delete_dataset(pool: &PgPool, dataset_id: Uuid) -> Result<()> {
sqlx::query("DELETE from datasets WHERE id = $1")
.bind(dataset_id)
4 changes: 0 additions & 4 deletions app-server/src/main.rs
@@ -492,10 +492,6 @@ fn main() -> anyhow::Result<()> {
.service(
routes::evaluations::get_evaluation_score_distribution,
)
.service(routes::datasets::get_datasets)
.service(routes::datasets::create_dataset)
.service(routes::datasets::get_dataset)
.service(routes::datasets::rename_dataset)
.service(routes::datasets::delete_dataset)
.service(routes::datasets::upload_datapoint_file)
.service(routes::datasets::create_datapoints)
73 changes: 0 additions & 73 deletions app-server/src/routes/datasets.rs
@@ -15,79 +15,6 @@ use crate::{

const DEFAULT_PAGE_SIZE: usize = 50;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CreateDatasetRequest {
name: String,
}

#[post("datasets")]
async fn create_dataset(
db: web::Data<DB>,
project_id: web::Path<Uuid>,
req: web::Json<CreateDatasetRequest>,
) -> ResponseResult {
let project_id = project_id.into_inner();
let req = req.into_inner();

let dataset = datasets::create_dataset(&db.pool, &req.name, project_id).await?;

Ok(HttpResponse::Ok().json(dataset))
}

#[get("datasets")]
async fn get_datasets(
db: web::Data<DB>,
project_id: web::Path<Uuid>,
params: web::Query<PaginatedGetQueryParams>,
) -> ResponseResult {
let project_id = project_id.into_inner();
let limit = params.page_size.unwrap_or(DEFAULT_PAGE_SIZE) as i64;
let datasets =
datasets::get_datasets(&db.pool, project_id, limit, params.page_number as i64).await?;

let total_count = datasets::count_datasets(&db.pool, project_id).await? as u64;

let response = PaginatedResponse::<Dataset> {
total_count,
items: datasets,
// Later, when we add filters, we must send a separate query to check if there are
// any datasets in the project
any_in_project: total_count > 0,
};

Ok(HttpResponse::Ok().json(response))
}

#[get("datasets/{dataset_id}")]
async fn get_dataset(db: web::Data<DB>, path: web::Path<(Uuid, Uuid)>) -> ResponseResult {
let (project_id, dataset_id) = path.into_inner();
let dataset = datasets::get_dataset(&db.pool, project_id, dataset_id).await?;

Ok(HttpResponse::Ok().json(dataset))
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdateDatasetRequest {
new_name: String,
}

#[post("datasets/{dataset_id}")]
async fn rename_dataset(
db: web::Data<DB>,
path: web::Path<(Uuid, Uuid)>,
new_name: web::Json<UpdateDatasetRequest>,
) -> ResponseResult {
let (project_id, dataset_id) = path.into_inner();
let new_name = new_name.new_name.clone();

let updated_dataset =
datasets::rename_dataset(&db.pool, dataset_id, project_id, &new_name).await?;

Ok(HttpResponse::Ok().json(updated_dataset))
}

#[delete("datasets/{dataset_id}")]
async fn delete_dataset(
db: web::Data<DB>,
frontend/app/api/projects/[projectId]/datasets/[datasetId]/route.ts
@@ -1,48 +1,22 @@
import { getServerSession } from 'next-auth';
import { authOptions } from '@/lib/auth';
import { fetcher } from '@/lib/utils';

export async function POST(
req: Request,
{ params }: { params: { projectId: string; datasetId: string } }
): Promise<Response> {
const projectId = params.projectId;
const datasetId = params.datasetId;
const session = await getServerSession(authOptions);
const user = session!.user;

const body = await req.json();

const res = await fetcher(`/projects/${projectId}/datasets/${datasetId}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${user.apiKey}`
},
body: JSON.stringify(body)
});

return new Response(res.body);
}
import { db } from '@/lib/db/drizzle';
import { and, eq } from 'drizzle-orm';
import { datasets } from '@/lib/db/migrations/schema';

export async function GET(
req: Request,
{ params }: { params: { projectId: string; datasetId: string } }
): Promise<Response> {
const projectId = params.projectId;
const datasetId = params.datasetId;
const session = await getServerSession(authOptions);
const user = session!.user;

const res = await fetcher(`/projects/${projectId}/datasets/${datasetId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${user.apiKey}`
}
const dataset = await db.query.datasets.findFirst({
where: and(eq(datasets.id, datasetId), eq(datasets.projectId, projectId))
});

return new Response(res.body);
return new Response(JSON.stringify(dataset), { status: 200 });
}

export async function DELETE(
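Both the Rust rename_dataset endpoint and the proxying POST handler above are removed in this PR, and the drizzle-based replacement is not visible in this excerpt. A minimal sketch of what such a rename handler could look like, assuming it mirrors the migrated GET handler in the same route file and keeps the old camelCase newName request field (the handler below is hypothetical, not part of the diff):

// Hypothetical sketch: rename a dataset directly via drizzle instead of
// proxying to the Rust backend. Not part of this diff.
import { db } from '@/lib/db/drizzle';
import { datasets } from '@/lib/db/migrations/schema';
import { and, eq } from 'drizzle-orm';

export async function POST(
  req: Request,
  { params }: { params: { projectId: string; datasetId: string } }
): Promise<Response> {
  const { projectId, datasetId } = params;
  // The removed Rust endpoint accepted a camelCase `newName` field.
  const { newName } = await req.json();

  // Update the name only if the dataset belongs to this project.
  const [updated] = await db
    .update(datasets)
    .set({ name: newName })
    .where(and(eq(datasets.id, datasetId), eq(datasets.projectId, projectId)))
    .returning();

  if (!updated) {
    return new Response(JSON.stringify({ error: 'Dataset not found' }), {
      status: 404
    });
  }

  return new Response(JSON.stringify(updated), { status: 200 });
}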
72 changes: 36 additions & 36 deletions frontend/app/api/projects/[projectId]/datasets/route.ts
@@ -1,62 +1,64 @@
import { getServerSession } from 'next-auth';
import { authOptions } from '@/lib/auth';
import { fetcher } from '@/lib/utils';
import { datasets } from '@/lib/db/migrations/schema';

import { eq, inArray } from 'drizzle-orm';
import { desc, eq, inArray } from 'drizzle-orm';
import { and } from 'drizzle-orm';
import { db } from '@/lib/db/drizzle';
import { paginatedGet } from '@/lib/db/utils';
import { NextRequest } from 'next/server';

export async function POST(
req: Request,
{ params }: { params: { projectId: string } }
): Promise<Response> {
const projectId = params.projectId;
const session = await getServerSession(authOptions);
const user = session!.user;

const body = await req.json();
const { name } = body;

const dataset = await db
.insert(datasets)
.values({
name,
projectId
})
.returning()
.then((res) => res[0]);

if (!dataset) {
return new Response(JSON.stringify({ error: 'Failed to create dataset' }), {
status: 500
});
}

const res = await fetcher(`/projects/${projectId}/datasets`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${user.apiKey}`
},
body: JSON.stringify(body)
});

return new Response(res.body);
return new Response(JSON.stringify(dataset), { status: 200 });
}

export async function GET(
req: Request,
req: NextRequest,
{ params }: { params: { projectId: string } }
): Promise<Response> {
const projectId = params.projectId;
const session = await getServerSession(authOptions);
const user = session!.user;

const res = await fetcher(`/projects/${projectId}/datasets`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${user.apiKey}`
}
const pageNumber =
parseInt(req.nextUrl.searchParams.get('pageNumber') ?? '0') || 0;
const pageSize =
parseInt(req.nextUrl.searchParams.get('pageSize') ?? '50') || 50;
const filters = [eq(datasets.projectId, projectId)];
const datasetsData = await paginatedGet({
table: datasets,
pageNumber,
pageSize,
filters,
orderBy: desc(datasets.createdAt)
});

return new Response(res.body);
return new Response(JSON.stringify(datasetsData), { status: 200 });
}


export async function DELETE(
req: Request,
{ params }: { params: { projectId: string; datasetId: string } }
): Promise<Response> {
const projectId = params.projectId;



const { searchParams } = new URL(req.url);
const datasetIds = searchParams.get('datasetIds')?.split(',');

@@ -65,12 +67,10 @@ export async function DELETE(
}

try {
await db.delete(datasets)
await db
.delete(datasets)
.where(
and(
inArray(datasets.id, datasetIds),
eq(datasets.projectId, projectId)
)
and(inArray(datasets.id, datasetIds), eq(datasets.projectId, projectId))
);

return new Response('datasets deleted successfully', { status: 200 });
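The new GET handler delegates pagination to a paginatedGet helper imported from @/lib/db/utils, which is not shown in this diff. A rough sketch of what such a helper might look like; the signature and return shape are assumptions inferred from the call site and from the LIMIT/OFFSET and COUNT(*) queries in the removed Rust code:

// Assumed sketch of a generic pagination helper; the real @/lib/db/utils
// implementation may differ.
import { and, count, SQL } from 'drizzle-orm';
import { PgTable } from 'drizzle-orm/pg-core';
import { db } from '@/lib/db/drizzle';

export async function paginatedGet({
  table,
  pageNumber,
  pageSize,
  filters,
  orderBy
}: {
  table: PgTable;
  pageNumber: number;
  pageSize: number;
  filters: SQL[];
  orderBy: SQL;
}) {
  const where = and(...filters);

  // Page of rows, equivalent to the removed "LIMIT $2 OFFSET $3" query.
  const items = await db
    .select()
    .from(table)
    .where(where)
    .orderBy(orderBy)
    .limit(pageSize)
    .offset(pageNumber * pageSize);

  // Total row count, equivalent to the removed count_datasets query.
  const [{ totalCount }] = await db
    .select({ totalCount: count() })
    .from(table)
    .where(where);

  // anyInProject mirrors the any_in_project flag of the old Rust response.
  return { items, totalCount, anyInProject: totalCount > 0 };
}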
21 changes: 9 additions & 12 deletions frontend/app/project/[projectId]/datasets/[datasetId]/page.tsx
@@ -1,10 +1,12 @@
import { authOptions } from '@/lib/auth';
import { fetcherJSON } from '@/lib/utils';
import { getServerSession } from 'next-auth';
import { redirect } from 'next/navigation';

import { Metadata } from 'next';
import Dataset from '@/components/dataset/dataset';
import { db } from '@/lib/db/drizzle';
import { and, eq } from 'drizzle-orm';
import { datasets } from '@/lib/db/migrations/schema';

export const metadata: Metadata = {
title: 'Dataset'
@@ -23,18 +25,13 @@ export default async function DatasetPage({
redirect('/sign-in');
}

const user = session.user;
const dataset = await db.query.datasets.findFirst({
where: and(eq(datasets.projectId, projectId), eq(datasets.id, datasetId))
});

const dataset = await fetcherJSON(
`/projects/${projectId}/datasets/${datasetId}`,
{
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${user.apiKey}`
}
}
);
if (!dataset) {
redirect('/404');
}

return <Dataset dataset={dataset} />;
}
@@ -8,6 +8,7 @@ import {
evaluationScores
} from '@/lib/db/migrations/schema';
import { and, asc, eq, sql } from 'drizzle-orm';
import { redirect } from 'next/navigation';

export const metadata: Metadata = {
title: 'Evaluation results'
@@ -90,6 +91,10 @@ async function getEvaluationInfo(
getEvaluationResults
]);

if (!evaluation) {
redirect('/404');
}

const result = {
evaluation: evaluation,
results