+
No Skore has been created; this workspace is empty.
@@ -301,6 +305,7 @@ main {
}
.not-found {
+ height: 100vh;
flex-direction: column;
justify-content: center;
background-image: var(--not-found-image);
diff --git a/frontend/tests/components/FileTree.spec.ts b/frontend/tests/components/FileTree.spec.ts
deleted file mode 100644
index 8e54d0a6a..000000000
--- a/frontend/tests/components/FileTree.spec.ts
+++ /dev/null
@@ -1,149 +0,0 @@
-import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
-
-import FileTree, { transformUrisToTree, type FileTreeNode } from "@/components/FileTree.vue";
-import { mount } from "@vue/test-utils";
-import { createPinia, setActivePinia } from "pinia";
-import { createApp } from "vue";
-
-function countLeaves(nodes: FileTreeNode[]): number {
- function countInNode(node: FileTreeNode): number {
- if (!node.children?.length) {
- return 1;
- }
-
- const countInChildren = node.children.map(countInNode);
- return countInChildren.reduce((accumulator, leavesCount) => accumulator + leavesCount);
- }
-
- const allBranches = nodes.map(countInNode);
- return allBranches.reduce((accumulator, leavesCount) => accumulator + leavesCount);
-}
-
-describe("FileTree", () => {
- beforeEach(() => {
- vi.mock("vue-router");
- const app = createApp({});
- const pinia = createPinia();
- app.use(pinia);
- setActivePinia(pinia);
- });
-
- afterEach(() => {
- vi.restoreAllMocks();
- });
-
- it("Renders properly.", () => {
- const records: FileTreeNode[] = [
- {
- uri: "Punk Rock",
- children: [
- {
- uri: "The Clash",
- children: [
- { uri: "The Clash" },
- { uri: "Give 'Em Enough Rope" },
- { uri: "London Calling" },
- { uri: "Sandinista!" },
- { uri: "Combat Rock" },
- { uri: "Cut the Crap" },
- ],
- },
- {
- uri: "Ramones",
- children: [
- { uri: "Ramones" },
- { uri: "Leave Home" },
- { uri: "Rocket to Russia" },
- { uri: "Road to Ruin" },
- { uri: "End of the Century" },
- { uri: "Pleasant Dreams" },
- { uri: "Subterranean Jungle" },
- { uri: "Too Tough to Die" },
- { uri: "Animal Boy" },
- { uri: "Halfway to Sanity" },
- { uri: "Brain Drain" },
- { uri: "Mondo Bizarro" },
- { uri: "Acid Eaters" },
- { uri: "¡Adios Amigos!" },
- ],
- },
- ],
- },
- {
- uri: "French touch",
- children: [
- {
- uri: "Laurent Garnier",
- children: [
- { uri: "Shot in the Dark" },
- { uri: "Club Traxx EP" },
- { uri: "30" },
- { uri: "Early Works" },
- { uri: "Unreasonable Behaviour" },
- { uri: "The Cloud Making Machine" },
- { uri: "Retrospective" },
- { uri: "Public Outburst" },
- { uri: "Tales of a Kleptomaniac" },
- { uri: "Suivront Mille Ans De Calme" },
- { uri: "Home Box" },
- { uri: "Paris Est à Nous" },
- { uri: "Le Roi Bâtard" },
- { uri: "De Película" },
- { uri: "Entre la Vie et la Mort" },
- { uri: "33 tours et puis s'en vont" },
- ],
- },
- ],
- },
- ];
-
- const wrapper = mount(FileTree, {
- props: { nodes: records },
- });
-
- const itemSelector = ".file-tree-item";
- const treeItems = wrapper.findAll(itemSelector);
- const leavesCount = countLeaves(records);
- const leaves = treeItems.filter((c) => c.findAll(itemSelector).length == 0);
- expect(leaves).toHaveLength(leavesCount);
- });
-
- it("Can transform an array of URIs to a tree", () => {
- const uris = [
- "probabl-ai/demo-usecase/training/0",
- "probabl-ai/test-skore/0",
- "probabl-ai/test-skore/1",
- "probabl-ai/test-skore/2",
- "probabl-ai/test-skore/3",
- "probabl-ai/test-skore/4",
- ];
-
- const tree = transformUrisToTree(uris);
- expect(tree).toEqual([
- {
- uri: "probabl-ai",
- children: [
- {
- uri: "probabl-ai/demo-usecase",
- children: [
- {
- uri: "probabl-ai/demo-usecase/training",
- children: [{ uri: "probabl-ai/demo-usecase/training/0" }],
- },
- ],
- },
- {
- uri: "probabl-ai/test-skore",
- children: [
- { uri: "probabl-ai/test-skore/0" },
- { uri: "probabl-ai/test-skore/1" },
- { uri: "probabl-ai/test-skore/2" },
- { uri: "probabl-ai/test-skore/3" },
- { uri: "probabl-ai/test-skore/4" },
- ],
- },
- ],
- },
- ]);
- });
-});
diff --git a/frontend/tests/models.spec.ts b/frontend/tests/models.spec.ts
deleted file mode 100644
index e84455aa5..000000000
--- a/frontend/tests/models.spec.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import { type ItemType } from "@/models";
-import { describe, expect, it } from "vitest";
-import { makeDataStore } from "./test.utils";
-
-describe("DataStore model", () => {
- it("Can access keys by type", () => {
- const infoKeys: ItemType[] = [
- "boolean",
- "integer",
- "number",
- "string",
- "any",
- "array",
- "date",
- "datetime",
- "html",
- "markdown",
- "dataframe",
- "image",
- "cv_results",
- "numpy_array",
- "sklearn_model",
- ];
- const plotKeys: ItemType[] = ["vega", "matplotlib_figure"];
-
- const m = makeDataStore("/test/fixture", [...infoKeys, ...plotKeys]);
-
- expect(m.infoKeys).toHaveLength(infoKeys.length);
- expect(m.plotKeys).toHaveLength(plotKeys.length);
- expect(m.artifactKeys).toHaveLength(0);
-
- expect(m.get("boolean")).toBeDefined();
- });
-});
diff --git a/frontend/tests/services/api.spec.ts b/frontend/tests/services/api.spec.ts
index 65a2f3f1e..34a5cfa84 100644
--- a/frontend/tests/services/api.spec.ts
+++ b/frontend/tests/services/api.spec.ts
@@ -1,7 +1,7 @@
-import { fetchAllManderUris, fetchMander, fetchShareableBlob, putLayout } from "@/services/api";
+import { fetchReport, putLayout } from "@/services/api";
import { afterEach, describe, expect, it, vi } from "vitest";
-import { DataStore, type Layout } from "@/models";
+import type { KeyLayoutSize } from "@/models";
import { createFetchResponse, mockedFetch } from "../test.utils";
describe("API Service", () => {
@@ -9,99 +9,24 @@ describe("API Service", () => {
vi.restoreAllMocks();
});
- it("Can fetch the list of manders from the server.", async () => {
- const uris = [
- "probabl-ai/demo-usecase/training/0",
- "probabl-ai/test-skore/0",
- "probabl-ai/test-skore/1",
- "probabl-ai/test-skore/2",
- "probabl-ai/test-skore/3",
- "probabl-ai/test-skore/4",
- ];
-
- mockedFetch.mockResolvedValue(createFetchResponse(uris));
-
- const r = await fetchAllManderUris();
- expect(r).toStrictEqual(uris);
- });
-
- it("Can fetch a mander from the server", async () => {
- const mander = {
- schema: "schema:dashboard:v0",
- uri: "probal-ai/demo-usecase/training/1",
- payload: {
- title: {
- type: "string",
- data: "My Awesome Dashboard",
- },
- errors: {
- type: "array",
- data: [0.1, 0.2, 0.3, 0.4, 0.5],
- },
- "creation date": {
- type: "date",
- data: "2024-07-24",
- },
- "last updated": {
- type: "datetime",
- data: "2024-07-24T11:31:00Z",
- },
- score: {
- type: "number",
- data: 0.87,
- },
- count: {
- type: "integer",
- data: 234567,
- },
- monitoring: {
- type: "markdown",
- data: "- The fitting run used **92.24347826086958%** of your CPU (min: 0.0%; max: 100.4%)\n- The fitting run used **0.7128300874129586%** of your RAM (min: 0.7058143615722656%; max: 0.7147789001464844%)",
- },
- "custom html": {
- type: "html",
- data: "",
+ it("Can fetch the current project from the server", async () => {
+ const p = {
+ layout: [
+ { key: "Any", size: "small" },
+ { key: "Array", size: "medium" },
+ ],
+ items: {
+ Any: { item_type: "json", media_type: null, serialized: { k1: "v1" } },
+ Array: {
+ item_type: "json",
+ media_type: null,
+ serialized: [1, 2, 3],
},
},
};
- mockedFetch.mockResolvedValue(createFetchResponse(mander));
-
- const r = await fetchMander("random");
- expect(r).toBeInstanceOf(DataStore);
- expect(r?.infoKeys.length).toBe(8);
- });
-
- it("Can persist a layout.", async () => {
- const layout: Layout = [
- { key: "title", size: "medium" },
- { key: "errors", size: "large" },
- { key: "creation_date", size: "small" },
- ];
- const mander = new DataStore(
- "random",
- {
- title: {
- type: "string",
- data: "My Awesome Dashboard",
- },
- errors: {
- type: "array",
- data: [0.1, 0.2, 0.3, 0.4, 0.5],
- },
- creation_date: {
- type: "date",
- data: "2024-07-24",
- },
- },
- layout
- );
-
- mockedFetch.mockResolvedValue(createFetchResponse(mander, 201));
-
- const r = await putLayout("random", layout);
- expect(r).toBeInstanceOf(DataStore);
- expect(r).toEqual(mander);
- expect(r?.layout).toEqual(layout);
+ mockedFetch.mockResolvedValue(createFetchResponse(p));
+ const r = await fetchReport();
+ expect(Object.keys(r!).length).toBe(2);
});
it("Can report errors.", async () => {
@@ -110,18 +35,30 @@ describe("API Service", () => {
throw error;
});
- expect(await fetchAllManderUris()).toEqual([]);
- expect(await fetchMander("random")).toBeNull();
- expect(await putLayout("random", [])).toBeNull();
- expect(await fetchShareableBlob("random")).toBeNull();
+ expect(await fetchReport()).toBeNull();
});
- it("Can fetch a shareable blob.", async () => {
- const blob = new Blob(["Hello, world!"], { type: "text/plain" });
- mockedFetch.mockResolvedValue(createFetchResponse(blob));
-
- const r = await fetchShareableBlob("random");
- expect(r).toBeInstanceOf(Blob);
- expect(r?.size).toBe(13);
+ it("Can put a layout", async () => {
+ const layoutPayload = [
+ { key: "Any", size: "small" as KeyLayoutSize },
+ { key: "Array", size: "medium" as KeyLayoutSize },
+ ];
+ const reportPayload = {
+ layout: [
+ { key: "Any", size: "small" },
+ { key: "Array", size: "medium" },
+ ],
+ items: {
+ Any: { item_type: "json", media_type: null, serialized: { k1: "v1" } },
+ Array: {
+ item_type: "json",
+ media_type: null,
+ serialized: [1, 2, 3],
+ },
+ },
+ };
+ mockedFetch.mockResolvedValue(createFetchResponse(reportPayload, 201));
+ const r = await putLayout(layoutPayload);
+ expect(r).toEqual(reportPayload);
});
});
diff --git a/frontend/tests/services/utils.spec.ts b/frontend/tests/services/utils.spec.ts
index 715b308a1..53a3fe10b 100644
--- a/frontend/tests/services/utils.spec.ts
+++ b/frontend/tests/services/utils.spec.ts
@@ -49,7 +49,7 @@ describe("utils", () => {
it("Can poll a function and stp the polling", async () => {
const f = vi.fn();
const stop = await poll(f, 10);
- await sleep(22);
+ await sleep(25);
stop();
expect(f).toBeCalledTimes(3);
});
diff --git a/frontend/tests/stores/reports.spec.ts b/frontend/tests/stores/reports.spec.ts
index 649d8c661..ebf6fe1c9 100644
--- a/frontend/tests/stores/reports.spec.ts
+++ b/frontend/tests/stores/reports.spec.ts
@@ -1,44 +1,37 @@
-import { type ItemType, type KeyLayoutSize } from "@/models";
-import { fetchAllManderUris, fetchMander, putLayout } from "@/services/api";
-import { useReportsStore } from "@/stores/reports";
+import type { ReportItem } from "@/models";
+import { fetchReport } from "@/services/api";
+import { useReportStore } from "@/stores/report";
import { createTestingPinia } from "@pinia/testing";
import { setActivePinia } from "pinia";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
-import { createFetchResponse, makeDataStore, mockedFetch } from "../test.utils";
-
-const uri = "/test/fixture";
-const infoKeys: ItemType[] = [
- "boolean",
- "integer",
- "number",
- "string",
- "any",
- "array",
- "date",
- "datetime",
- "html",
- "markdown",
- "dataframe",
- "image",
- "cv_results",
- "numpy_array",
- "sklearn_model",
-];
-const plotKeys: ItemType[] = ["vega", "matplotlib_figure"];
vi.mock("@/services/api", () => {
- const fetchAllManderUris = vi.fn().mockImplementation(() => {
- return [uri];
- });
- const fetchMander = vi.fn().mockImplementation(() => {
- return makeDataStore(uri, [...infoKeys, ...plotKeys]);
- });
- const putLayout = vi.fn().mockImplementation(() => {
- return makeDataStore(uri, [...infoKeys, ...plotKeys]);
- });
- return { fetchAllManderUris, fetchMander, putLayout };
+ const fetchReport = vi.fn().mockImplementation(() => {});
+ return { fetchReport };
});
+function makeFakeReport() {
+ const epoch = new Date("1970-01-01T00:00:00Z").toISOString();
+ const i1 = {
+ media_type: "text/markdown",
+ value: "",
+ updated_at: epoch,
+ created_at: epoch,
+ } as ReportItem;
+ const i2 = {
+ media_type: "text/markdown",
+ value: "",
+ updated_at: epoch,
+ created_at: epoch,
+ } as ReportItem;
+ return {
+ layout: [],
+ items: {
+ Any: i1,
+ Array: i2,
+ },
+ };
+}
describe("Reports store", () => {
beforeEach(() => {
setActivePinia(createTestingPinia({ stubActions: false, createSpy: vi.fn }));
@@ -48,92 +41,42 @@ describe("Reports store", () => {
vi.restoreAllMocks();
});
- it("Can create an empty layout when setting a DataStore with no layout.", () => {
- const ds = makeDataStore(uri, [...infoKeys, ...plotKeys]);
- const reportsStore = useReportsStore();
+ it("Can create an empty layout when setting a DataStore with no layout.", async () => {
+ const reportStore = useReportStore();
- reportsStore.setSelectedReportIfDifferent(ds);
- expect(reportsStore.selectedReport?.uri).toEqual(uri);
- expect(reportsStore.layout).toHaveLength(0);
- });
-
- it("Can create layout item for existing key.", () => {
- const layoutItem = { key: "boolean", size: "large" as KeyLayoutSize };
- const ds = makeDataStore(uri, [...infoKeys, ...plotKeys], [layoutItem]);
- const reportsStore = useReportsStore();
-
- reportsStore.setSelectedReportIfDifferent(ds);
- expect(reportsStore.layout).toContainEqual(layoutItem);
- });
-
- it("Can add some layout to an existing key.", () => {
- const ds = makeDataStore(uri, [...infoKeys, ...plotKeys]);
- const reportsStore = useReportsStore();
-
- reportsStore.setSelectedReportIfDifferent(ds);
- reportsStore.displayKey("boolean");
- reportsStore.setKeyLayoutSize("boolean", "small");
- expect(reportsStore.layout).toHaveLength(1);
- expect(reportsStore.layout).toEqual([{ key: "boolean", size: "small" }]);
- reportsStore.setKeyLayoutSize("unknown", "small");
- expect(reportsStore.layout).toHaveLength(1);
- reportsStore.hideKey("boolean");
- expect(reportsStore.layout).toHaveLength(0);
+ await reportStore.setReport(makeFakeReport());
+ expect(reportStore.layout).toHaveLength(0);
});
it("Can poll the backend.", async () => {
- const reportsStore = useReportsStore();
+ const reportStore = useReportStore();
- reportsStore.selectedReportUri = uri;
- await reportsStore.startBackendPolling();
- expect(fetchAllManderUris).toBeCalled();
- expect(fetchMander).toBeCalled();
- reportsStore.stopBackendPolling();
+ await reportStore.startBackendPolling();
+ expect(fetchReport).toBeCalled();
+ reportStore.stopBackendPolling();
});
- it("Can move keys in layout.", () => {
- const ds = makeDataStore(uri, [...infoKeys, ...plotKeys]);
- const reportsStore = useReportsStore();
-
- reportsStore.setSelectedReportIfDifferent(ds);
- reportsStore.displayKey("boolean");
- reportsStore.displayKey("integer");
- reportsStore.setKeyLayoutSize("boolean", "large");
- reportsStore.setKeyLayoutSize("integer", "large");
- expect(reportsStore.layout).toEqual([
- { key: "boolean", size: "large" },
- { key: "integer", size: "large" },
+ it("Can move keys in layout.", async () => {
+ const reportStore = useReportStore();
+
+ await reportStore.setReport(makeFakeReport());
+ reportStore.displayKey("Any");
+ reportStore.displayKey("Array");
+ reportStore.setKeyLayoutSize("Any", "large");
+ reportStore.setKeyLayoutSize("Array", "large");
+ expect(reportStore.layout).toEqual([
+ { key: "Any", size: "large" },
+ { key: "Array", size: "large" },
]);
- reportsStore.moveKey("integer", "up");
- expect(reportsStore.layout).toEqual([
- { key: "integer", size: "large" },
- { key: "boolean", size: "large" },
+ reportStore.moveKey("Array", "up");
+ expect(reportStore.layout).toEqual([
+ { key: "Array", size: "large" },
+ { key: "Any", size: "large" },
]);
- reportsStore.moveKey("integer", "down");
- expect(reportsStore.layout).toEqual([
- { key: "boolean", size: "large" },
- { key: "integer", size: "large" },
+ reportStore.moveKey("Array", "down");
+ expect(reportStore.layout).toEqual([
+ { key: "Any", size: "large" },
+ { key: "Array", size: "large" },
]);
});
-
- it("Can persist layout.", () => {
- const reportsStore = useReportsStore();
-
- const ds = makeDataStore(
- uri,
- [...infoKeys, ...plotKeys],
- [
- { key: "boolean", size: "large" },
- { key: "integer", size: "large" },
- ]
- );
- mockedFetch.mockResolvedValue(createFetchResponse(ds, 201));
-
- reportsStore.setSelectedReportIfDifferent(ds);
- reportsStore.displayKey("boolean");
- reportsStore.displayKey("integer");
- reportsStore.setKeyLayoutSize("boolean", "large");
- reportsStore.setKeyLayoutSize("integer", "large");
- expect(putLayout).toBeCalledTimes(2);
- });
});
diff --git a/frontend/tests/test.utils.ts b/frontend/tests/test.utils.ts
index e3d6aced6..788e0a563 100644
--- a/frontend/tests/test.utils.ts
+++ b/frontend/tests/test.utils.ts
@@ -1,4 +1,3 @@
-import { DataStore, type IPayloadItem, type ItemType, type Layout } from "@/models";
import { flushPromises, mount } from "@vue/test-utils";
import { vi } from "vitest";
import { type ComponentPublicInstance, defineComponent, h, Suspense } from "vue";
@@ -39,36 +38,3 @@ export async function mountSuspense(
await flushPromises();
return wrapper;
}
-
-/**
- * Create a a fake payload item.
- * @param type the type of the payload item
- * @param data the inner data of the item
- * @returns an object with IPayloadItem interface
- */
-export function makePayloadItem(type: ItemType, data: any = {}): IPayloadItem {
- const now = new Date().toISOString();
- return {
- type,
- data,
- metadata: {
- display_type: type,
- created_at: now,
- updated_at: now,
- },
- };
-}
-
-/**
- * Create a fake `DataStore` model.
- * @param uri the uri of the DataStore
- * @param types a list of types that must be created
- * @param layout the layout to store
- */
-export function makeDataStore(uri: string, types: ItemType[], layout: Layout = []) {
- const payload = types.reduce(
- (previous, current) => ({ ...previous, [current]: makePayloadItem(current) }),
- {}
- );
- return new DataStore(uri, payload, layout);
-}
diff --git a/frontend/tests/views/ReportBuilderView.spec.ts b/frontend/tests/views/ReportBuilderView.spec.ts
index 8434fd04b..ff4eda9da 100644
--- a/frontend/tests/views/ReportBuilderView.spec.ts
+++ b/frontend/tests/views/ReportBuilderView.spec.ts
@@ -4,25 +4,15 @@ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { createApp } from "vue";
import { useRoute } from "vue-router";
-import type { ItemType } from "@/models";
import { ROUTE_NAMES } from "@/router";
import ReportBuilderView from "@/views/ReportBuilderView.vue";
-import { makeDataStore, mountSuspense } from "../test.utils";
-
-const uri = "/a/b";
-const keys: ItemType[] = ["boolean", "integer", "vega", "matplotlib_figure"];
+import { mountSuspense } from "../test.utils";
vi.mock("@/services/api", () => {
- const fetchAllManderUris = vi.fn().mockImplementation(() => {
- return [uri];
- });
- const fetchMander = vi.fn().mockImplementation(() => {
- return makeDataStore(uri, keys);
- });
- const putLayout = vi.fn().mockImplementation(() => {
- return makeDataStore(uri, keys);
+ const fetchReport = vi.fn().mockImplementation(() => {
+ return { items: {}, layout: [] };
});
- return { fetchAllManderUris, fetchMander, putLayout };
+ return { fetchReport };
});
describe("ReportBuilderView", () => {
@@ -41,12 +31,10 @@ describe("ReportBuilderView", () => {
it("Renders properly", async () => {
vi.mocked(useRoute).mockImplementationOnce(() => ({
- fullPath: `/${ROUTE_NAMES.REPORT_BUILDER}/a/b`,
- path: `/${ROUTE_NAMES.REPORT_BUILDER}/a/b`,
+ fullPath: `/${ROUTE_NAMES.REPORT_BUILDER}`,
+ path: `/${ROUTE_NAMES.REPORT_BUILDER}`,
query: {},
- params: {
- segments: ["a", "b"],
- },
+ params: {},
matched: [],
name: ROUTE_NAMES.REPORT_BUILDER,
hash: "",
diff --git a/notebooks/basic_usage.ipynb b/notebooks/basic_usage.ipynb
index e50fa92fe..4e0bf4c42 100644
--- a/notebooks/basic_usage.ipynb
+++ b/notebooks/basic_usage.ipynb
@@ -2,6 +2,7 @@
"cells": [
{
"cell_type": "markdown",
+ "id": "31f5797e",
"metadata": {},
"source": [
"# Getting started with `skore`"
@@ -9,17 +10,19 @@
},
{
"cell_type": "markdown",
+ "id": "5eaf0f80",
"metadata": {},
"source": [
"# Introduction\n",
"\n",
- "The purpose of this guide is to illustrate some of the main features that `skore` provides. Please refer to our installation instructions for installing `skore`. \n",
+ "The purpose of this guide is to illustrate some of the main features that `skore` provides. Please refer to our installation instructions for installing `skore`.\n",
"\n",
- "Given to you by [:probabl.](https://probabl.ai/), `skore` is a powerful tool that allows data scientists to create tracking and clear reports from their Python code, typically a notebook. For example, this notebook generated a skore dashboard that was then exported into [this HTML file](https://drive.google.com/file/d/1wPUTWBov6lWVivnbkLyhzUis3aQJsSjV/view?usp=share_link)."
+ "Given to you by [:probabl.](https://probabl.ai/), `skore` is a powerful tool that allows data scientists to create tracking and clear reports from their Python code, typically a notebook. For example, see [this HTML file](https://gist.github.com/augustebaum/6b21dbd7f7d5a584fbf2c1956692574e): download it and open it in your browser to visualize it."
]
},
{
"cell_type": "markdown",
+ "id": "817609d9",
"metadata": {},
"source": [
"## Imports"
@@ -28,77 +31,95 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "d6f56c15",
"metadata": {},
"outputs": [],
"source": [
- "import pandas as pd\n",
- "import numpy as np\n",
- "import altair as alt\n",
+ "# ruff: noqa\n",
"\n",
+ "import altair as alt\n",
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import PIL\n",
"from sklearn.datasets import load_diabetes\n",
"from sklearn.linear_model import Lasso\n",
"from sklearn.pipeline import Pipeline\n",
"from sklearn.preprocessing import StandardScaler\n",
- "\n",
- "from skore import Store"
+ "from skore import load"
]
},
{
"cell_type": "markdown",
+ "id": "b68aa7de",
"metadata": {},
"source": [
- "# Initialize and use a Store\n",
+ "# Initialize and use a Project\n",
"\n",
- "To initialize a Store, we need to give it a root path, which amounts to giving it a name:"
+ "To initialize a Project, we need to give it a name, or equivalently a file path:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "23f035b6",
"metadata": {},
"outputs": [],
"source": [
- "root_store = Store(\"root\")"
+ "# Create a project at path './project.skore'\n",
+ "!python -m skore create 'project.skore'"
]
},
{
- "cell_type": "code",
- "execution_count": null,
+ "cell_type": "markdown",
+ "id": "1fbcfbbf",
"metadata": {},
- "outputs": [],
"source": [
- "store = Store(\"root/basic_usage\")"
+ "This will create a Skore project directory named \"project.skore\" in the current directory."
]
},
{
"cell_type": "markdown",
+ "id": "964267f0",
"metadata": {},
"source": [
- "Here, the name is `basic_usage`.\n",
- "\n",
- "This abstract path lets you express a hierarchy between Stores (so a Store can contain Stores).\n",
- "\n",
- "A Store also needs some physical storage to get and put items from/into.\n",
- "By default, this storage will be in a `.datamander` directory in the current working directory.\n",
- "\n",
- "Now that you have created the `.datamander` folder (even though nothing has yet been stored), you can run the dashboard (in your project root i.e. where `.datamander` is):\n",
+ "Now that you have created the `project.skore` folder (even though nothing has yet been stored), you can run the ui (in your project root i.e. where `project.skore` is):\n",
"```python3\n",
- "$ python -m skore launch .datamander\n",
+ "$ python -m skore launch project.skore\n",
"```\n",
"\n",
- ">*Note*: If you already had some data in your `.datamander` directory from a previous run -- you can check for that by using:\n",
+ ">*Note*: If you already had some data in your `project.skore` directory from a previous run -- you can check for that in your shell by using:\n",
">```python3\n",
">$ ls -a\n",
">```\n",
- ">and if you no longer need its objects, we recommend deleting this folder by running:\n",
+ ">and if you no longer need its objects, we recommend deleting this folder by running `rm` in your shell:\n",
">```python3\n",
- ">$ rm -rf .datamander\n",
+ ">$ rm -r project.skore\n",
">```\n",
- ">This deletion needs to be done before the cells above: before initializing the store and before launching the dashboard!"
+ ">This deletion needs to be done before the cells above: before initializing the store and before launching the ui!"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1d7a45e5",
+ "metadata": {},
+ "source": [
+ "Now that the project file exists, we can load it in our script so that we can read from and write to it:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6e4d51c7",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "project = load(\"project.skore\")"
]
},
{
"cell_type": "markdown",
+ "id": "0419195c",
"metadata": {},
"source": [
"## Storing an integer"
@@ -106,6 +127,7 @@
},
{
"cell_type": "markdown",
+ "id": "05f7f17f",
"metadata": {},
"source": [
"Now, let us store our first object, for example an integer:"
@@ -114,48 +136,54 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "31fc4cc8",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\"my_int\", 3)"
+ "project.put(\"my_int\", 3)"
]
},
{
"cell_type": "markdown",
+ "id": "dc643253",
"metadata": {},
"source": [
"Here, the name of my object is `my_int` and the integer value is 3.\n",
"\n",
- "You can read it from the Store:"
+ "You can read it from the Project:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "19053a47",
"metadata": {},
"outputs": [],
"source": [
- "store.read(\"my_int\")"
+ "project.get(\"my_int\")"
]
},
{
"cell_type": "markdown",
+ "id": "8af44c95",
"metadata": {},
"source": [
- "The `insert` method will raise an error if the object already exists in order to prevent accidentally overwriting data in the Store. If you wish to change the value of this object, you must update its value:"
+ "Careful; like in a normal Python dictionary, the `put` method **will overwrite** past data if you use a key which already exists!"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "5f3ffd54",
"metadata": {},
"outputs": [],
"source": [
- "store.update(\"my_int\", 4)"
+ "project.put(\"my_int\", 30000)"
]
},
{
"cell_type": "markdown",
+ "id": "72a29907",
"metadata": {},
"source": [
"Let us check the updated value:"
@@ -164,56 +192,62 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "00f983cc",
"metadata": {},
"outputs": [],
"source": [
- "store.read(\"my_int\")"
+ "project.get(\"my_int\")"
]
},
{
"cell_type": "markdown",
+ "id": "11e83006",
"metadata": {},
"source": [
- "By using the `delete` method, you can also delete an object so that your `skore` dashboard does not become cluttered:"
+ "By using the `delete_item` method, you can also delete an object so that your `skore` UI does not become cluttered:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "97d162d2",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\"my_int_2\", 10)"
+ "project.put(\"my_int_2\", 10)"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "94f7001a",
"metadata": {},
"outputs": [],
"source": [
- "store.delete(\"my_int_2\")"
+ "project.delete_item(\"my_int_2\")"
]
},
{
"cell_type": "markdown",
+ "id": "17b5f6a0",
"metadata": {},
"source": [
- "You can also display all the objects in your store directly from Python:"
+ "You can use `Project.list_keys` to display all the keys in your project:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "e7f30789",
"metadata": {},
"outputs": [],
"source": [
- "for key, value in store.items():\n",
- " print(f\"Key {key} corresponds to value {value}\")"
+ "project.list_keys()"
]
},
{
"cell_type": "markdown",
+ "id": "12b5a0c0",
"metadata": {},
"source": [
"## Storing a string"
@@ -221,6 +255,7 @@
},
{
"cell_type": "markdown",
+ "id": "d38fcff8",
"metadata": {},
"source": [
"We just stored a integer, now let us store some text using strings!"
@@ -229,82 +264,107 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "90bbe0ec",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\"my_string\", \"Hello world!\")"
+ "project.put(\"my_string\", \"Hello world!\")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "4b805bd5",
"metadata": {},
"outputs": [],
"source": [
- "store.read(\"my_string\")"
+ "project.get(\"my_string\")"
]
},
{
"cell_type": "markdown",
+ "id": "6348a8dc",
"metadata": {},
"source": [
- "The Store infers the type of the inserted object by default. For example, strings are assumed to be in Markdown format. Hence, you can customize the display of your text:"
+ "`Project.get` infers the type of the inserted object by default. For example, strings are assumed to be in Markdown format. Hence, you can customize the display of your text:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "b67679af",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put(\n",
" \"my_string_2\",\n",
" (\n",
- "\"\"\"Hello world!, **bold**, *italic*, `code` \n",
+ " \"\"\"Hello world!, **bold**, *italic*, `code`\n",
"\n",
"```python\n",
"def my_func(x):\n",
" return x+2\n",
"```\n",
"\"\"\"\n",
- " )\n",
+ " ),\n",
")"
]
},
{
"cell_type": "markdown",
+ "id": "8f8bc0e3",
"metadata": {},
"source": [
- "Moreover, you can also explicitly set the type, for example in HTML:"
+ "Moreover, you can also explicitly tell Skore the media type of an object, for example in HTML:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f5ac6835",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from skore.item import MediaItem"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "2e93653f",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\"my_string_3\", \"
Title
bold,
italic, etc.\", display_type=\"html\")"
+ "# Note we use `put_item` instead of `put`\n",
+ "project.put_item(\n",
+ " \"my_string_3\",\n",
+ " MediaItem.factory(\n",
+ " \"
Title
bold,
italic, etc.\", media_type=\"text/html\"\n",
+ " ),\n",
+ ")"
]
},
{
"cell_type": "markdown",
+ "id": "aeac33be",
"metadata": {},
"source": [
- "Note that the display type is only used for the dashboard, and not in this notebook at hand:"
+ "Note that the media type is only used for the UI, and not in this notebook at hand:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "37a9c8ae",
"metadata": {},
"outputs": [],
"source": [
- "store.read(\"my_string_3\")"
+ "project.get(\"my_string_3\")"
]
},
{
"cell_type": "markdown",
+ "id": "3c52a9dd",
"metadata": {},
"source": [
"You can also conveniently use Python f-strings:"
@@ -313,19 +373,18 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "427533bb",
"metadata": {},
"outputs": [],
"source": [
"x = 2\n",
"y = [1, 2, 3, 4]\n",
- "store.insert(\n",
- " \"my_string_4\",\n",
- " f\"The value of `x` is {x} and the value of `y` is {y}.\"\n",
- ")"
+ "project.put(\"my_string_4\", f\"The value of `x` is {x} and the value of `y` is {y}.\")"
]
},
{
"cell_type": "markdown",
+ "id": "f128b0c2",
"metadata": {},
"source": [
"## Storing many kinds of data"
@@ -333,6 +392,7 @@
},
{
"cell_type": "markdown",
+ "id": "8059cd6b",
"metadata": {},
"source": [
"Python list:"
@@ -341,15 +401,17 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "271d5e8b",
"metadata": {},
"outputs": [],
"source": [
"my_list = [1, 2, 3, 4]\n",
- "store.insert(\"my_list\", my_list)"
+ "project.put(\"my_list\", my_list)"
]
},
{
"cell_type": "markdown",
+ "id": "9ec88d84",
"metadata": {},
"source": [
"Python dictionary:"
@@ -358,6 +420,7 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "8d6cb2b3",
"metadata": {},
"outputs": [],
"source": [
@@ -365,11 +428,12 @@
" \"company\": \"probabl\",\n",
" \"year\": 2023,\n",
"}\n",
- "store.insert(\"my_dict\", my_dict)"
+ "project.put(\"my_dict\", my_dict)"
]
},
{
"cell_type": "markdown",
+ "id": "893e2231",
"metadata": {},
"source": [
"NumPy array:"
@@ -378,15 +442,17 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "cba91e88",
"metadata": {},
"outputs": [],
"source": [
"my_arr = np.random.randn(3, 3)\n",
- "store.insert(\"my_arr\", my_arr)"
+ "project.put(\"my_arr\", my_arr)"
]
},
{
"cell_type": "markdown",
+ "id": "7fedf721",
"metadata": {},
"source": [
"Pandas data frame:"
@@ -395,25 +461,26 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "76e90ff7",
"metadata": {},
"outputs": [],
"source": [
"my_df = pd.DataFrame(np.random.randn(5, 3))\n",
- "store.insert(\"my_df\", my_df)"
+ "project.put(\"my_df\", my_df)"
]
},
{
"cell_type": "markdown",
+ "id": "727b319c",
"metadata": {},
"source": [
- "## Storing Altair plots\n",
- "\n",
- "As of today, only Altair plots are supported, but many more will follow! Feel free to contribute!"
+ "Altair Charts:"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "b47150af",
"metadata": {},
"outputs": [],
"source": [
@@ -430,16 +497,62 @@
" .properties(title=\"My title\")\n",
")\n",
"\n",
- "store.insert(\"my_chart\", my_chart)"
+ "project.put(\"my_chart\", my_chart)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "46cc9344",
+ "metadata": {},
+ "source": [
+ "Matplotlib Figures:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "37893937",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "fig, ax = plt.subplots()\n",
+ "ax.plot([1, 2, 3, 4], [1, 4, 2, 3])\n",
+ "\n",
+ "project.put(\"my_figure\", fig)"
]
},
{
"cell_type": "markdown",
+ "id": "9aed47d9",
+ "metadata": {},
+ "source": [
+ "PIL images:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "183c2e51",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import io\n",
+ "\n",
+ "pil_image = PIL.Image.new(\"RGB\", (100, 100), color=\"red\")\n",
+ "with io.BytesIO() as output:\n",
+ " pil_image.save(output, format=\"png\")\n",
+ "\n",
+ "project.put(\"pil_image\", pil_image)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ac2d4024",
"metadata": {},
"source": [
"## Scikit-learn model\n",
"\n",
- "As `skore` is developed by :probabl., the spin-off of scikit-learn, `skore` can naturally handle scikit-learn models and pipelines.\n",
+ "As `skore` is developed by :probabl., the spin-off of scikit-learn, `skore` treats scikit-learn models and pipelines as first-class citizens.\n",
"\n",
"First of all, you can store a scikit-learn model:"
]
@@ -447,15 +560,17 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "b1e071f7",
"metadata": {},
"outputs": [],
"source": [
"my_model = Lasso(alpha=2)\n",
- "store.insert(\"my_model\", my_model)"
+ "project.put(\"my_model\", my_model)"
]
},
{
"cell_type": "markdown",
+ "id": "124c4690",
"metadata": {},
"source": [
"You can also store scikit-learn pipelines:"
@@ -464,20 +579,19 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "238a8fe5",
"metadata": {},
"outputs": [],
"source": [
"my_pipeline = Pipeline(\n",
- " [\n",
- " (\"standard_scaler\", StandardScaler()),\n",
- " (\"lasso\", Lasso(alpha=2))\n",
- " ]\n",
+ " [(\"standard_scaler\", StandardScaler()), (\"lasso\", Lasso(alpha=2))]\n",
")\n",
- "store.insert(\"my_pipeline\", my_pipeline)"
+ "project.put(\"my_pipeline\", my_pipeline)"
]
},
{
"cell_type": "markdown",
+ "id": "7235e282",
"metadata": {},
"source": [
"Moreover, you can store fitted scikit-learn pipelines:"
@@ -486,6 +600,7 @@
{
"cell_type": "code",
"execution_count": null,
+ "id": "deb9d103",
"metadata": {},
"outputs": [],
"source": [
@@ -494,11 +609,12 @@
"y = diabetes.target[:150]\n",
"my_pipeline.fit(X, y)\n",
"\n",
- "store.insert(\"my_fitted_pipeline\", my_pipeline)"
+ "project.put(\"my_fitted_pipeline\", my_pipeline)"
]
},
{
"cell_type": "markdown",
+ "id": "e6b090d3",
"metadata": {},
"source": [
"_Stay tuned for some new features!_"
@@ -506,139 +622,149 @@
},
{
"cell_type": "markdown",
+ "id": "555665f0",
"metadata": {},
"source": [
"---\n",
- "# Manipulating the skore dashboard\n",
+ "# Manipulating the skore UI\n",
"\n",
- "The following is just some skore strings that we generate in order to provide more context on the obtained dashboard."
+ "The following is just some skore strings that we generate in order to provide more context on the obtained report."
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "30f6f3bf",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_1\",\n",
- " \"
Welcome to skore!
Given to you by :probabl., skore is a powerful tool that allows data scientists to create tracking and clear reports from their Python code, typically a notebook. This HTML document is actually a skore dashboard generated using the `basic_usage.ipynb` notebook that has been exported (into HTML)!
\"\n",
+ " MediaItem.factory(\n",
+ " \"
Welcome to skore!
Given to you by :probabl., skore is a powerful tool that allows data scientists to create tracking and clear reports from their Python code, typically a notebook. This HTML document is actually a skore report generated using the `basic_usage.ipynb` notebook that has been exported (into HTML)!
\",\n",
+ " media_type=\"text/html\",\n",
+ " ),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "7e5ed756",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_2\",\n",
- " \"
Integers\"\n",
+ " MediaItem.factory(\"
Integers\", media_type=\"text/html\"),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "2108a347",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
- " \"my_comment_3\",\n",
- " \"
Strings\"\n",
+ "project.put_item(\n",
+ " \"my_comment_3\", MediaItem.factory(\"
Strings\", media_type=\"text/html\")\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "067e4fdd",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_4\",\n",
- " \"
Many kinds of data\"\n",
+ " MediaItem.factory(\"
Many kinds of data\", media_type=\"text/html\"),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "da81ae66",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_5\",\n",
- " \"
Altair plots\"\n",
+ " MediaItem.factory(\"
Altair plots\", media_type=\"text/html\"),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "c65be8c4",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_6\",\n",
- " \"
Scikit-learn models and pipelines\"\n",
+ " MediaItem.factory(\n",
+ " \"
Scikit-learn models and pipelines\", media_type=\"text/html\"\n",
+ " ),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "e1365efe",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_7\",\n",
- " \"
Manipulating the skore dashboard\"\n",
+ " MediaItem.factory(\n",
+ " \"
Manipulating the skore report\", media_type=\"text/html\"\n",
+ " ),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "e908bcc2",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\n",
+ "project.put_item(\n",
" \"my_comment_8\",\n",
- " \"
Once you have created cells in your Mandr dashboard, you can place them where you want: move them up or down, delete them, for example put one to the left of a graph to comment on it.
\"\n",
+ " MediaItem.factory(\n",
+ " \"
Once you have created cells in your Skore report, you can place them where you want: move them up or down, delete them, for example put one to the left of a graph to comment on it.
\",\n",
+ " media_type=\"text/html\",\n",
+ " ),\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
+ "id": "b35e24c8",
"metadata": {},
"outputs": [],
"source": [
- "store.insert(\"my_chart_2\", my_chart)"
+ "project.put(\"my_chart_2\", my_chart)"
]
}
],
"metadata": {
+ "jupytext": {
+ "formats": "ipynb,py"
+ },
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.12.4"
}
},
"nbformat": 4,
- "nbformat_minor": 2
+ "nbformat_minor": 5
}
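The notebook above exercises a small surface of the Project API introduced by this patch: `python -m skore create`, `load`, `put`/`get`, `put_item` with `MediaItem.factory`, `delete_item`, and `list_keys`. The following is a minimal consolidated sketch of that round trip, not part of the patch; it assumes a project already created with `python -m skore create 'project.skore'`, and the key name `my_html_note` is illustrative only.

```python
# Minimal sketch of the Project calls used in basic_usage.ipynb.
# Assumes `python -m skore create 'project.skore'` has already been run.
from skore import load
from skore.item import MediaItem

project = load("project.skore")

# Native Python objects go through `put` and come back with `get`.
project.put("my_int", 3)
assert project.get("my_int") == 3

# `put_item` + `MediaItem.factory` stores content with an explicit media type
# (the key name `my_html_note` is illustrative, not taken from the patch).
project.put_item(
    "my_html_note",
    MediaItem.factory("<h1>A note</h1>", media_type="text/html"),
)

# Keys can be listed, and individual items deleted.
print(project.list_keys())
project.delete_item("my_html_note")
```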
diff --git a/notebooks/basic_usage.py b/notebooks/basic_usage.py
new file mode 100644
index 000000000..f2d326a62
--- /dev/null
+++ b/notebooks/basic_usage.py
@@ -0,0 +1,292 @@
+# ---
+# jupyter:
+# jupytext:
+# formats: ipynb,py
+# text_representation:
+# extension: .py
+# format_name: light
+# format_version: '1.5'
+# jupytext_version: 1.16.1
+# kernelspec:
+# display_name: .venv
+# language: python
+# name: python3
+# ---
+
+# # Getting started with `skore`
+
+# # Introduction
+#
+# The purpose of this guide is to illustrate some of the main features that `skore` provides. Please refer to our installation instructions for installing `skore`.
+#
+# Given to you by [:probabl.](https://probabl.ai/), `skore` is a powerful tool that allows data scientists to create tracking and clear reports from their Python code, typically a notebook. For example, see [this HTML file](https://gist.github.com/augustebaum/6b21dbd7f7d5a584fbf2c1956692574e): download it and open it in your browser to visualize it.
+
+# ## Imports
+
+# +
+# ruff: noqa
+
+import altair as alt
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+import PIL
+from sklearn.datasets import load_diabetes
+from sklearn.linear_model import Lasso
+from sklearn.pipeline import Pipeline
+from sklearn.preprocessing import StandardScaler
+from skore import load
+# -
+
+# # Initialize and use a Project
+#
+# To initialize a Project, we need to give it a name, or equivalently a file path:
+
+# Create a project at path './project.skore'
+# !python -m skore create 'project.skore'
+
+# This will create a Skore project directory named "project.skore" in the current directory.
+
+# Now that you have created the `project.skore` folder (even though nothing has yet been stored), you can run the UI (in your project root, i.e. where `project.skore` is):
+# ```python3
+# $ python -m skore launch project.skore
+# ```
+#
+# >*Note*: If you already had some data in your `project.skore` directory from a previous run -- you can check for that in your shell by using:
+# >```python3
+# >$ ls -a
+# >```
+# >and if you no longer need its objects, we recommend deleting this folder by running `rm` in your shell:
+# >```python3
+# >$ rm -r project.skore
+# >```
+# >This deletion needs to be done before the cells above: before initializing the project and before launching the UI!
+
+# Now that the project file exists, we can load it in our script so that we can read from and write to it:
+
+project = load("project.skore")
+
+# ## Storing an integer
+
+# Now, let us store our first object, for example an integer:
+
+project.put("my_int", 3)
+
+# Here, the name of my object is `my_int` and the integer value is 3.
+#
+# You can read it from the Project:
+
+project.get("my_int")
+
+# Careful; like in a normal Python dictionary, the `put` method **will overwrite** past data if you use a key which already exists!
+
+project.put("my_int", 30000)
+
+# Let us check the updated value:
+
+project.get("my_int")
+
+# By using the `delete_item` method, you can also delete an object so that your `skore` UI does not become cluttered:
+
+project.put("my_int_2", 10)
+
+project.delete_item("my_int_2")
+
+# You can use `Project.list_keys` to display all the keys in your project:
+
+project.list_keys()
+
+# ## Storing a string
+
+# We just stored an integer; now let us store some text using strings!
+
+project.put("my_string", "Hello world!")
+
+project.get("my_string")
+
+# `Project.get` infers the type of the inserted object by default. For example, strings are assumed to be in Markdown format. Hence, you can customize the display of your text:
+
+project.put(
+ "my_string_2",
+ (
+ """Hello world!, **bold**, *italic*, `code`
+
+```python
+def my_func(x):
+ return x+2
+```
+"""
+ ),
+)
+
+# Moreover, you can also explicitly tell Skore the media type of an object, for example in HTML:
+
+from skore.item import MediaItem
+
+# Note we use `put_item` instead of `put`
+project.put_item(
+ "my_string_3",
+ MediaItem.factory(
+ "
Title
bold,
italic, etc.", media_type="text/html"
+ ),
+)
+
+# Note that the media type is only used for the UI, and not in this notebook at hand:
+
+project.get("my_string_3")
+
+# You can also conveniently use Python f-strings:
+
+x = 2
+y = [1, 2, 3, 4]
+project.put("my_string_4", f"The value of `x` is {x} and the value of `y` is {y}.")
+
+# ## Storing many kinds of data
+
+# Python list:
+
+my_list = [1, 2, 3, 4]
+project.put("my_list", my_list)
+
+# Python dictionary:
+
+my_dict = {
+ "company": "probabl",
+ "year": 2023,
+}
+project.put("my_dict", my_dict)
+
+# NumPy array:
+
+my_arr = np.random.randn(3, 3)
+project.put("my_arr", my_arr)
+
+# Pandas data frame:
+
+my_df = pd.DataFrame(np.random.randn(5, 3))
+project.put("my_df", my_df)
+
+# Altair Charts:
+
+# +
+num_points = 100
+df_plot = pd.DataFrame(
+ {"x": np.random.randn(num_points), "y": np.random.randn(num_points)}
+)
+
+my_chart = (
+ alt.Chart(df_plot)
+ .mark_circle()
+ .encode(x="x", y="y", tooltip=["x", "y"])
+ .interactive()
+ .properties(title="My title")
+)
+
+project.put("my_chart", my_chart)
+# -
+
+# Matplotlib Figures:
+
+# +
+fig, ax = plt.subplots()
+ax.plot([1, 2, 3, 4], [1, 4, 2, 3])
+
+project.put("my_figure", fig)
+# -
+
+# PIL images:
+
+# +
+import io
+
+pil_image = PIL.Image.new("RGB", (100, 100), color="red")
+with io.BytesIO() as output:
+ pil_image.save(output, format="png")
+
+project.put("pil_image", pil_image)
+# -
+
+# ## Scikit-learn model
+#
+# As `skore` is developed by :probabl., the spin-off of scikit-learn, `skore` treats scikit-learn models and pipelines as first-class citizens.
+#
+# First of all, you can store a scikit-learn model:
+
+my_model = Lasso(alpha=2)
+project.put("my_model", my_model)
+
+# You can also store scikit-learn pipelines:
+
+my_pipeline = Pipeline(
+ [("standard_scaler", StandardScaler()), ("lasso", Lasso(alpha=2))]
+)
+project.put("my_pipeline", my_pipeline)
+
+# Moreover, you can store fitted scikit-learn pipelines:
+
+# +
+diabetes = load_diabetes()
+X = diabetes.data[:150]
+y = diabetes.target[:150]
+my_pipeline.fit(X, y)
+
+project.put("my_fitted_pipeline", my_pipeline)
+# -
+
+# _Stay tuned for some new features!_
+
+# ---
+# # Manipulating the skore UI
+#
+# The following is just some skore strings that we generate in order to provide more context on the obtained report.
+
+project.put_item(
+ "my_comment_1",
+ MediaItem.factory(
+ "
Welcome to skore!
Given to you by :probabl., skore is a powerful tool that allows data scientists to create tracking and clear reports from their Python code, typically a notebook. This HTML document is actually a skore report generated using the `basic_usage.ipynb` notebook that has been exported (into HTML)!
",
+ media_type="text/html",
+ ),
+)
+
+project.put_item(
+ "my_comment_2",
+ MediaItem.factory("
Integers", media_type="text/html"),
+)
+
+project.put_item(
+ "my_comment_3", MediaItem.factory("
Strings", media_type="text/html")
+)
+
+project.put_item(
+ "my_comment_4",
+ MediaItem.factory("
Many kinds of data", media_type="text/html"),
+)
+
+project.put_item(
+ "my_comment_5",
+ MediaItem.factory("
Altair plots", media_type="text/html"),
+)
+
+project.put_item(
+ "my_comment_6",
+ MediaItem.factory(
+ "
Scikit-learn models and pipelines", media_type="text/html"
+ ),
+)
+
+project.put_item(
+ "my_comment_7",
+ MediaItem.factory(
+ "
Manipulating the skore report", media_type="text/html"
+ ),
+)
+
+project.put_item(
+ "my_comment_8",
+ MediaItem.factory(
+ "
Once you have created cells in your Skore report, you can place them where you want: move them up or down, delete them, for example put one to the left of a graph to comment on it.
",
+ media_type="text/html",
+ ),
+)
+
+project.put("my_chart_2", my_chart)
diff --git a/notebooks/skrub_demo.ipynb b/notebooks/skrub_demo.ipynb
new file mode 100644
index 000000000..c14a06a22
--- /dev/null
+++ b/notebooks/skrub_demo.ipynb
@@ -0,0 +1,453 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2f0b0800",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# ruff: noqa\n",
+ "import base64\n",
+ "from pathlib import Path\n",
+ "from time import time"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "cc37e639",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import altair as alt\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import seaborn as sns\n",
+ "from matplotlib import pyplot as plt\n",
+ "from sklearn.ensemble import HistGradientBoostingRegressor, RandomForestRegressor\n",
+ "from sklearn.inspection import permutation_importance\n",
+ "from sklearn.linear_model import RidgeCV\n",
+ "from sklearn.metrics import mean_absolute_error, mean_squared_error, r2_score\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.utils import Bunch\n",
+ "from skrub import TableReport, tabular_learner\n",
+ "from skrub.datasets import fetch_employee_salaries\n",
+ "from tqdm import tqdm"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "406992da",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from skore import load\n",
+ "from skore.item import MediaItem"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9084704d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "DIR_MANDER = \"datamander\"\n",
+ "PATH_PROJECT = Path(\"skrub_demo\")\n",
+ "N_SEEDS = 5"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a9290081",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Create a project at path './skrub_demo.skore'\n",
+ "!python -m skore create skrub_demo"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6c574556",
+ "metadata": {
+ "lines_to_next_cell": 2
+ },
+ "source": [
+ "Launch the web UI with `python -m skore launch skrub_demo`"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "305bd7cf",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def init_ridge():\n",
+ " return tabular_learner(RidgeCV())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "335f1a0e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def init_rf():\n",
+ " return tabular_learner(RandomForestRegressor(n_jobs=4))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "086c6ebd",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def init_gb():\n",
+ " return tabular_learner(HistGradientBoostingRegressor())"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "7a548401",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "INIT_MODEL_FUNC = {\n",
+ " \"ridge\": init_ridge,\n",
+ " \"rf\": init_rf,\n",
+ " \"gb\": init_gb,\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9803f8c2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def evaluate_models(model_names):\n",
+ " results = []\n",
+ " for model_name in model_names:\n",
+ " print(f\"{' Evaluating ' + model_name + ' ':=^50}\")\n",
+ " results.append(evaluate_seeds(model_name))\n",
+ "\n",
+ " project = load(PATH_PROJECT)\n",
+ " project.put_item(\n",
+ " \"skrub_report\",\n",
+ " MediaItem.factory(plot_skrub_report(), media_type=\"text/html\"),\n",
+ " )\n",
+ "\n",
+ " project.put(\"target_distribution\", plot_y_distribution())\n",
+ " project.put(\"Metrics\", plot_table_metrics(results))\n",
+ " project.put(\"R2 vs fit time\", plot_r2_vs_fit_time(results))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "34c4c685",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def evaluate_seeds(model_name):\n",
+ " path_model = PATH_PROJECT / model_name\n",
+ "\n",
+ " seed_scores = []\n",
+ " for random_state in tqdm(range(N_SEEDS)):\n",
+ " bunch = get_data(random_state)\n",
+ " model = INIT_MODEL_FUNC[model_name]()\n",
+ "\n",
+ " tic = time()\n",
+ " model.fit(bunch.X_train, bunch.y_train)\n",
+ " fit_time = time() - tic\n",
+ "\n",
+ " scores = evaluate(model, bunch)\n",
+ " scores.update(\n",
+ " {\n",
+ " \"random_state\": random_state,\n",
+ " \"model_name\": model_name,\n",
+ " \"fit_time\": fit_time,\n",
+ " }\n",
+ " )\n",
+ "\n",
+ " path_seed = path_model / f\"random_state{random_state}\"\n",
+ "\n",
+ " project = load(PATH_PROJECT)\n",
+ " project.put(path_seed / \"scores\", scores) # scores is a dict\n",
+ " project.put_item(\n",
+ " path_seed / \"model_repr\",\n",
+ " MediaItem.factory(plot_model_repr(model), media_type=\"text/html\"),\n",
+ " )\n",
+ " project.put(\n",
+ " path_seed / \"feature importance\", plot_feature_importance(model, bunch)\n",
+ " )\n",
+ " seed_scores.append(scores)\n",
+ "\n",
+ " agg_scores = aggregate_seeds_results(seed_scores)\n",
+ " project.put(path_model / \"agg_scores\", agg_scores)\n",
+ "\n",
+ " return agg_scores"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "091fceee",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def evaluate(model, bunch):\n",
+ " y_pred = model.predict(bunch.X_test)\n",
+ " y_test = bunch[\"y_test\"]\n",
+ "\n",
+ " r2 = r2_score(y_test, y_pred)\n",
+ " mae = mean_absolute_error(y_test, y_pred)\n",
+ " mse = mean_squared_error(y_test, y_pred)\n",
+ "\n",
+ " scores = {\n",
+ " \"y_pred\": y_pred.tolist(),\n",
+ " \"r2\": r2,\n",
+ " \"mae\": mae,\n",
+ " \"mse\": mse,\n",
+ " }\n",
+ "\n",
+ " return scores"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "376fe6df",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def aggregate_seeds_results(scores):\n",
+ " agg_score = dict()\n",
+ " for metric in [\"r2\", \"mae\", \"mse\", \"fit_time\"]:\n",
+ " score_seeds = [score[metric] for score in scores]\n",
+ " agg_score.update(\n",
+ " {\n",
+ " f\"mean_{metric}\": np.mean(score_seeds),\n",
+ " f\"std_{metric}\": np.std(score_seeds),\n",
+ " }\n",
+ " )\n",
+ "\n",
+ " agg_score[\"model_name\"] = scores[0][\"model_name\"]\n",
+ "\n",
+ " return agg_score"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "37ae1640",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def get_data(random_state, split=True):\n",
+ " dataset = fetch_employee_salaries()\n",
+ " X, y = dataset.X, dataset.y\n",
+ " if split:\n",
+ " X_train, X_test, y_train, y_test = train_test_split(\n",
+ " X, y, random_state=random_state\n",
+ " )\n",
+ " return Bunch(\n",
+ " X_train=X_train,\n",
+ " y_train=y_train,\n",
+ " X_test=X_test,\n",
+ " y_test=y_test,\n",
+ " )\n",
+ " else:\n",
+ " return Bunch(X=X, y=y)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "737d4087",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_table_metrics(results):\n",
+ " df = pd.DataFrame(results)\n",
+ " rename = {\n",
+ " \"r2\": \"R2 (↑)\",\n",
+ " \"mse\": \"MSE (↓)\",\n",
+ " \"mae\": \"MAE (↓)\",\n",
+ " \"fit_time\": \"Fit time (↓)\",\n",
+ " }\n",
+ "\n",
+ " for metric in [\"r2\", \"mae\", \"mse\", \"fit_time\"]:\n",
+ " mean_key, std_key = f\"mean_{metric}\", f\"std_{metric}\"\n",
+ " df[rename[metric]] = (\n",
+ " df[mean_key].round(4).astype(str) + \" ± \" + df[std_key].round(4).astype(str)\n",
+ " )\n",
+ " df = df.drop([mean_key, std_key], axis=1)\n",
+ "\n",
+ " return df"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "43b46d58",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import matplotlib.figure\n",
+ "\n",
+ "\n",
+ "def plot_r2_vs_fit_time(results) -> matplotlib.figure.Figure:\n",
+ " df = pd.DataFrame(results)\n",
+ "\n",
+ " model_names = df[\"model_name\"].tolist()\n",
+ " palette = dict(\n",
+ " zip(\n",
+ " list(model_names),\n",
+ " sns.color_palette(\"colorblind\", n_colors=len(model_names)),\n",
+ " )\n",
+ " )\n",
+ "\n",
+ " fig, ax = plt.subplots(figsize=(8, 5), dpi=100)\n",
+ " c = \"black\"\n",
+ " plt.errorbar(\n",
+ " x=df[\"mean_fit_time\"],\n",
+ " y=df[\"mean_r2\"],\n",
+ " yerr=df[\"std_r2\"],\n",
+ " fmt=\"none\",\n",
+ " c=c,\n",
+ " capsize=2,\n",
+ " )\n",
+ " plt.errorbar(\n",
+ " x=df[\"mean_fit_time\"],\n",
+ " xerr=df[\"std_fit_time\"],\n",
+ " y=df[\"mean_r2\"],\n",
+ " fmt=\"none\",\n",
+ " c=c,\n",
+ " capsize=2,\n",
+ " )\n",
+ " ax = sns.scatterplot(\n",
+ " df,\n",
+ " x=\"mean_fit_time\",\n",
+ " y=\"mean_r2\",\n",
+ " hue=\"model_name\",\n",
+ " s=200,\n",
+ " palette=palette,\n",
+ " zorder=10,\n",
+ " alpha=1,\n",
+ " )\n",
+ "\n",
+ " ax.grid()\n",
+ " sns.move_legend(ax, \"upper left\", bbox_to_anchor=(1, 1))\n",
+ " # plt.tight_layout()\n",
+ "\n",
+ " return fig"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "49227a48",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_skrub_report():\n",
+ " bunch = get_data(random_state=0, split=False)\n",
+ " df = pd.concat([bunch.X, bunch.y], axis=1)\n",
+ " return TableReport(df).html()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f7d07611",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_feature_importance(model, bunch) -> alt.Chart:\n",
+ " importances = permutation_importance(model, bunch.X_test, bunch.y_test, n_jobs=4)\n",
+ "\n",
+ " feature_imp = pd.DataFrame(\n",
+ " importances[\"importances\"].T, columns=bunch.X_train.columns\n",
+ " ).melt() # Convert the dataframe to a long format\n",
+ "\n",
+ " return (\n",
+ " alt.Chart(feature_imp)\n",
+ " .mark_boxplot(extent=\"min-max\")\n",
+ " .encode(\n",
+ " alt.X(\"value:Q\").scale(domain=[0, 1]),\n",
+ " alt.Y(\"variable:N\"),\n",
+ " )\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "df31f379",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_y_distribution() -> alt.Chart:\n",
+ " bunch = get_data(random_state=0, split=False)\n",
+ " df = pd.concat([bunch.X, bunch.y], axis=1)\n",
+ " N = min(1000, df.shape[0])\n",
+ " df = df.sample(N)\n",
+ "\n",
+ " # alt.data_transformers.enable(\"vegafusion\")\n",
+ "\n",
+ " return (\n",
+ " alt.Chart(df)\n",
+ " .mark_bar()\n",
+ " .encode(\n",
+ " x=alt.X(\"current_annual_salary:Q\", bin=alt.Bin(maxbins=30)),\n",
+ " y=\"count()\",\n",
+ " color=\"gender:N\",\n",
+ " )\n",
+ " .properties(width=600, height=400)\n",
+ " .interactive()\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f22c67e2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def plot_model_repr(model) -> str:\n",
+ " return model._repr_html_()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e0b1b4e8",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "if __name__ == \"__main__\":\n",
+ " evaluate_models(model_names=list(INIT_MODEL_FUNC))"
+ ]
+ }
+ ],
+ "metadata": {
+ "jupytext": {
+ "cell_metadata_filter": "-all",
+ "formats": "ipynb,py:percent",
+ "main_language": "python"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/notebooks/skrub_demo.py b/notebooks/skrub_demo.py
index 562486941..d13632411 100644
--- a/notebooks/skrub_demo.py
+++ b/notebooks/skrub_demo.py
@@ -1,8 +1,22 @@
+# ---
+# jupyter:
+# jupytext:
+# cell_metadata_filter: -all
+# formats: ipynb,py:percent
+# text_representation:
+# extension: .py
+# format_name: percent
+# format_version: '1.3'
+# jupytext_version: 1.16.1
+# ---
+
+# %%
# ruff: noqa
import base64
from pathlib import Path
from time import time
+# %%
import altair as alt
import numpy as np
import pandas as pd
@@ -18,26 +32,39 @@
from skrub.datasets import fetch_employee_salaries
from tqdm import tqdm
-from mandr import Mandr
-from mandr.storage import FileSystem
+# %%
+from skore import load
+from skore.item import MediaItem
+# %%
DIR_MANDER = "datamander"
PATH_PROJECT = Path("skrub_demo")
N_SEEDS = 5
+# %%
+# Create a project at path './skrub_demo.skore'
+# !python -m skore create skrub_demo
+# %% [markdown]
+# Launch the web UI with `python -m skore launch skrub_demo`
+
+
+# %%
def init_ridge():
return tabular_learner(RidgeCV())
+# %%
def init_rf():
return tabular_learner(RandomForestRegressor(n_jobs=4))
+# %%
def init_gb():
return tabular_learner(HistGradientBoostingRegressor())
+# %%
INIT_MODEL_FUNC = {
"ridge": init_ridge,
"rf": init_rf,
@@ -45,19 +72,25 @@ def init_gb():
}
+# %%
def evaluate_models(model_names):
results = []
for model_name in model_names:
print(f"{' Evaluating ' + model_name + ' ':=^50}")
results.append(evaluate_seeds(model_name))
- mander = get_mander(str(PATH_PROJECT))
- mander.insert("skrub_report", plot_skrub_report(), display_type="html")
- mander.insert("target_distribution", plot_y_distribution())
- mander.insert("Metrics", plot_table_metrics(results))
- mander.insert("R2 vs fit time", plot_r2_vs_fit_time(results), display_type="html")
+ project = load(PATH_PROJECT)
+ project.put_item(
+ "skrub_report",
+ MediaItem.factory(plot_skrub_report(), media_type="text/html"),
+ )
+
+ project.put("target_distribution", plot_y_distribution())
+ project.put("Metrics", plot_table_metrics(results))
+ project.put("R2 vs fit time", plot_r2_vs_fit_time(results))
+# %%
def evaluate_seeds(model_name):
path_model = PATH_PROJECT / model_name
@@ -80,19 +113,25 @@ def evaluate_seeds(model_name):
)
path_seed = path_model / f"random_state{random_state}"
- mander = get_mander(str(path_seed))
- mander.insert("scores", scores) # scores is a dict
- mander.insert("model_repr", plot_model_repr(model), display_type="html")
- mander.insert("feature importance", plot_feature_importance(model, bunch))
+
+ project = load(PATH_PROJECT)
+ project.put(path_seed / "scores", scores) # scores is a dict
+ project.put_item(
+ path_seed / "model_repr",
+ MediaItem.factory(plot_model_repr(model), media_type="text/html"),
+ )
+ project.put(
+ path_seed / "feature importance", plot_feature_importance(model, bunch)
+ )
seed_scores.append(scores)
agg_scores = aggregate_seeds_results(seed_scores)
- mander = get_mander(str(path_model))
- mander.insert("agg_scores", agg_scores)
+ project.put(path_model / "agg_scores", agg_scores)
return agg_scores
+# %%
def evaluate(model, bunch):
y_pred = model.predict(bunch.X_test)
y_test = bunch["y_test"]
@@ -111,6 +150,7 @@ def evaluate(model, bunch):
return scores
+# %%
def aggregate_seeds_results(scores):
agg_score = dict()
for metric in ["r2", "mae", "mse", "fit_time"]:
@@ -127,6 +167,7 @@ def aggregate_seeds_results(scores):
return agg_score
+# %%
def get_data(random_state, split=True):
dataset = fetch_employee_salaries()
X, y = dataset.X, dataset.y
@@ -144,6 +185,7 @@ def get_data(random_state, split=True):
return Bunch(X=X, y=y)
+# %%
def plot_table_metrics(results):
df = pd.DataFrame(results)
rename = {
@@ -163,7 +205,11 @@ def plot_table_metrics(results):
return df
-def plot_r2_vs_fit_time(results):
+# %%
+import matplotlib.figure
+
+
+def plot_r2_vs_fit_time(results) -> matplotlib.figure.Figure:
df = pd.DataFrame(results)
model_names = df["model_name"].tolist()
@@ -207,26 +253,25 @@ def plot_r2_vs_fit_time(results):
sns.move_legend(ax, "upper left", bbox_to_anchor=(1, 1))
# plt.tight_layout()
- filename = "r2_vs_fit_time.png"
- plt.savefig(filename)
-
- return convert_fig_to_html(filename)
+ return fig
+# %%
def plot_skrub_report():
bunch = get_data(random_state=0, split=False)
df = pd.concat([bunch.X, bunch.y], axis=1)
return TableReport(df).html()
-def plot_feature_importance(model, bunch):
+# %%
+def plot_feature_importance(model, bunch) -> alt.Chart:
importances = permutation_importance(model, bunch.X_test, bunch.y_test, n_jobs=4)
feature_imp = pd.DataFrame(
importances["importances"].T, columns=bunch.X_train.columns
).melt() # Convert the dataframe to a long format
- fig = (
+ return (
alt.Chart(feature_imp)
.mark_boxplot(extent="min-max")
.encode(
@@ -235,17 +280,17 @@ def plot_feature_importance(model, bunch):
)
)
- return fig
-
-def plot_y_distribution():
+# %%
+def plot_y_distribution() -> alt.Chart:
bunch = get_data(random_state=0, split=False)
df = pd.concat([bunch.X, bunch.y], axis=1)
N = min(1000, df.shape[0])
df = df.sample(N)
- alt.data_transformers.enable("vegafusion")
- fig = (
+ # alt.data_transformers.enable("vegafusion")
+
+ return (
alt.Chart(df)
.mark_bar()
.encode(
@@ -257,29 +302,12 @@ def plot_y_distribution():
.interactive()
)
- return fig
-
-def plot_model_repr(model):
+# %%
+def plot_model_repr(model) -> str:
return model._repr_html_()
-def get_mander(path):
- return Mandr(
- path,
- storage=FileSystem(directory=DIR_MANDER),
- )
-
-
-def delete_mander(mander):
- for k in list(mander):
- mander.delete(k)
-
-
-def convert_fig_to_html(path_file):
- data_uri = base64.b64encode(open(path_file, "rb").read()).decode("utf-8")
- return f'<img src="data:image/png;base64,{data_uri}" />'
-
-
+# %%
if __name__ == "__main__":
evaluate_models(model_names=list(INIT_MODEL_FUNC))
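For reference, a minimal sketch of the skore workflow the updated demo now relies on. It assumes skore is installed and that a project was created beforehand with "python -m skore create skrub_demo", as in the comments above; the key names follow the demo, the stored values are illustrative.

    from pathlib import Path

    from skore import load
    from skore.item import MediaItem

    # Open the project created by `python -m skore create skrub_demo`
    # (the demo passes a Path; the directory on disk is ./skrub_demo.skore).
    project = load(Path("skrub_demo"))

    # `put` stores plain Python objects: dicts, DataFrames, Altair charts,
    # Matplotlib figures, as the demo does for its scores and plots.
    project.put("Metrics", {"mean_r2": 0.92, "std_r2": 0.01})  # illustrative values

    # `put_item` with MediaItem.factory stores raw HTML, as done for the skrub report.
    project.put_item(
        "skrub_report",
        MediaItem.factory("<p>report</p>", media_type="text/html"),
    )

The stored items can then be browsed with "python -m skore launch skrub_demo".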
diff --git a/pyproject.toml b/pyproject.toml
index 004105597..bb8b7f89a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,6 +30,7 @@ dependencies = [
"pydantic_numpy",
"uvicorn",
"rich",
+ "skops",
]
[project.optional-dependencies]
@@ -40,6 +41,7 @@ test = [
"pre-commit",
"pytest",
"pytest-cov",
+ "pytest-order",
"pytest-randomly",
"ruff",
]
diff --git a/requirements-doc.txt b/requirements-doc.txt
index b749fa165..a3c09f174 100644
--- a/requirements-doc.txt
+++ b/requirements-doc.txt
@@ -1,5 +1,5 @@
#
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --extra=doc --output-file=requirements-doc.txt pyproject.toml
@@ -8,7 +8,7 @@ accessible-pygments==0.0.5
# via pydata-sphinx-theme
alabaster==1.0.0
# via sphinx
-altair==5.4.0
+altair==5.4.1
# via skore (pyproject.toml)
annotated-types==0.7.0
# via pydantic
@@ -24,7 +24,7 @@ babel==2.16.0
# sphinx
beautifulsoup4==4.12.3
# via pydata-sphinx-theme
-certifi==2024.7.4
+certifi==2024.8.30
# via requests
charset-normalizer==3.3.2
# via requests
@@ -32,7 +32,7 @@ click==8.1.7
# via uvicorn
compress-pickle[lz4]==2.1.0
# via pydantic-numpy
-contourpy==1.2.1
+contourpy==1.3.0
# via matplotlib
cycler==0.12.1
# via matplotlib
@@ -42,16 +42,17 @@ docutils==0.21.2
# via
# pydata-sphinx-theme
# sphinx
-fastapi==0.112.1
+fastapi==0.114.1
# via skore (pyproject.toml)
-filelock==3.15.4
+filelock==3.16.0
# via
# huggingface-hub
# torch
# transformers
+ # triton
fonttools==4.53.1
# via matplotlib
-fsspec==2024.6.1
+fsspec==2024.9.0
# via
# huggingface-hub
# torch
@@ -60,15 +61,16 @@ h11==0.14.0
huggingface-hub==0.24.6
# via
# sentence-transformers
+ # skops
# tokenizers
# transformers
-idna==3.7
+idna==3.8
# via
# anyio
# requests
imagesize==1.4.1
# via sphinx
-inflect==7.3.1
+inflect==7.4.0
# via skore (pyproject.toml)
jinja2==3.1.4
# via
@@ -82,7 +84,7 @@ jsonschema==4.23.0
# via altair
jsonschema-specifications==2023.12.1
# via jsonschema
-kiwisolver==1.4.5
+kiwisolver==1.4.7
# via matplotlib
lz4==4.3.3
# via compress-pickle
@@ -97,17 +99,17 @@ matplotlib==3.9.2
# skrub
mdurl==0.1.2
# via markdown-it-py
-more-itertools==10.4.0
+more-itertools==10.5.0
# via inflect
mpmath==1.3.0
# via sympy
-narwhals==1.5.2
+narwhals==1.6.4
# via altair
networkx==3.3
# via
# skore (pyproject.toml)
# torch
-numpy==2.1.0
+numpy==2.1.1
# via
# contourpy
# matplotlib
@@ -122,12 +124,44 @@ numpy==2.1.0
# skrub
# statsmodels
# transformers
+nvidia-cublas-cu12==12.1.3.1
+ # via
+ # nvidia-cudnn-cu12
+ # nvidia-cusolver-cu12
+ # torch
+nvidia-cuda-cupti-cu12==12.1.105
+ # via torch
+nvidia-cuda-nvrtc-cu12==12.1.105
+ # via torch
+nvidia-cuda-runtime-cu12==12.1.105
+ # via torch
+nvidia-cudnn-cu12==9.1.0.70
+ # via torch
+nvidia-cufft-cu12==11.0.2.54
+ # via torch
+nvidia-curand-cu12==10.3.2.106
+ # via torch
+nvidia-cusolver-cu12==11.4.5.107
+ # via torch
+nvidia-cusparse-cu12==12.1.0.106
+ # via
+ # nvidia-cusolver-cu12
+ # torch
+nvidia-nccl-cu12==2.20.5
+ # via torch
+nvidia-nvjitlink-cu12==12.6.68
+ # via
+ # nvidia-cusolver-cu12
+ # nvidia-cusparse-cu12
+nvidia-nvtx-cu12==12.1.105
+ # via torch
packaging==24.1
# via
# altair
# huggingface-hub
# matplotlib
# pydata-sphinx-theme
+ # skops
# skrub
# sphinx
# statsmodels
@@ -145,16 +179,16 @@ pillow==10.4.0
# matplotlib
# sentence-transformers
# sphinx-gallery
-polars==1.5.0
+polars==1.6.0
# via skore (pyproject.toml)
pyarrow==17.0.0
# via skore (pyproject.toml)
-pydantic==2.8.2
+pydantic==2.9.1
# via
# fastapi
# pydantic-numpy
# skore (pyproject.toml)
-pydantic-core==2.20.1
+pydantic-core==2.23.3
# via pydantic
pydantic-numpy==6.0.0
# via skore (pyproject.toml)
@@ -166,13 +200,13 @@ pygments==2.18.0
# pydata-sphinx-theme
# rich
# sphinx
-pyparsing==3.1.2
+pyparsing==3.1.4
# via matplotlib
python-dateutil==2.9.0.post0
# via
# matplotlib
# pandas
-pytz==2024.1
+pytz==2024.2
# via pandas
pyyaml==6.0.2
# via
@@ -189,7 +223,7 @@ requests==2.32.3
# huggingface-hub
# sphinx
# transformers
-rich==13.7.1
+rich==13.8.1
# via skore (pyproject.toml)
rpds-py==0.20.0
# via
@@ -199,11 +233,12 @@ ruamel-yaml==0.18.6
# via pydantic-numpy
ruamel-yaml-clib==0.2.8
# via ruamel-yaml
-safetensors==0.4.4
+safetensors==0.4.5
# via transformers
scikit-learn==1.5.1
# via
# sentence-transformers
+ # skops
# skore (pyproject.toml)
# skrub
scipy==1.14.1
@@ -222,6 +257,8 @@ six==1.16.0
# via
# patsy
# python-dateutil
+skops==0.10.0
+ # via skore (pyproject.toml)
skrub==0.3.0
# via skore (pyproject.toml)
sniffio==1.3.1
@@ -249,17 +286,19 @@ sphinxcontrib-qthelp==2.0.0
# via sphinx
sphinxcontrib-serializinghtml==2.0.0
# via sphinx
-starlette==0.38.2
+starlette==0.38.5
# via fastapi
statsmodels==0.14.2
# via skore (pyproject.toml)
sympy==1.13.2
# via torch
+tabulate==0.9.0
+ # via skops
threadpoolctl==3.5.0
# via scikit-learn
tokenizers==0.19.1
# via transformers
-torch==2.4.0
+torch==2.4.1
# via sentence-transformers
tqdm==4.66.5
# via
@@ -267,8 +306,10 @@ tqdm==4.66.5
# sentence-transformers
# skore (pyproject.toml)
# transformers
-transformers==4.44.1
+transformers==4.44.2
# via sentence-transformers
+triton==3.0.0
+ # via torch
typeguard==4.3.0
# via inflect
typing-extensions==4.12.2
@@ -287,6 +328,3 @@ urllib3==2.2.2
# via requests
uvicorn==0.30.6
# via skore (pyproject.toml)
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements-test.txt b/requirements-test.txt
index 0afeb7082..ae214932d 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -1,10 +1,10 @@
#
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --extra=test --output-file=requirements-test.txt pyproject.toml
#
-altair==5.4.0
+altair==5.4.1
# via skore (pyproject.toml)
annotated-types==0.7.0
# via pydantic
@@ -18,17 +18,20 @@ attrs==24.2.0
# via
# jsonschema
# referencing
-certifi==2024.7.4
+certifi==2024.8.30
# via
# httpcore
# httpx
+ # requests
cfgv==3.4.0
# via pre-commit
+charset-normalizer==3.3.2
+ # via requests
click==8.1.7
# via uvicorn
compress-pickle[lz4]==2.1.0
# via pydantic-numpy
-contourpy==1.2.1
+contourpy==1.3.0
# via matplotlib
coverage[toml]==7.6.1
# via pytest-cov
@@ -38,29 +41,36 @@ diskcache==5.6.3
# via skore (pyproject.toml)
distlib==0.3.8
# via virtualenv
-fastapi==0.112.1
+fastapi==0.114.1
# via skore (pyproject.toml)
-filelock==3.15.4
- # via virtualenv
+filelock==3.16.0
+ # via
+ # huggingface-hub
+ # virtualenv
fonttools==4.53.1
# via matplotlib
fqdn==1.5.1
# via jsonschema
+fsspec==2024.9.0
+ # via huggingface-hub
h11==0.14.0
# via
# httpcore
# uvicorn
httpcore==1.0.5
# via httpx
-httpx==0.27.0
+httpx==0.27.2
# via skore (pyproject.toml)
+huggingface-hub==0.24.6
+ # via skops
identify==2.6.0
# via pre-commit
-idna==3.7
+idna==3.8
# via
# anyio
# httpx
# jsonschema
+ # requests
iniconfig==2.0.0
# via pytest
isoduration==20.11.0
@@ -77,7 +87,7 @@ jsonschema[format]==4.23.0
# skore (pyproject.toml)
jsonschema-specifications==2023.12.1
# via jsonschema
-kiwisolver==1.4.5
+kiwisolver==1.4.7
# via matplotlib
lz4==4.3.3
# via compress-pickle
@@ -89,11 +99,11 @@ matplotlib==3.9.2
# via skore (pyproject.toml)
mdurl==0.1.2
# via markdown-it-py
-narwhals==1.5.2
+narwhals==1.6.4
# via altair
nodeenv==1.9.1
# via pre-commit
-numpy==2.1.0
+numpy==2.1.1
# via
# contourpy
# matplotlib
@@ -104,34 +114,36 @@ numpy==2.1.0
packaging==24.1
# via
# altair
+ # huggingface-hub
# matplotlib
# pytest
+ # skops
pandas==2.2.2
# via skore (pyproject.toml)
pillow==10.4.0
# via matplotlib
-platformdirs==4.2.2
+platformdirs==4.3.2
# via virtualenv
pluggy==1.5.0
# via pytest
-polars==1.5.0
+polars==1.6.0
# via skore (pyproject.toml)
pre-commit==3.8.0
# via skore (pyproject.toml)
-pydantic==2.8.2
+pydantic==2.9.1
# via
# fastapi
# pydantic-numpy
# skore (pyproject.toml)
-pydantic-core==2.20.1
+pydantic-core==2.23.3
# via pydantic
pydantic-numpy==6.0.0
# via skore (pyproject.toml)
pygments==2.18.0
# via rich
-pyparsing==3.1.2
+pyparsing==3.1.4
# via matplotlib
-pytest==8.3.2
+pytest==8.3.3
# via
# pytest-cov
# pytest-randomly
@@ -145,19 +157,23 @@ python-dateutil==2.9.0.post0
# arrow
# matplotlib
# pandas
-pytz==2024.1
+pytz==2024.2
# via pandas
pyyaml==6.0.2
- # via pre-commit
+ # via
+ # huggingface-hub
+ # pre-commit
referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
+requests==2.32.3
+ # via huggingface-hub
rfc3339-validator==0.1.4
# via jsonschema
rfc3987==1.3.8
# via jsonschema
-rich==13.7.1
+rich==13.8.1
# via skore (pyproject.toml)
rpds-py==0.20.0
# via
@@ -167,10 +183,12 @@ ruamel-yaml==0.18.6
# via pydantic-numpy
ruamel-yaml-clib==0.2.8
# via ruamel-yaml
-ruff==0.6.1
+ruff==0.6.4
# via skore (pyproject.toml)
scikit-learn==1.5.1
- # via skore (pyproject.toml)
+ # via
+ # skops
+ # skore (pyproject.toml)
scipy==1.14.1
# via scikit-learn
semver==3.0.2
@@ -179,29 +197,38 @@ six==1.16.0
# via
# python-dateutil
# rfc3339-validator
+skops==0.10.0
+ # via skore (pyproject.toml)
sniffio==1.3.1
# via
# anyio
# httpx
-starlette==0.38.2
+starlette==0.38.5
# via fastapi
+tabulate==0.9.0
+ # via skops
threadpoolctl==3.5.0
# via scikit-learn
-types-python-dateutil==2.9.0.20240821
+tqdm==4.66.5
+ # via huggingface-hub
+types-python-dateutil==2.9.0.20240906
# via arrow
typing-extensions==4.12.2
# via
# altair
# fastapi
+ # huggingface-hub
# pydantic
# pydantic-core
tzdata==2024.1
# via pandas
uri-template==1.3.0
# via jsonschema
+urllib3==2.2.2
+ # via requests
uvicorn==0.30.6
# via skore (pyproject.toml)
-virtualenv==20.26.3
+virtualenv==20.26.4
# via pre-commit
webcolors==24.8.0
# via jsonschema
diff --git a/requirements-tools.txt b/requirements-tools.txt
index 234610864..f4e2b752d 100644
--- a/requirements-tools.txt
+++ b/requirements-tools.txt
@@ -1,10 +1,10 @@
#
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --extra=tools --output-file=requirements-tools.txt pyproject.toml
#
-altair==5.4.0
+altair==5.4.1
# via skore (pyproject.toml)
annotated-types==0.7.0
# via pydantic
@@ -14,28 +14,40 @@ attrs==24.2.0
# via
# jsonschema
# referencing
-build==1.2.1
+build==1.2.2
# via pip-tools
+certifi==2024.8.30
+ # via requests
+charset-normalizer==3.3.2
+ # via requests
click==8.1.7
# via
# pip-tools
# uvicorn
compress-pickle[lz4]==2.1.0
# via pydantic-numpy
-contourpy==1.2.1
+contourpy==1.3.0
# via matplotlib
cycler==0.12.1
# via matplotlib
diskcache==5.6.3
# via skore (pyproject.toml)
-fastapi==0.112.1
+fastapi==0.114.1
# via skore (pyproject.toml)
+filelock==3.16.0
+ # via huggingface-hub
fonttools==4.53.1
# via matplotlib
+fsspec==2024.9.0
+ # via huggingface-hub
h11==0.14.0
# via uvicorn
-idna==3.7
- # via anyio
+huggingface-hub==0.24.6
+ # via skops
+idna==3.8
+ # via
+ # anyio
+ # requests
jinja2==3.1.4
# via altair
joblib==1.4.2
@@ -44,7 +56,7 @@ jsonschema==4.23.0
# via altair
jsonschema-specifications==2023.12.1
# via jsonschema
-kiwisolver==1.4.5
+kiwisolver==1.4.7
# via matplotlib
lz4==4.3.3
# via compress-pickle
@@ -56,9 +68,9 @@ matplotlib==3.9.2
# via skore (pyproject.toml)
mdurl==0.1.2
# via markdown-it-py
-narwhals==1.5.2
+narwhals==1.6.4
# via altair
-numpy==2.1.0
+numpy==2.1.1
# via
# contourpy
# matplotlib
@@ -70,27 +82,29 @@ packaging==24.1
# via
# altair
# build
+ # huggingface-hub
# matplotlib
+ # skops
pandas==2.2.2
# via skore (pyproject.toml)
pillow==10.4.0
# via matplotlib
pip-tools==7.4.1
# via skore (pyproject.toml)
-polars==1.5.0
+polars==1.6.0
# via skore (pyproject.toml)
-pydantic==2.8.2
+pydantic==2.9.1
# via
# fastapi
# pydantic-numpy
# skore (pyproject.toml)
-pydantic-core==2.20.1
+pydantic-core==2.23.3
# via pydantic
pydantic-numpy==6.0.0
# via skore (pyproject.toml)
pygments==2.18.0
# via rich
-pyparsing==3.1.2
+pyparsing==3.1.4
# via matplotlib
pyproject-hooks==1.1.0
# via
@@ -100,13 +114,17 @@ python-dateutil==2.9.0.post0
# via
# matplotlib
# pandas
-pytz==2024.1
+pytz==2024.2
# via pandas
+pyyaml==6.0.2
+ # via huggingface-hub
referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
-rich==13.7.1
+requests==2.32.3
+ # via huggingface-hub
+rich==13.8.1
# via skore (pyproject.toml)
rpds-py==0.20.0
# via
@@ -117,27 +135,38 @@ ruamel-yaml==0.18.6
ruamel-yaml-clib==0.2.8
# via ruamel-yaml
scikit-learn==1.5.1
- # via skore (pyproject.toml)
+ # via
+ # skops
+ # skore (pyproject.toml)
scipy==1.14.1
# via scikit-learn
semver==3.0.2
# via pydantic-numpy
six==1.16.0
# via python-dateutil
+skops==0.10.0
+ # via skore (pyproject.toml)
sniffio==1.3.1
# via anyio
-starlette==0.38.2
+starlette==0.38.5
# via fastapi
+tabulate==0.9.0
+ # via skops
threadpoolctl==3.5.0
# via scikit-learn
+tqdm==4.66.5
+ # via huggingface-hub
typing-extensions==4.12.2
# via
# altair
# fastapi
+ # huggingface-hub
# pydantic
# pydantic-core
tzdata==2024.1
# via pandas
+urllib3==2.2.2
+ # via requests
uvicorn==0.30.6
# via skore (pyproject.toml)
wheel==0.44.0
diff --git a/requirements.txt b/requirements.txt
index a468c0cb2..11cce4ccf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,10 @@
#
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --output-file=requirements.txt pyproject.toml
#
-altair==5.4.0
+altair==5.4.1
# via skore (pyproject.toml)
annotated-types==0.7.0
# via pydantic
@@ -14,24 +14,36 @@ attrs==24.2.0
# via
# jsonschema
# referencing
+certifi==2024.8.30
+ # via requests
+charset-normalizer==3.3.2
+ # via requests
click==8.1.7
# via uvicorn
compress-pickle[lz4]==2.1.0
# via pydantic-numpy
-contourpy==1.2.1
+contourpy==1.3.0
# via matplotlib
cycler==0.12.1
# via matplotlib
diskcache==5.6.3
# via skore (pyproject.toml)
-fastapi==0.112.1
+fastapi==0.114.1
# via skore (pyproject.toml)
+filelock==3.16.0
+ # via huggingface-hub
fonttools==4.53.1
# via matplotlib
+fsspec==2024.9.0
+ # via huggingface-hub
h11==0.14.0
# via uvicorn
-idna==3.7
- # via anyio
+huggingface-hub==0.24.6
+ # via skops
+idna==3.8
+ # via
+ # anyio
+ # requests
jinja2==3.1.4
# via altair
joblib==1.4.2
@@ -40,7 +52,7 @@ jsonschema==4.23.0
# via altair
jsonschema-specifications==2023.12.1
# via jsonschema
-kiwisolver==1.4.5
+kiwisolver==1.4.7
# via matplotlib
lz4==4.3.3
# via compress-pickle
@@ -52,9 +64,9 @@ matplotlib==3.9.2
# via skore (pyproject.toml)
mdurl==0.1.2
# via markdown-it-py
-narwhals==1.5.2
+narwhals==1.6.4
# via altair
-numpy==2.1.0
+numpy==2.1.1
# via
# contourpy
# matplotlib
@@ -65,37 +77,43 @@ numpy==2.1.0
packaging==24.1
# via
# altair
+ # huggingface-hub
# matplotlib
+ # skops
pandas==2.2.2
# via skore (pyproject.toml)
pillow==10.4.0
# via matplotlib
-polars==1.5.0
+polars==1.6.0
# via skore (pyproject.toml)
-pydantic==2.8.2
+pydantic==2.9.1
# via
# fastapi
# pydantic-numpy
# skore (pyproject.toml)
-pydantic-core==2.20.1
+pydantic-core==2.23.3
# via pydantic
pydantic-numpy==6.0.0
# via skore (pyproject.toml)
pygments==2.18.0
# via rich
-pyparsing==3.1.2
+pyparsing==3.1.4
# via matplotlib
python-dateutil==2.9.0.post0
# via
# matplotlib
# pandas
-pytz==2024.1
+pytz==2024.2
# via pandas
+pyyaml==6.0.2
+ # via huggingface-hub
referencing==0.35.1
# via
# jsonschema
# jsonschema-specifications
-rich==13.7.1
+requests==2.32.3
+ # via huggingface-hub
+rich==13.8.1
# via skore (pyproject.toml)
rpds-py==0.20.0
# via
@@ -106,26 +124,37 @@ ruamel-yaml==0.18.6
ruamel-yaml-clib==0.2.8
# via ruamel-yaml
scikit-learn==1.5.1
- # via skore (pyproject.toml)
+ # via
+ # skops
+ # skore (pyproject.toml)
scipy==1.14.1
# via scikit-learn
semver==3.0.2
# via pydantic-numpy
six==1.16.0
# via python-dateutil
+skops==0.10.0
+ # via skore (pyproject.toml)
sniffio==1.3.1
# via anyio
-starlette==0.38.2
+starlette==0.38.5
# via fastapi
+tabulate==0.9.0
+ # via skops
threadpoolctl==3.5.0
# via scikit-learn
+tqdm==4.66.5
+ # via huggingface-hub
typing-extensions==4.12.2
# via
# altair
# fastapi
+ # huggingface-hub
# pydantic
# pydantic-core
tzdata==2024.1
# via pandas
+urllib3==2.2.2
+ # via requests
uvicorn==0.30.6
# via skore (pyproject.toml)
diff --git a/src/skore/__init__.py b/src/skore/__init__.py
index 28bd240a5..3b01c1474 100644
--- a/src/skore/__init__.py
+++ b/src/skore/__init__.py
@@ -4,12 +4,11 @@
import rich.logging
-from skore.store import Store
-from skore.store import Store as Skore
+from skore.project import Project, load
__all__ = [
- "Skore",
- "Store",
+ "load",
+ "Project",
]
diff --git a/src/skore/__main__.py b/src/skore/__main__.py
index af2676b5e..3dc77165d 100644
--- a/src/skore/__main__.py
+++ b/src/skore/__main__.py
@@ -2,7 +2,12 @@
import sys
-from skore.cli import cli
+from skore.cli.cli import cli
if __name__ == "__main__":
+ import rich.traceback
+
+ # Display error tracebacks with Rich
+ rich.traceback.install(show_locals=True)
+
cli(sys.argv[1:])
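In short, the public entry points after this change are load and Project, per the new __all__ above; a minimal, assumed usage looks like:

    from skore import Project, load

    project = load("skrub_demo")  # open an existing project (assumed to exist on disk)
    assert isinstance(project, Project)
    project.put("example", {"answer": 42})  # same `put` API as in the migrated demo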
diff --git a/src/skore/api/__init__.py b/src/skore/api/__init__.py
deleted file mode 100644
index a8f9bcbbe..000000000
--- a/src/skore/api/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""The API to interact with stores."""
-
-from skore.api.app import create_api_app
-
-__all__ = ["create_api_app"]
diff --git a/src/skore/api/app.py b/src/skore/api/app.py
deleted file mode 100644
index 102ee1b43..000000000
--- a/src/skore/api/app.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""FastAPI factory used to create the API to interact with stores."""
-
-from fastapi import FastAPI
-from fastapi.middleware.cors import CORSMiddleware
-
-from skore.api.routes import ROOT_ROUTER
-
-
-def create_api_app() -> FastAPI:
- """FastAPI factory used to create the API to interact with `stores`."""
- app = FastAPI()
-
- # Enable CORS support on all routes, for all origins and methods.
- app.add_middleware(
- CORSMiddleware,
- allow_origins=["*"],
- allow_credentials=True,
- allow_methods=["*"],
- allow_headers=["*"],
- )
-
- # Include routers from bottom to top.
- # Include routers always after all routes have been defined/imported.
- app.include_router(ROOT_ROUTER)
-
- return app
diff --git a/src/skore/api/routes/__init__.py b/src/skore/api/routes/__init__.py
deleted file mode 100644
index c8a3d0982..000000000
--- a/src/skore/api/routes/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""The definition of API routes to interact with stores."""
-
-from fastapi import APIRouter
-
-from skore.api.routes.fake_stores import FAKE_STORES_ROUTER
-from skore.api.routes.stores import SKORES_ROUTER, STORES_ROUTER
-
-__all__ = ["ROOT_ROUTER"]
-
-
-ROOT_ROUTER = APIRouter(prefix="/api")
-SUBROUTERS = [
- FAKE_STORES_ROUTER,
- SKORES_ROUTER,
- STORES_ROUTER,
-]
-
-
-for router in SUBROUTERS:
- ROOT_ROUTER.include_router(router)
diff --git a/src/skore/api/routes/fake_store.json b/src/skore/api/routes/fake_store.json
deleted file mode 100644
index 60bf0799e..000000000
--- a/src/skore/api/routes/fake_store.json
+++ /dev/null
@@ -1,3195 +0,0 @@
-{
- "schema": "schema:dashboard:v0",
- "uri": "probal-ai/demo-usecase/training/1",
- "payload": {
- "title": {
- "type": "string",
- "data": "My Awesome Dashboard"
- },
- "errors": {
- "type": "array",
- "data": [0.1, 0.2, 0.3, 0.4, 0.5]
- },
- "creation date": {
- "type": "date",
- "data": "2024-07-24"
- },
- "last updated": {
- "type": "datetime",
- "data": "2024-07-24T11:31:00Z"
- },
- "score": {
- "type": "number",
- "data": 0.87
- },
- "count": {
- "type": "integer",
- "data": 234567
- },
- "roc curve": {
- "type": "vega",
- "data": {
- "$schema": "https://vega.github.io/schema/vega-lite/v5.17.0.json",
- "config": {
- "view": {
- "continuousHeight": 300,
- "continuousWidth": 300
- }
- },
- "data": {
- "name": "data-22c73fe76cc0c40f5fb00a8dc69fa568"
- },
- "datasets": {
- "data-22c73fe76cc0c40f5fb00a8dc69fa568": [
- {
- "fpr": 0.0,
- "pos_label": "0",
- "thresholds": null,
- "tpr": 0.0
- },
- {
- "fpr": 0.0,
- "pos_label": "0",
- "thresholds": 1.0,
- "tpr": 0.6698113207547169
- },
- {
- "fpr": 0.0,
- "pos_label": "0",
- "thresholds": 0.9999999999999396,
- "tpr": 0.7169811320754716
- },
- {
- "fpr": 0.0028011204481792717,
- "pos_label": "0",
- "thresholds": 0.9999999999999307,
- "tpr": 0.7169811320754716
- },
- {
- "fpr": 0.0028011204481792717,
- "pos_label": "0",
- "thresholds": 0.9999999981444985,
- "tpr": 0.7783018867924528
- },
- {
- "fpr": 0.0056022408963585435,
- "pos_label": "0",
- "thresholds": 0.999999997464883,
- "tpr": 0.7783018867924528
- },
- {
- "fpr": 0.0056022408963585435,
- "pos_label": "0",
- "thresholds": 0.9999999668521392,
- "tpr": 0.8018867924528302
- },
- {
- "fpr": 0.008403361344537815,
- "pos_label": "0",
- "thresholds": 0.9999999610792231,
- "tpr": 0.8018867924528302
- },
- {
- "fpr": 0.008403361344537815,
- "pos_label": "0",
- "thresholds": 0.9999987046109086,
- "tpr": 0.8254716981132075
- },
- {
- "fpr": 0.011204481792717087,
- "pos_label": "0",
- "thresholds": 0.999998181248838,
- "tpr": 0.8254716981132075
- },
- {
- "fpr": 0.011204481792717087,
- "pos_label": "0",
- "thresholds": 0.9999659078870095,
- "tpr": 0.8490566037735849
- },
- {
- "fpr": 0.014005602240896359,
- "pos_label": "0",
- "thresholds": 0.9999421334800987,
- "tpr": 0.8490566037735849
- },
- {
- "fpr": 0.014005602240896359,
- "pos_label": "0",
- "thresholds": 0.999932137607778,
- "tpr": 0.8537735849056604
- },
- {
- "fpr": 0.01680672268907563,
- "pos_label": "0",
- "thresholds": 0.9997590011357096,
- "tpr": 0.8537735849056604
- },
- {
- "fpr": 0.01680672268907563,
- "pos_label": "0",
- "thresholds": 0.9989091880385522,
- "tpr": 0.8584905660377359
- },
- {
- "fpr": 0.0196078431372549,
- "pos_label": "0",
- "thresholds": 0.998350452447781,
- "tpr": 0.8584905660377359
- },
- {
- "fpr": 0.0196078431372549,
- "pos_label": "0",
- "thresholds": 0.9825639858770081,
- "tpr": 0.8726415094339622
- },
- {
- "fpr": 0.025210084033613446,
- "pos_label": "0",
- "thresholds": 0.8655270602759685,
- "tpr": 0.8726415094339622
- },
- {
- "fpr": 0.025210084033613446,
- "pos_label": "0",
- "thresholds": 0.7646074203900165,
- "tpr": 0.8867924528301887
- },
- {
- "fpr": 0.028011204481792718,
- "pos_label": "0",
- "thresholds": 0.6646294130401568,
- "tpr": 0.8867924528301887
- },
- {
- "fpr": 0.028011204481792718,
- "pos_label": "0",
- "thresholds": 0.40471949913401667,
- "tpr": 0.8962264150943396
- },
- {
- "fpr": 0.03361344537815126,
- "pos_label": "0",
- "thresholds": 0.36320656941537444,
- "tpr": 0.8962264150943396
- },
- {
- "fpr": 0.03361344537815126,
- "pos_label": "0",
- "thresholds": 0.33769169067775684,
- "tpr": 0.9009433962264151
- },
- {
- "fpr": 0.04201680672268908,
- "pos_label": "0",
- "thresholds": 0.16639301744913013,
- "tpr": 0.9009433962264151
- },
- {
- "fpr": 0.04201680672268908,
- "pos_label": "0",
- "thresholds": 0.06455975025113853,
- "tpr": 0.910377358490566
- },
- {
- "fpr": 0.04481792717086835,
- "pos_label": "0",
- "thresholds": 0.06146430828297247,
- "tpr": 0.910377358490566
- },
- {
- "fpr": 0.04481792717086835,
- "pos_label": "0",
- "thresholds": 0.0317948134992976,
- "tpr": 0.9198113207547169
- },
- {
- "fpr": 0.05042016806722689,
- "pos_label": "0",
- "thresholds": 0.02215919839773141,
- "tpr": 0.9198113207547169
- },
- {
- "fpr": 0.05042016806722689,
- "pos_label": "0",
- "thresholds": 0.01621676037851363,
- "tpr": 0.9292452830188679
- },
- {
- "fpr": 0.056022408963585436,
- "pos_label": "0",
- "thresholds": 0.009404106919762152,
- "tpr": 0.9292452830188679
- },
- {
- "fpr": 0.056022408963585436,
- "pos_label": "0",
- "thresholds": 0.004858845091869692,
- "tpr": 0.9433962264150944
- },
- {
- "fpr": 0.058823529411764705,
- "pos_label": "0",
- "thresholds": 0.004827637467222754,
- "tpr": 0.9433962264150944
- },
- {
- "fpr": 0.058823529411764705,
- "pos_label": "0",
- "thresholds": 0.002384143683618341,
- "tpr": 0.9575471698113207
- },
- {
- "fpr": 0.06162464985994398,
- "pos_label": "0",
- "thresholds": 0.0021912117921168437,
- "tpr": 0.9575471698113207
- },
- {
- "fpr": 0.06162464985994398,
- "pos_label": "0",
- "thresholds": 0.0013894481753717399,
- "tpr": 0.9622641509433962
- },
- {
- "fpr": 0.0784313725490196,
- "pos_label": "0",
- "thresholds": 0.00022626381148652976,
- "tpr": 0.9622641509433962
- },
- {
- "fpr": 0.0784313725490196,
- "pos_label": "0",
- "thresholds": 0.00020599522884723722,
- "tpr": 0.9669811320754716
- },
- {
- "fpr": 0.08403361344537816,
- "pos_label": "0",
- "thresholds": 8.377626248421777e-5,
- "tpr": 0.9669811320754716
- },
- {
- "fpr": 0.08403361344537816,
- "pos_label": "0",
- "thresholds": 7.371594859191336e-5,
- "tpr": 0.9716981132075472
- },
- {
- "fpr": 0.09243697478991597,
- "pos_label": "0",
- "thresholds": 4.947255524423883e-5,
- "tpr": 0.9716981132075472
- },
- {
- "fpr": 0.09243697478991597,
- "pos_label": "0",
- "thresholds": 3.916884846660099e-5,
- "tpr": 0.9764150943396226
- },
- {
- "fpr": 0.09803921568627451,
- "pos_label": "0",
- "thresholds": 2.3646246318136593e-5,
- "tpr": 0.9764150943396226
- },
- {
- "fpr": 0.09803921568627451,
- "pos_label": "0",
- "thresholds": 6.738863883302622e-6,
- "tpr": 0.9811320754716981
- },
- {
- "fpr": 0.13165266106442577,
- "pos_label": "0",
- "thresholds": 4.2559516048626543e-7,
- "tpr": 0.9811320754716981
- },
- {
- "fpr": 0.13165266106442577,
- "pos_label": "0",
- "thresholds": 3.7118692472513785e-7,
- "tpr": 0.9858490566037735
- },
- {
- "fpr": 0.1876750700280112,
- "pos_label": "0",
- "thresholds": 5.99215689686648e-9,
- "tpr": 0.9858490566037735
- },
- {
- "fpr": 0.1876750700280112,
- "pos_label": "0",
- "thresholds": 2.445513495072508e-9,
- "tpr": 0.9905660377358491
- },
- {
- "fpr": 0.226890756302521,
- "pos_label": "0",
- "thresholds": 4.153210597618201e-10,
- "tpr": 0.9905660377358491
- },
- {
- "fpr": 0.226890756302521,
- "pos_label": "0",
- "thresholds": 3.5558033534957316e-10,
- "tpr": 0.9952830188679245
- },
- {
- "fpr": 0.3277310924369748,
- "pos_label": "0",
- "thresholds": 1.0440210711403832e-11,
- "tpr": 0.9952830188679245
- },
- {
- "fpr": 0.3277310924369748,
- "pos_label": "0",
- "thresholds": 9.42984342776014e-12,
- "tpr": 1.0
- },
- {
- "fpr": 1.0,
- "pos_label": "0",
- "thresholds": 8.946841256656631e-23,
- "tpr": 1.0
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": null,
- "tpr": 0.0
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 1.0,
- "tpr": 0.22969187675070027
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999996,
- "tpr": 0.23249299719887956
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999991,
- "tpr": 0.24369747899159663
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999982,
- "tpr": 0.2773109243697479
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999969,
- "tpr": 0.28291316526610644
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999964,
- "tpr": 0.2969187675070028
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999958,
- "tpr": 0.29971988795518206
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999947,
- "tpr": 0.32212885154061627
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999942,
- "tpr": 0.32492997198879553
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999929,
- "tpr": 0.3389355742296919
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999911,
- "tpr": 0.3473389355742297
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999898,
- "tpr": 0.35014005602240894
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999893,
- "tpr": 0.3557422969187675
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999867,
- "tpr": 0.3585434173669468
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.999999999999984,
- "tpr": 0.36694677871148457
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999822,
- "tpr": 0.37254901960784315
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999805,
- "tpr": 0.38095238095238093
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.999999999999968,
- "tpr": 0.38935574229691877
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999574,
- "tpr": 0.4005602240896359
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999485,
- "tpr": 0.4117647058823529
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999396,
- "tpr": 0.42857142857142855
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999378,
- "tpr": 0.4369747899159664
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.999999999999936,
- "tpr": 0.43977591036414565
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999999325,
- "tpr": 0.44537815126050423
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.999999999999929,
- "tpr": 0.453781512605042
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999998632,
- "tpr": 0.484593837535014
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999998401,
- "tpr": 0.49019607843137253
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.999999999996632,
- "tpr": 0.6302521008403361
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999956941,
- "tpr": 0.6358543417366946
- },
- {
- "fpr": 0.0,
- "pos_label": "1",
- "thresholds": 0.9999999999907327,
- "tpr": 0.6722689075630253
- },
- {
- "fpr": 0.0047169811320754715,
- "pos_label": "1",
- "thresholds": 0.9999999999905711,
- "tpr": 0.6722689075630253
- },
- {
- "fpr": 0.0047169811320754715,
- "pos_label": "1",
- "thresholds": 0.9999999996871161,
- "tpr": 0.773109243697479
- },
- {
- "fpr": 0.009433962264150943,
- "pos_label": "1",
- "thresholds": 0.9999999996444195,
- "tpr": 0.773109243697479
- },
- {
- "fpr": 0.009433962264150943,
- "pos_label": "1",
- "thresholds": 0.9999999977235259,
- "tpr": 0.8123249299719888
- },
- {
- "fpr": 0.014150943396226415,
- "pos_label": "1",
- "thresholds": 0.9999999975544864,
- "tpr": 0.8123249299719888
- },
- {
- "fpr": 0.014150943396226415,
- "pos_label": "1",
- "thresholds": 0.999999648527957,
- "tpr": 0.8683473389355743
- },
- {
- "fpr": 0.018867924528301886,
- "pos_label": "1",
- "thresholds": 0.9999996288130754,
- "tpr": 0.8683473389355743
- },
- {
- "fpr": 0.018867924528301886,
- "pos_label": "1",
- "thresholds": 0.9999941164923394,
- "tpr": 0.9019607843137255
- },
- {
- "fpr": 0.02358490566037736,
- "pos_label": "1",
- "thresholds": 0.9999932611361166,
- "tpr": 0.9019607843137255
- },
- {
- "fpr": 0.02358490566037736,
- "pos_label": "1",
- "thresholds": 0.9999728354195855,
- "tpr": 0.907563025210084
- },
- {
- "fpr": 0.02830188679245283,
- "pos_label": "1",
- "thresholds": 0.9999608311515332,
- "tpr": 0.907563025210084
- },
- {
- "fpr": 0.02830188679245283,
- "pos_label": "1",
- "thresholds": 0.9999304769012171,
- "tpr": 0.9159663865546218
- },
- {
- "fpr": 0.0330188679245283,
- "pos_label": "1",
- "thresholds": 0.999926284051408,
- "tpr": 0.9159663865546218
- },
- {
- "fpr": 0.0330188679245283,
- "pos_label": "1",
- "thresholds": 0.9998384656217951,
- "tpr": 0.9215686274509803
- },
- {
- "fpr": 0.03773584905660377,
- "pos_label": "1",
- "thresholds": 0.9997940047711527,
- "tpr": 0.9215686274509803
- },
- {
- "fpr": 0.03773584905660377,
- "pos_label": "1",
- "thresholds": 0.9990898641799949,
- "tpr": 0.938375350140056
- },
- {
- "fpr": 0.04245283018867924,
- "pos_label": "1",
- "thresholds": 0.9986105518246283,
- "tpr": 0.938375350140056
- },
- {
- "fpr": 0.04245283018867924,
- "pos_label": "1",
- "thresholds": 0.9978087882078832,
- "tpr": 0.9411764705882353
- },
- {
- "fpr": 0.05660377358490566,
- "pos_label": "1",
- "thresholds": 0.9957821829468887,
- "tpr": 0.9411764705882353
- },
- {
- "fpr": 0.05660377358490566,
- "pos_label": "1",
- "thresholds": 0.9951723625327773,
- "tpr": 0.9439775910364145
- },
- {
- "fpr": 0.07075471698113207,
- "pos_label": "1",
- "thresholds": 0.9926664066593506,
- "tpr": 0.9439775910364145
- },
- {
- "fpr": 0.07075471698113207,
- "pos_label": "1",
- "thresholds": 0.9904986185121076,
- "tpr": 0.9495798319327731
- },
- {
- "fpr": 0.08018867924528301,
- "pos_label": "1",
- "thresholds": 0.9805684331700446,
- "tpr": 0.9495798319327731
- },
- {
- "fpr": 0.08018867924528301,
- "pos_label": "1",
- "thresholds": 0.9768401895265262,
- "tpr": 0.9551820728291317
- },
- {
- "fpr": 0.08962264150943396,
- "pos_label": "1",
- "thresholds": 0.9598034540637072,
- "tpr": 0.9551820728291317
- },
- {
- "fpr": 0.08962264150943396,
- "pos_label": "1",
- "thresholds": 0.9385356917170277,
- "tpr": 0.957983193277311
- },
- {
- "fpr": 0.09905660377358491,
- "pos_label": "1",
- "thresholds": 0.9204427411536626,
- "tpr": 0.957983193277311
- },
- {
- "fpr": 0.09905660377358491,
- "pos_label": "1",
- "thresholds": 0.7102408871754577,
- "tpr": 0.9663865546218487
- },
- {
- "fpr": 0.10377358490566038,
- "pos_label": "1",
- "thresholds": 0.6623083093222434,
- "tpr": 0.9663865546218487
- },
- {
- "fpr": 0.10377358490566038,
- "pos_label": "1",
- "thresholds": 0.6344419953464188,
- "tpr": 0.9719887955182073
- },
- {
- "fpr": 0.11320754716981132,
- "pos_label": "1",
- "thresholds": 0.4510063453176261,
- "tpr": 0.9719887955182073
- },
- {
- "fpr": 0.11320754716981132,
- "pos_label": "1",
- "thresholds": 0.3353705869598435,
- "tpr": 0.9747899159663865
- },
- {
- "fpr": 0.12735849056603774,
- "pos_label": "1",
- "thresholds": 0.15322515244760518,
- "tpr": 0.9747899159663865
- },
- {
- "fpr": 0.12735849056603774,
- "pos_label": "1",
- "thresholds": 0.11067042388303172,
- "tpr": 0.9803921568627451
- },
- {
- "fpr": 0.14150943396226415,
- "pos_label": "1",
- "thresholds": 0.005719319146429531,
- "tpr": 0.9803921568627451
- },
- {
- "fpr": 0.14150943396226415,
- "pos_label": "1",
- "thresholds": 0.0016495475522191826,
- "tpr": 0.9831932773109243
- },
- {
- "fpr": 0.14622641509433962,
- "pos_label": "1",
- "thresholds": 0.001090811961447725,
- "tpr": 0.9831932773109243
- },
- {
- "fpr": 0.14622641509433962,
- "pos_label": "1",
- "thresholds": 0.00024099886429094003,
- "tpr": 0.9859943977591037
- },
- {
- "fpr": 0.1509433962264151,
- "pos_label": "1",
- "thresholds": 6.786239222238548e-5,
- "tpr": 0.9859943977591037
- },
- {
- "fpr": 0.1509433962264151,
- "pos_label": "1",
- "thresholds": 5.7866519903227076e-5,
- "tpr": 0.988795518207283
- },
- {
- "fpr": 0.17452830188679244,
- "pos_label": "1",
- "thresholds": 3.4620679132379072e-6,
- "tpr": 0.988795518207283
- },
- {
- "fpr": 0.17452830188679244,
- "pos_label": "1",
- "thresholds": 1.8187511620340435e-6,
- "tpr": 0.9915966386554622
- },
- {
- "fpr": 0.19811320754716982,
- "pos_label": "1",
- "thresholds": 6.923524564368466e-8,
- "tpr": 0.9915966386554622
- },
- {
- "fpr": 0.19811320754716982,
- "pos_label": "1",
- "thresholds": 3.892077801785215e-8,
- "tpr": 0.9943977591036415
- },
- {
- "fpr": 0.22169811320754718,
- "pos_label": "1",
- "thresholds": 1.4309801311336624e-8,
- "tpr": 0.9943977591036415
- },
- {
- "fpr": 0.22169811320754718,
- "pos_label": "1",
- "thresholds": 2.5351181708759625e-9,
- "tpr": 0.9971988795518207
- },
- {
- "fpr": 0.2830188679245283,
- "pos_label": "1",
- "thresholds": 1.0489229467297905e-13,
- "tpr": 0.9971988795518207
- },
- {
- "fpr": 0.2830188679245283,
- "pos_label": "1",
- "thresholds": 7.021589067055656e-14,
- "tpr": 1.0
- },
- {
- "fpr": 0.9905660377358491,
- "pos_label": "1",
- "thresholds": 1.1040872800972363e-273,
- "tpr": 1.0
- },
- {
- "fpr": 1.0,
- "pos_label": "1",
- "thresholds": 0.0,
- "tpr": 1.0
- }
- ]
- },
- "encoding": {
- "color": {
- "field": "pos_label",
- "type": "nominal"
- },
- "tooltip": {
- "field": "thresholds",
- "type": "quantitative"
- },
- "x": {
- "field": "fpr",
- "title": "False positive rate",
- "type": "quantitative"
- },
- "y": {
- "field": "tpr",
- "title": "True positive rate",
- "type": "quantitative"
- }
- },
- "mark": {
- "type": "line"
- },
- "params": [
- {
- "bind": "scales",
- "name": "param_9",
- "select": {
- "encodings": ["x", "y"],
- "type": "interval"
- }
- }
- ],
- "title": "ROC curve",
- "width": "container"
- }
- },
- "confusion matrix": {
- "type": "vega",
- "data": {
- "$schema": "https://vega.github.io/schema/vega-lite/v5.17.0.json",
- "config": {
- "view": {
- "continuousHeight": 300,
- "continuousWidth": 300
- }
- },
- "data": {
- "name": "data-6e0a10ddbcb181bbeeb0f13c5b4ed553"
- },
- "datasets": {
- "data-6e0a10ddbcb181bbeeb0f13c5b4ed553": [
- {
- "v": 189,
- "x": 0,
- "y": 0
- },
- {
- "v": 23,
- "x": 0,
- "y": 1
- },
- {
- "v": 10,
- "x": 1,
- "y": 0
- },
- {
- "v": 347,
- "x": 1,
- "y": 1
- }
- ]
- },
- "encoding": {
- "color": {
- "field": "v",
- "type": "quantitative"
- },
- "x": {
- "axis": {
- "orient": "top"
- },
- "field": "x",
- "title": "Predicted class",
- "type": "ordinal"
- },
- "y": {
- "field": "y",
- "title": "True class",
- "type": "ordinal"
- }
- },
- "mark": {
- "type": "rect"
- },
- "params": [
- {
- "bind": "scales",
- "name": "param_7",
- "select": {
- "encodings": ["x", "y"],
- "type": "interval"
- }
- }
- ],
- "title": "Confusion matrix",
- "width": "container"
- }
- },
- "class frequency": {
- "type": "vega",
- "data": {
- "$schema": "https://vega.github.io/schema/vega-lite/v5.17.0.json",
- "config": {
- "view": {
- "continuousHeight": 300,
- "continuousWidth": 300
- }
- },
- "data": {
- "name": "data-c040321cb26cc724ae22f2f7f4e3b172"
- },
- "datasets": {
- "data-c040321cb26cc724ae22f2f7f4e3b172": [
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 1
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 0
- },
- {
- "y": 1
- }
- ]
- },
- "encoding": {
- "tooltip": [
- {
- "field": "y",
- "title": "Class",
- "type": "quantitative"
- },
- {
- "aggregate": "count",
- "title": "Frequency",
- "type": "quantitative"
- }
- ],
- "x": {
- "field": "y",
- "title": "Class",
- "type": "ordinal"
- },
- "y": {
- "aggregate": "count",
- "title": "Frequency",
- "type": "quantitative"
- }
- },
- "mark": {
- "size": 20,
- "type": "bar"
- },
- "params": [
- {
- "bind": "scales",
- "name": "param_8",
- "select": {
- "encodings": ["x", "y"],
- "type": "interval"
- }
- }
- ],
- "title": "Class frequency",
- "width": "container"
- }
- },
- "monitoring": {
- "type": "markdown",
- "data": "- The fitting run used **92.24347826086958%** of your CPU (min: 0.0%; max: 100.4%)\n- The fitting run used **0.7128300874129586%** of your RAM (min: 0.7058143615722656%; max: 0.7147789001464844%)"
- },
- "custom html": {
- "type": "html",
- "data": "
"
- },
- "dataset": {
- "type": "dataframe",
- "data": {
- "columns": ["id", "firstname", "lastname", "email", "gender", "ip"],
- "data": [
- [
- 1,
- "Porter",
- "Brannigan",
- "pbrannigan0@exblog.jp",
- "Male",
- "176.31.12.247"
- ],
- [2, "Vito", "Axup", "vaxup1@nih.gov", "Male", "26.11.46.135"],
- [
- 3,
- "Dougy",
- "Pendrick",
- "dpendrick2@ca.gov",
- "Male",
- "74.150.179.26"
- ],
- [
- 4,
- "Emmy",
- "Gabrieli",
- "egabrieli3@army.mil",
- "Female",
- "200.90.32.35"
- ],
- [
- 5,
- "Emlyn",
- "Padfield",
- "epadfield4@amazon.co.uk",
- "Female",
- "61.136.213.57"
- ],
- [
- 6,
- "Vladamir",
- "Croose",
- "vcroose5@comsenz.com",
- "Male",
- "118.210.90.30"
- ],
- [
- 7,
- "Milt",
- "Jump",
- "mjump6@arstechnica.com",
- "Male",
- "226.235.88.209"
- ],
- [
- 8,
- "Napoleon",
- "Wynter",
- "nwynter7@bigcartel.com",
- "Bigender",
- "169.52.152.217"
- ],
- [
- 9,
- "Jackelyn",
- "Mailey",
- "jmailey8@microsoft.com",
- "Female",
- "186.254.80.150"
- ],
- [
- 10,
- "Rosalyn",
- "Nelson",
- "rnelson9@dailymail.co.uk",
- "Female",
- "123.211.17.54"
- ],
- [
- 11,
- "Bradly",
- "Pittaway",
- "bpittawaya@sciencedaily.com",
- "Male",
- "221.240.51.105"
- ],
- [
- 12,
- "Corbin",
- "Surpliss",
- "csurplissb@eventbrite.com",
- "Male",
- "124.163.62.79"
- ],
- [
- 13,
- "Carlin",
- "Gehricke",
- "cgehrickec@microsoft.com",
- "Male",
- "67.200.168.184"
- ],
- [14, "El", "Juschka", "ejuschkad@nih.gov", "Male", "206.114.99.172"],
- [
- 15,
- "Kippy",
- "Hamel",
- "khamele@msn.com",
- "Female",
- "226.192.174.226"
- ],
- [
- 16,
- "Deirdre",
- "Ceci",
- "dcecif@amazon.de",
- "Female",
- "226.109.33.104"
- ],
- [
- 17,
- "Noam",
- "Licciardi",
- "nlicciardig@naver.com",
- "Male",
- "205.158.227.29"
- ],
- [
- 18,
- "Ruddy",
- "Gaber",
- "rgaberh@bloglines.com",
- "Male",
- "151.186.11.90"
- ],
- [
- 19,
- "Ezekiel",
- "Dawidowsky",
- "edawidowskyi@illinois.edu",
- "Male",
- "173.210.50.28"
- ],
- [
- 20,
- "Anthiathia",
- "Atterbury",
- "aatterburyj@4shared.com",
- "Female",
- "8.209.20.215"
- ],
- [
- 21,
- "Vivyan",
- "Drayton",
- "vdraytonk@guardian.co.uk",
- "Female",
- "54.249.7.58"
- ],
- [
- 22,
- "Sam",
- "Balkwill",
- "sbalkwilll@woothemes.com",
- "Non-binary",
- "78.242.137.90"
- ],
- [
- 23,
- "Brannon",
- "Kowalski",
- "bkowalskim@businessinsider.com",
- "Male",
- "253.162.106.67"
- ],
- [
- 24,
- "Roosevelt",
- "Bootman",
- "rbootmann@fda.gov",
- "Male",
- "41.21.8.149"
- ],
- [
- 25,
- "Alistair",
- "Sapson",
- "asapsono@china.com.cn",
- "Male",
- "125.254.70.31"
- ],
- [
- 26,
- "Fernande",
- "Mortar",
- "fmorterp@ocn.ne.jp",
- "Female",
- "89.112.153.152"
- ],
- [
- 27,
- "Brockie",
- "Lennon",
- "blennonq@businesswire.com",
- "Genderfluid",
- "149.242.194.85"
- ],
- [
- 28,
- "Goraud",
- "Ingraham",
- "gingrahamr@eepurl.com",
- "Male",
- "103.232.225.62"
- ],
- [
- 29,
- "Karlyn",
- "Twinterman",
- "ktwintermans@oakley.com",
- "Female",
- "95.152.81.226"
- ],
- [
- 30,
- "Aldridge",
- "De Freyne",
- "adefreynet@reverbnation.com",
- "Genderfluid",
- "48.222.64.234"
- ],
- [
- 31,
- "Luise",
- "Bagehot",
- "lbagehotu@about.me",
- "Female",
- "11.252.152.94"
- ],
- [
- 32,
- "Car",
- "Robrow",
- "crobrowv@examiner.com",
- "Male",
- "74.169.129.98"
- ],
- [
- 33,
- "Maegan",
- "MacMychem",
- "mmacmychemw@altervista.org",
- "Female",
- "2.1.155.157"
- ],
- [
- 34,
- "Burgess",
- "Fothergill",
- "bfothergillx@netscape.com",
- "Male",
- "130.100.57.252"
- ],
- [35, "Kare", "Lias", "kliasy@unblog.fr", "Female", "201.48.25.95"],
- [
- 36,
- "Reed",
- "Follett",
- "rfollettz@tmall.com",
- "Male",
- "163.186.248.71"
- ],
- [
- 37,
- "Stearne",
- "Feehely",
- "sfeehely10@about.me",
- "Male",
- "98.245.179.115"
- ],
- [
- 38,
- "Regan",
- "Handke",
- "rhandke11@scientificamerican.com",
- "Male",
- "62.183.66.194"
- ],
- [
- 39,
- "Ezekiel",
- "Matuszyk",
- "ematuszyk12@nih.gov",
- "Male",
- "239.64.165.64"
- ],
- [
- 40,
- "Bevan",
- "Beadles",
- "bbeadles13@issuu.com",
- "Male",
- "87.119.72.177"
- ],
- [
- 41,
- "Pen",
- "Grainger",
- "pgrainger14@berkeley.edu",
- "Female",
- "227.191.151.117"
- ],
- [
- 42,
- "Erika",
- "Sallenger",
- "esallenger15@bbb.org",
- "Female",
- "76.147.137.94"
- ],
- [
- 43,
- "Marylinda",
- "Fittall",
- "mfittall16@reverbnation.com",
- "Female",
- "234.149.229.180"
- ],
- [
- 44,
- "Gerald",
- "Tatchell",
- "gtatchell17@msu.edu",
- "Male",
- "119.5.236.165"
- ],
- [
- 45,
- "Burr",
- "McGrann",
- "bmcgrann18@topsy.com",
- "Male",
- "157.138.92.223"
- ],
- [
- 46,
- "Bartram",
- "Eastlake",
- "beastlake19@earthlink.net",
- "Male",
- "144.74.73.253"
- ],
- [
- 47,
- "Colette",
- "Backup",
- "cbacup1a@cbc.ca",
- "Female",
- "174.156.58.36"
- ],
- [
- 48,
- "Aubrey",
- "Milburn",
- "amilburn1b@tiny.cc",
- "Male",
- "76.140.198.152"
- ],
- [
- 49,
- "Hettie",
- "Juckes",
- "hjuckes1c@pen.io",
- "Female",
- "58.230.142.235"
- ],
- [
- 50,
- "Mal",
- "Whelan",
- "mwhelan1d@amazonaws.com",
- "Male",
- "10.53.187.113"
- ]
- ]
- }
- },
- "Photo by Oshadha Viduranga": {
- "type": "image",
- "data": {
- "mime-type": "image/webp",
- "data": "UklGRv4IAABXRUJQVlA4WAoAAAAQAAAAYwAAhAAAQUxQSBkAAAABD3Dg/4iIDETaNnMw/2rH7uAkov/JZTUFAFZQOCC+CAAAUCgAnQEqZACFAD6RQJtKpaOiJykw6njgEglAGAQI+vKWZfm/kF23A3O5adDvS0/c6gRlW7/an+6ZnQM1lGLGp5jv/fwJNn+NKfh8vSWS1lEDoYE7N80evzD/b9ZqWNIfmMIz4ROz3PpCEGZbnYoffbUIQrukKg3uIaDsCPFOZ37fYH3WvnPvWCrMCS1zUuJ+uUNaui3vJ1yOpUSKFBp+grUEF9DuqBEDOU6Zd/q7zTD3+LRFY5Dt1Ftl7k0quR+hkKX3VeIEKTXoqdYhCBzd4DcRpsX8NKCNpaGpI6IsywNuqcrHTALYzwEbKJsNhb5LB1hQ9Bqg5WsHr/XNwpCYxM0ANMU0M07nyhyGdK2TUSg4hzQV5lM96G7yWh98QIpXU7HjGXArt3uBADdKPBsGD+u1ylvRC59qh8u+rfriZWXUSGY2m4vJrREmgAD+/lMiPCvfHG35p37ejNrTns55Aedd7T5cWbbQPEc952mYJVuTLLF0nqjbWFqxXmGmNK4KtTyAtV6vnCQG+vpzLzWn/pR+N9UemR6tO2Dg8SJmO+0NLzbmypyoPjmk1KUImv4f/9jSYLNgJqo41gYh29pWPnIUaEmUNdPhbJHFUxEOSuVtEI+tNxADt3zr0FR36o4Ma9ADFe2v1JKuQ0a93rX1rNvinA5ODpf3VOVyWkibM4ruBTdoJ4/3aLE8cXTMpMgo0WvrLz1W8Qs0mrsEUkH5n3OBxSOfZ3oMIA64oNB0nAV8sr5iIy4uk1QaAFkgS6PCk2kNpbD6v0yI8A5e/hBmxFtRllE7bouG1s5aZ8M82jZM7/W9GOdj873Dz3gH6WNCPUiAiF1+OskQiuTmQkxjCXw5B5LDFqbv1pnnm7f0cYWMwyKHtFF6hwwZ2trP1c+TJm+W7LN6S7Bl/isiNg52I++Hc3OKCmXE6W4Il5dSAgYy7w+bxbQqgc/kCE0Mgx6K72qoXj9dYJARSBC7oCWgUA4jIhxFA4DUcow96/vVbS6gjeDtnlcKsfsUslIhaUC4TnX7rOfVl+fbWqWGNH/J3EX4mxKjP8QMyWkNqn/kcB9YplCcj9cMrDT+31X9W33DBBY4h9QoUwCtw4gFkvMA0Jp9cjnI0GKq6lAt+QEjthlIN439x4cnJe4/cXsmSEXsaCmBFl2vc6dXPeTAxb7H+XLzoX43wUvWKusaZErYKrLmhzOTDbJQWQSVMEv2RtIkwy/x13pLuhpBhyMsYCdnQhwdc2OBNABJbyayKZlOUSEqE8uOVZNx7R0QgOZsQRdwP3214OydpaBEFzYR73bCdePnKFC4fL/5OtUbQRA9LjONm9ZWwZb3ICoWjLuEaxsI33TMgoZapd5GSLbQ1YN27pAZyZDCqv2neVSb/lcZqJcf/8D5f6PRj45J5ats/2WrciM3R4FC2kjzdCKyT1k4IrRs64XHwO5WtKoYv3auihjcs54/co2n2TVyzYytBQEteFGzL4bnA9KMnXgK4Y828DT8kAbq++JvEAFK8DvrFibQTBsh6MUMMx0MwLqyOU5Fbn2unafzG2qOTGAIFWzlnr4g638/ts026ILfGHgDXwWqbYKSPNkA83qWDLRY0eUQTWi3Y9RNfmXYxJmO7I1ad8B4l8L5PuubrdA/xImd6xjlYe9MwdHboDM0jwsBf58odXLoba/OieC+QvNoLSR53rsQLa/4y1eJDbfM+C+G+mmwRtbn+GRblsY1xo47Za4LIFyfXcoTlOcHpVBAs5hBUGZFAERQokQIBn470SEi4yzx1rQIKD8ggOPZ3KVBq50bmjfM6+L3+dUK/zJYtSP5LGz1qVEWHCC4chuWYgfvs3SnArEmfHPQg3+seOgSqZb/2+p8Q6n1WrSmFBstUMkKUH/qKIgb7aRdN8MTkER3gC8L/KEsAt8ktL3q+sMwt8fR2zOyZTfjKdgw1z58FYbljmpblHzlvpqEDwfV3KSps75I0GQtsl7yswa1GrtnasC/ngFVfA54x8rnb6T4zhhPcRx0d/5+hY1cNVc01V/C/Al9not7cjLygynKO2Tb4I4YyiIfeCDrvQqpLq6R6dRACq/VtTK2Utk0ugrEaHhwqCdZ+N/fgtQ7zgrw2zyVcYLZMg+sA0eiwQ0+Z5GR6A846PNgA+mt12YRAfVUpxw/lGD1R5iWxTo61mAoYuBVjm9Lj+dRuzjq7pdyJRMYihhNO+TcahkJdsXZt/9f2lo+Bvx6Ecl/GeKMI+PyI2SnGT5Se3LEfrg0rcqtTEL4Gidy8sd4OzgSGvP+CKIvXVXMSDBKG4l7G3pMdHDTDkrorq/C3HcVCI1T1Kwwwds4GNyEW7Nwz3wFTwLHt58DhsY7e3/jHoJ7a0f1/U/4z+0n7HmyKi5FqXiRp1stxVZcH2eOEtgnAl10i7SSyqGRod70D9g/QBpM6jf9FrTnOMhAfEWanp9iSZNJCKUXVAf4X/i8sneLfhIkO1ZbMytUW1/vt9yXQpLZPP2/O7KMBITgyVwMdB6PkvIJ/T2j+85Roi8+nDm+I3bgUX+4k9Rynmklw21rEj1qq/lKXPI6pZo9vmDyu3Tk1FDh2XqbjNCgtRABiQmxtsNdA3Db5ETMMYlG02FKTDugTC6q4qjp9Hk2T4C7/fAsUMjlay6pM+XKl/3PYQ9nhpratJsKro3JDFdG3dibY6NUsDEPurHYX7+PQ1BLP/c8LxyvNoxP7azlNdbG8wauW152ouI4uAKFY1veilvARRm16wfP6Bew0PyONKm/HnqfZbn+MoECXOqL/Gl/+PqBEthnMVZUBAxHCa0rbw/nhMNL0+qswbW4qju1O+LZDMpRQDI9kIxsD2YK8lz3zm7cWyA/QpMvpK19IEWG5uwdRGK6Y7Ds5RRfJKPrxtVsKH2v+XWwCrck9NIgMT8bEL+tVUuaDq7qrG+M5J1FN4bhBT6ShEn6ixLyA7JWmYJjHL3P1+cW6wsJMjpXaVBy+8ajq1WAmtlvcSEcAAAA"
- }
- },
- "cv_results": {
- "type": "cv_results",
- "data": {
- "id": "id-873d8d",
- "start_datetime": "2024-07-24T11:31:00Z",
- "roc_curve_spec": {
- "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
- "description": "A simple bar chart with embedded data.",
- "data": {
- "values": [
- {
- "a": "A",
- "b": 28
- },
- {
- "a": "B",
- "b": 55
- },
- {
- "a": "C",
- "b": 43
- },
- {
- "a": "D",
- "b": 91
- },
- {
- "a": "E",
- "b": 81
- },
- {
- "a": "F",
- "b": 53
- },
- {
- "a": "G",
- "b": 19
- },
- {
- "a": "H",
- "b": 87
- },
- {
- "a": "I",
- "b": 52
- }
- ]
- },
- "mark": "bar",
- "encoding": {
- "x": {
- "field": "a",
- "type": "nominal",
- "axis": {
- "labelAngle": 0
- }
- },
- "y": {
- "field": "b",
- "type": "quantitative"
- }
- }
- },
- "cv_results_table": {
- "columns": [
- "a",
- "b"
- ],
- "data": []
- }
- }
- }
- }
-}
diff --git a/src/skore/api/routes/fake_stores.py b/src/skore/api/routes/fake_stores.py
deleted file mode 100644
index df6a1e4bb..000000000
--- a/src/skore/api/routes/fake_stores.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""The definition of API routes to get fake store."""
-
-from pathlib import Path
-
-from fastapi import APIRouter
-from fastapi.responses import FileResponse
-
-FAKE_SKORES_ROUTER = APIRouter(prefix="/fake-skores", deprecated=True)
-FAKE_STORES_ROUTER = APIRouter(prefix="/fake-stores")
-
-
-@FAKE_SKORES_ROUTER.get("/{uri:path}", response_class=FileResponse)
-@FAKE_STORES_ROUTER.get("/{uri:path}", response_class=FileResponse)
-async def get_fake_store_by_uri(uri: str):
- """Route used to get the fake store, regardless of its URI."""
- return Path(__file__).parent / "fake_store.json"
diff --git a/src/skore/api/routes/stores.py b/src/skore/api/routes/stores.py
deleted file mode 100644
index 1f41a431a..000000000
--- a/src/skore/api/routes/stores.py
+++ /dev/null
@@ -1,130 +0,0 @@
-"""The definition of API routes to list stores and get them."""
-
-import os
-from pathlib import Path
-from typing import Any, Iterable
-
-import fastapi
-from fastapi import APIRouter, HTTPException, status
-from fastapi.encoders import jsonable_encoder
-from fastapi.templating import Jinja2Templates
-
-from skore import registry
-from skore.api import schema
-from skore.storage import URI, FileSystem
-from skore.store.layout import Layout
-from skore.store.store import Store, _get_storage_path
-
-SKORES_ROUTER = APIRouter(prefix="/skores", deprecated=True)
-STORES_ROUTER = APIRouter(prefix="/stores")
-
-# TODO Move this to a more appropriate place
-STATIC_FILES_PATH = (
- Path(__file__).resolve().parent.parent.parent / "dashboard" / "static"
-)
-
-
-def serialize_store(store: Store):
- """Serialize a Store."""
- # mypy does not understand union in generator
- user_items: Iterable[tuple[str, Any, dict]] = filter(
- lambda i: i[0] != Store.LAYOUT_KEY,
- store.items(metadata=True), # type: ignore
- )
-
- payload: dict = {}
- for key, value, metadata in user_items:
- payload[key] = {
- "type": str(metadata["display_type"]),
- "data": value,
- "metadata": metadata,
- }
-
- layout = store.get_layout()
-
- model = schema.Store(
- schema="schema:dashboard:v0",
- uri=str(store.uri),
- payload=payload,
- layout=layout,
- )
-
- return model.model_dump(by_alias=True)
-
-
-@SKORES_ROUTER.get("/share/{uri:path}")
-@STORES_ROUTER.get("/share/{uri:path}")
-async def share_store(request: fastapi.Request, uri: str):
- """Serve an inlined shareable HTML page."""
-
- # Get static assets to inject them into the report template
- def read_asset_content(path):
- with open(STATIC_FILES_PATH / path) as f:
- return f.read()
-
- script_content = read_asset_content("skore.umd.cjs")
- styles_content = read_asset_content("style.css")
-
- # Get Skore and serialize it
- directory = _get_storage_path(os.environ.get("SKORE_ROOT"))
- storage = FileSystem(directory=directory)
- store = registry.find_store_by_uri(URI(uri), storage)
- if store is None:
- raise HTTPException(status_code=404, detail=f"No store found in '{uri}'")
-
- store_data = jsonable_encoder(serialize_store(store))
-
- # Fill the Jinja context
- context = {
- "uri": store.uri,
- "store_data": store_data,
- "script": script_content,
- "styles": styles_content,
- }
-
- # Render the template and send the result
- templates = Jinja2Templates(directory=Path(__file__).resolve().parent / "templates")
- return templates.TemplateResponse(
- request=request, name="share.html.jinja", context=context
- )
-
-
-@SKORES_ROUTER.get("")
-@SKORES_ROUTER.get("/")
-@STORES_ROUTER.get("")
-@STORES_ROUTER.get("/")
-async def list_stores() -> list[str]:
- """Route used to list the URI of stores."""
- directory = _get_storage_path(os.environ.get("SKORE_ROOT"))
- storage = FileSystem(directory=directory)
-
- return sorted(str(store.uri) for store in registry.stores(storage))
-
-
-@SKORES_ROUTER.get("/{uri:path}")
-@STORES_ROUTER.get("/{uri:path}")
-async def get_store_by_uri(uri: str):
- """Route used to get a store by its URI."""
- directory = _get_storage_path(os.environ.get("SKORE_ROOT"))
- storage = FileSystem(directory=directory)
-
- store = registry.find_store_by_uri(URI(uri), storage)
- if store is not None:
- return serialize_store(store)
-
- raise HTTPException(status_code=404, detail=f"No store found in '{uri}'")
-
-
-@SKORES_ROUTER.put("/{uri:path}/layout", status_code=status.HTTP_201_CREATED)
-@STORES_ROUTER.put("/{uri:path}/layout", status_code=status.HTTP_201_CREATED)
-async def put_layout(uri: str, payload: Layout):
- """Save the report layout configuration."""
- directory = _get_storage_path(os.environ.get("SKORE_ROOT"))
- storage = FileSystem(directory=directory)
-
- store = registry.find_store_by_uri(URI(uri), storage)
- if store is not None:
- store.set_layout(payload)
- return serialize_store(store)
-
- raise HTTPException(status_code=404, detail=f"No store found in '{uri}'")
diff --git a/src/skore/api/schema/__init__.py b/src/skore/api/schema/__init__.py
deleted file mode 100644
index d147d9c8e..000000000
--- a/src/skore/api/schema/__init__.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""Schema to define endpoint requirements in the API."""
-
-import typing
-
-import pydantic
-import typing_extensions
-
-from skore.api.schema.any import Any
-from skore.api.schema.array import Array
-from skore.api.schema.boolean import Boolean
-from skore.api.schema.dataframe import DataFrame
-from skore.api.schema.date import Date
-from skore.api.schema.datetime import Datetime
-from skore.api.schema.file import File
-from skore.api.schema.html import HTML
-from skore.api.schema.integer import Integer
-from skore.api.schema.markdown import Markdown
-from skore.api.schema.matplotlib_figure import MatplotlibFigure
-from skore.api.schema.number import Number
-from skore.api.schema.numpy_array import NumpyArray
-from skore.api.schema.sklearn_model import SKLearnModel
-from skore.api.schema.string import String
-from skore.api.schema.vega import Vega
-from skore.store.layout import Layout
-
-__all__ = [
- "Any",
- "Array",
- "Boolean",
- "DataFrame",
- "Date",
- "Datetime",
- "File",
- "HTML",
- "Integer",
- "Markdown",
- "MatplotlibFigure",
- "Number",
- "NumpyArray",
- "Store",
- "String",
- "SKLearnModel",
- "Vega",
-]
-
-
-class Store(pydantic.BaseModel):
- """Highest schema to transfer key-value pairs from store to dashboard.
-
- Examples
- --------
- >>> Store(uri="/root", payload={"key": {"type": "integer", "data": 0}})
- Store(...)
- """
-
- __NAME__ = "schema:dashboard:v0"
-
- model_config = pydantic.ConfigDict(strict=True)
-
- version: typing.Literal[__NAME__] = pydantic.Field(__NAME__, alias="schema")
- uri: str
- payload: dict[
- str,
- typing_extensions.Annotated[
- typing.Union[
- Any,
- Array,
- Boolean,
- DataFrame,
- Date,
- Datetime,
- File,
- HTML,
- Integer,
- Markdown,
- MatplotlibFigure,
- Number,
- NumpyArray,
- String,
- SKLearnModel,
- Vega,
- ],
- pydantic.Field(discriminator="type"),
- ],
- ]
- layout: Layout = pydantic.Field(default=[])
diff --git a/src/skore/api/schema/any.py b/src/skore/api/schema/any.py
deleted file mode 100644
index 9ecc292a2..000000000
--- a/src/skore/api/schema/any.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer any JSON serializable value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class Any(pydantic.BaseModel):
- """Schema to transfer any JSON serializable value from store to dashboard.
-
- Examples
- --------
- >>> Any(data=None)
- Any(...)
-
- >>> Any(type="any", data=None)
- Any(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["any"] = "any"
- data: typing.Any
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/array.py b/src/skore/api/schema/array.py
deleted file mode 100644
index 2f91c48a5..000000000
--- a/src/skore/api/schema/array.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Schema to transfer listable value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class Array(pydantic.BaseModel):
- """Schema to transfer listable value from store to dashboard.
-
- Examples
- --------
- >>> Array(data=[1, 2, 3])
- Array(...)
-
- >>> Array(data=(1, 2, 3))
- Array(...)
-
- >>> Array(type="array", data=(1, 2, 3))
- Array(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["array"] = "array"
- data: typing.Iterable
- metadata: typing.Optional[typing.Any] = None
-
- @pydantic.field_serializer("data")
- def serialize_data(self, data: typing.Iterable) -> list:
- """Serialize data from iterable to list."""
- return list(data)
diff --git a/src/skore/api/schema/boolean.py b/src/skore/api/schema/boolean.py
deleted file mode 100644
index 55269c8c8..000000000
--- a/src/skore/api/schema/boolean.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer boolean value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class Boolean(pydantic.BaseModel):
- """Schema to transfer boolean value from store to dashboard.
-
- Examples
- --------
- >>> Boolean(data=True)
- Boolean(...)
-
- >>> Boolean(type="boolean", data=True)
- Boolean(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["boolean"] = "boolean"
- data: bool
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/dataframe.py b/src/skore/api/schema/dataframe.py
deleted file mode 100644
index 4b614e421..000000000
--- a/src/skore/api/schema/dataframe.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Schema to transfer `pandas.DataFrame` value from store to dashboard."""
-
-import typing
-
-import pandas
-import pydantic
-
-
-class DataFrame(pydantic.BaseModel):
- """Schema to transfer `pandas.DataFrame` value from store to dashboard.
-
- Examples
- --------
- >>> DataFrame(data=pandas.DataFrame())
- DataFrame(...)
-
- >>> DataFrame(type="dataframe", data=pandas.DataFrame())
- DataFrame(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True, arbitrary_types_allowed=True)
-
- type: typing.Literal["dataframe"] = "dataframe"
- data: pandas.DataFrame
- metadata: typing.Optional[typing.Any] = None
-
- @pydantic.field_serializer("data")
- def serialize_data(self, data: pandas.DataFrame) -> dict:
- """Serialize data from `pandas.DataFrame` to dict."""
- return data.to_dict(orient="split")
diff --git a/src/skore/api/schema/date.py b/src/skore/api/schema/date.py
deleted file mode 100644
index 3dc885dea..000000000
--- a/src/skore/api/schema/date.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Schema to transfer date value from store to dashboard."""
-
-import datetime
-import typing
-
-import pydantic
-
-
-class Date(pydantic.BaseModel):
- """Schema to transfer date value from store to dashboard.
-
- Examples
- --------
- >>> Date(data=datetime.date(2024, 1, 1))
- Date(...)
-
- >>> Date(type="date", data=datetime.date(2024, 1, 1))
- Date(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["date"] = "date"
- data: datetime.date
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/datetime.py b/src/skore/api/schema/datetime.py
deleted file mode 100644
index bc7f3b3a4..000000000
--- a/src/skore/api/schema/datetime.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Schema to transfer datetime value from store to dashboard."""
-
-import datetime
-import typing
-
-import pydantic
-
-
-class Datetime(pydantic.BaseModel):
- """Schema to transfer datetime value from store to dashboard.
-
- Examples
- --------
- >>> Datetime(data=datetime.datetime(2024, 1, 1, 0, 0, 0))
- Datetime(...)
-
- >>> Datetime(type="datetime", data=datetime.datetime(2024, 1, 1, 0, 0, 0))
- Datetime(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["datetime"] = "datetime"
- data: datetime.datetime
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/file.py b/src/skore/api/schema/file.py
deleted file mode 100644
index dc97c318f..000000000
--- a/src/skore/api/schema/file.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Schema to transfer filepath value from store to dashboard."""
-
-import pathlib
-import typing
-
-import pydantic
-
-
-class File(pydantic.BaseModel):
- """Schema to transfer filepath value from store to dashboard.
-
- Examples
- --------
- >>> File(data=pathlib.Path("/tmp/myfile.txt"))
- File(...)
-
- >>> File(type="file", data=pathlib.Path("/tmp/myfile.txt"))
- File(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["file"] = "file"
- data: pathlib.Path
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/html.py b/src/skore/api/schema/html.py
deleted file mode 100644
index 751bfee84..000000000
--- a/src/skore/api/schema/html.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer HTML-like string value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class HTML(pydantic.BaseModel):
- """Schema to transfer HTML-like string value from store to dashboard.
-
- Examples
- --------
- >>> HTML(data="
")
- HTML(...)
-
- >>> HTML(type="html", data="
")
- HTML(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["html"] = "html"
- data: str
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/integer.py b/src/skore/api/schema/integer.py
deleted file mode 100644
index ec2074be6..000000000
--- a/src/skore/api/schema/integer.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer integer value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class Integer(pydantic.BaseModel):
- """Schema to transfer integer value from store to dashboard.
-
- Examples
- --------
- >>> Integer(data=1)
- Integer(...)
-
- >>> Integer(type="integer", data=1)
- Integer(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["integer"] = "integer"
- data: int
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/markdown.py b/src/skore/api/schema/markdown.py
deleted file mode 100644
index 4312ca174..000000000
--- a/src/skore/api/schema/markdown.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer Markdown-like string value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class Markdown(pydantic.BaseModel):
- """Schema to transfer Markdown-like string value from store to dashboard.
-
- Examples
- --------
- >>> Markdown(data="# title")
- Markdown(...)
-
- >>> Markdown(type="markdown", data="# title")
- Markdown(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["markdown"] = "markdown"
- data: str
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/matplotlib_figure.py b/src/skore/api/schema/matplotlib_figure.py
deleted file mode 100644
index ca2aa6ef6..000000000
--- a/src/skore/api/schema/matplotlib_figure.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""Schema to transfer any JSON serializable value from store to dashboard."""
-
-import base64
-import typing
-from io import StringIO
-
-import matplotlib
-import pydantic
-
-
-class MatplotlibFigure(pydantic.BaseModel):
- """Schema to transfer a `matplotlib.figure.Figure` from store to dashboard.
-
- Examples
- --------
- >>> import matplotlib.pyplot as plt
- >>> fig, ax = plt.subplots()
-
- >>> MatplotlibFigure(data=fig)
- MatplotlibFigure(...)
-
- >>> MatplotlibFigure(type="matplotlib_figure", data=fig)
- MatplotlibFigure(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True, arbitrary_types_allowed=True)
-
- type: typing.Literal["matplotlib_figure"] = "matplotlib_figure"
- data: matplotlib.figure.Figure
-
- @pydantic.field_serializer("data")
- def serialize_data(self, data: matplotlib.figure.Figure):
- """Serialize data from matplotlib Figure to SVG image."""
- output = StringIO()
- data.savefig(output, format="svg")
- image_string = output.getvalue()
- image_bytes = image_string.encode("utf-8")
- return base64.b64encode(image_bytes)
diff --git a/src/skore/api/schema/number.py b/src/skore/api/schema/number.py
deleted file mode 100644
index 9f1d057cb..000000000
--- a/src/skore/api/schema/number.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer number value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class Number(pydantic.BaseModel):
- """Schema to transfer number value from store to dashboard.
-
- Examples
- --------
- >>> Number(data=1.1)
- Number(...)
-
- >>> Number(type="number", data=1.1)
- Number(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["number"] = "number"
- data: float
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/numpy_array.py b/src/skore/api/schema/numpy_array.py
deleted file mode 100644
index 9a08a81af..000000000
--- a/src/skore/api/schema/numpy_array.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""Schema to transfer listable value from store to dashboard."""
-
-import typing
-
-import numpy as np
-import pydantic
-import pydantic_numpy.typing as pnd
-
-
-class NumpyArray(pydantic.BaseModel):
- """Schema to transfer NumPy array value from store to dashboard.
-
- Examples
- --------
- >>> NumpyArray(data=np.random.randint(0, 100, size=50))
- NumpyArray(...)
-
- >>> NumpyArray(data=np.random.randint(0, 100, size=50))
- NumpyArray(...)
-
- >>> NumpyArray(type="numpy_array", data=np.random.randint(0, 100, size=50))
- NumpyArray(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["numpy_array"] = "numpy_array"
- data: pnd.NpNDArray
- metadata: typing.Optional[typing.Any] = None
-
- @pydantic.field_serializer("data")
- def serialize_data(self, data: np.ndarray) -> list:
- """Serialize data from ndarray to list."""
- return data.tolist()
diff --git a/src/skore/api/schema/sklearn_model.py b/src/skore/api/schema/sklearn_model.py
deleted file mode 100644
index aab3ca903..000000000
--- a/src/skore/api/schema/sklearn_model.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Schema to transfer any JSON serializable value from store to dashboard."""
-
-import typing
-
-import pydantic
-from sklearn.base import BaseEstimator, estimator_html_repr
-
-
-class SKLearnModel(pydantic.BaseModel):
- """Schema to transfer a `sklearn.BaseEstimator` from store to dashboard.
-
- Examples
- --------
- >>> import numpy as np
- >>> from sklearn.linear_model import LinearRegression
- >>> X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
- >>> y = np.dot(X, np.array([1, 2])) + 3
- >>> model = LinearRegression().fit(X, y)
- >>> SKLearnModel(data=model)
- SKLearnModel(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True, arbitrary_types_allowed=True)
-
- type: typing.Literal["sklearn_model"] = "sklearn_model"
- data: BaseEstimator
- metadata: typing.Optional[typing.Any] = None
-
- @pydantic.field_serializer("data")
- def serialize_data(self, data: BaseEstimator):
- """Serialize a sklearn model to it's HTML representation."""
- return estimator_html_repr(data)
diff --git a/src/skore/api/schema/string.py b/src/skore/api/schema/string.py
deleted file mode 100644
index 805cc7d2a..000000000
--- a/src/skore/api/schema/string.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Schema to transfer string value from store to dashboard."""
-
-import typing
-
-import pydantic
-
-
-class String(pydantic.BaseModel):
- """Schema to transfer string value from store to dashboard.
-
- Examples
- --------
- >>> String(data="value")
- String(...)
-
- >>> String(type="string", data="value")
- String(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True)
-
- type: typing.Literal["string"] = "string"
- data: str
- metadata: typing.Optional[typing.Any] = None
diff --git a/src/skore/api/schema/vega.py b/src/skore/api/schema/vega.py
deleted file mode 100644
index 1ac430e7f..000000000
--- a/src/skore/api/schema/vega.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Schema to transfer (altair) VEGA-chart value from store to dashboard."""
-
-import typing
-
-import altair.vegalite.v5.schema.core
-import pydantic
-
-
-class Vega(pydantic.BaseModel):
- """Schema to transfer VEGA-chart value from store to dashboard.
-
- Examples
- --------
- >>> Vega(data=altair.Chart())
- Vega(...)
-
- >>> Vega(type="vega", data=altair.Chart())
- Vega(...)
- """
-
- model_config = pydantic.ConfigDict(strict=True, arbitrary_types_allowed=True)
-
- type: typing.Literal["vega"] = "vega"
- data: altair.vegalite.v5.schema.core.TopLevelSpec
- metadata: typing.Optional[typing.Any] = None
-
- @pydantic.field_serializer("data")
- def serialize_data(self, data: altair.vegalite.v5.api.Chart) -> dict:
- """Serialize data from `altair.vegalite.v5.api.Chart` to dict."""
- return data.to_dict()
diff --git a/src/skore/cli/__init__.py b/src/skore/cli/__init__.py
new file mode 100644
index 000000000..a3a5aae7b
--- /dev/null
+++ b/src/skore/cli/__init__.py
@@ -0,0 +1,13 @@
+"""Implement skore CLI."""
+
+import logging
+
+formatter = logging.Formatter("%(message)s")
+
+console_handler = logging.StreamHandler()
+console_handler.setLevel(logging.INFO)
+console_handler.setFormatter(formatter)
+
+logger = logging.getLogger(__name__)
+logger.addHandler(console_handler)
+logger.propagate = False
diff --git a/src/skore/cli.py b/src/skore/cli/cli.py
similarity index 84%
rename from src/skore/cli.py
rename to src/skore/cli/cli.py
index 673eb4324..740724ed0 100644
--- a/src/skore/cli.py
+++ b/src/skore/cli/cli.py
@@ -4,9 +4,9 @@
import pathlib
from importlib.metadata import version
-from skore.create_project import create_project
-from skore.dashboard.dashboard import __launch
-from skore.quickstart_command import __quickstart
+from skore.cli.create_project import __create
+from skore.cli.launch_dashboard import __launch
+from skore.cli.quickstart_command import __quickstart
def cli(args: list[str]):
@@ -14,17 +14,17 @@ def cli(args: list[str]):
parser = argparse.ArgumentParser(prog="skore")
parser.add_argument(
- "--version", action="version", version=f"%(prog)s {version("skore")}"
+ "--version", action="version", version=f"%(prog)s {version('skore')}"
)
subparsers = parser.add_subparsers(dest="subcommand")
- parser_launch = subparsers.add_parser("launch", help="Launch the dashboard")
+ parser_launch = subparsers.add_parser("launch", help="Launch the web UI")
parser_launch.add_argument(
"project_name",
nargs="?",
- help="the name of the project to open (default: %(default)s)",
- default="project",
+ help="the name or path of the project to open (default: %(default)s)",
+ default="project.skore",
)
parser_launch.add_argument(
"--port",
@@ -36,7 +36,7 @@ def cli(args: list[str]):
"--open-browser",
action=argparse.BooleanOptionalAction,
help=(
- "whether to automatically open a browser tab showing the dashboard "
+ "whether to automatically open a browser tab showing the web UI "
"(default: %(default)s)"
),
default=True,
@@ -46,7 +46,7 @@ def cli(args: list[str]):
parser_create.add_argument(
"project_name",
nargs="?",
- help="the name of the project (default: %(default)s)",
+ help="the name or path of the project to create (default: %(default)s)",
default="project",
)
parser_create.add_argument(
@@ -60,7 +60,7 @@ def cli(args: list[str]):
)
subparsers.add_parser(
- "quickstart", help='Create a "project.skore" file and start the dashboard'
+ "quickstart", help='Create a "project.skore" file and start the UI'
)
parsed_args: argparse.Namespace = parser.parse_args(args)
@@ -75,7 +75,7 @@ def cli(args: list[str]):
open_browser=parsed_args.open_browser,
)
case "create":
- create_project(
+ __create(
project_name=parsed_args.project_name,
working_dir=parsed_args.working_dir,
)
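A minimal sketch of driving the relocated entry point programmatically; the port value is illustrative and the project name follows the defaults shown above:

    from skore.cli.cli import cli

    # Create "project.skore" if needed and start the UI (equivalent to `skore quickstart`).
    cli(["quickstart"])

    # Or serve an existing project explicitly, without auto-opening a browser tab.
    cli(["launch", "project.skore", "--port", "8000", "--no-open-browser"])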
diff --git a/src/skore/create_project.py b/src/skore/cli/create_project.py
similarity index 87%
rename from src/skore/create_project.py
rename to src/skore/cli/create_project.py
index 30cd31f23..0710cdc57 100644
--- a/src/skore/create_project.py
+++ b/src/skore/cli/create_project.py
@@ -1,10 +1,9 @@
"""Implement the "create project" feature."""
-import os
import re
from pathlib import Path
-from skore import logger
+from skore.cli import logger
class ProjectNameTooLong(Exception):
@@ -79,7 +78,7 @@ class ProjectPermissionError(Exception):
"""Permissions in the directory do not allow creating a file."""
-def create_project(project_name: str | Path, working_dir: Path | None = None) -> Path:
+def __create(project_name: str | Path, working_dir: Path | None = None) -> Path:
"""Create a project file named according to `project_name`.
Parameters
@@ -119,7 +118,7 @@ def create_project(project_name: str | Path, working_dir: Path | None = None) ->
)
try:
- os.mkdir(project_directory)
+ project_directory.mkdir()
except FileExistsError as e:
raise ProjectAlreadyExists(
f"Unable to create project file '{project_directory}' because a file "
@@ -136,5 +135,23 @@ def create_project(project_name: str | Path, working_dir: Path | None = None) ->
f"Unable to create project file '{project_directory}'."
) from e
+ # Once the main project directory has been created, create the nested directories
+
+ items_dir = project_directory / "items"
+ try:
+ items_dir.mkdir()
+ except Exception as e:
+ raise ProjectCreationError(
+ f"Unable to create project file '{items_dir}'."
+ ) from e
+
+ layouts_dir = project_directory / "layouts"
+ try:
+ layouts_dir.mkdir()
+ except Exception as e:
+ raise ProjectCreationError(
+ f"Unable to create project file '{layouts_dir}'."
+ ) from e
+
logger.info(f"Project file '{project_directory}' was successfully created.")
return project_directory
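A sketch of what the added nested-directory step amounts to, using only pathlib; the directory name is illustrative:

    from pathlib import Path

    project_directory = Path("project.skore")
    project_directory.mkdir()
    (project_directory / "items").mkdir()     # item storage
    (project_directory / "layouts").mkdir()   # layout storage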
diff --git a/src/skore/cli/launch_dashboard.py b/src/skore/cli/launch_dashboard.py
new file mode 100644
index 000000000..0100fdec5
--- /dev/null
+++ b/src/skore/cli/launch_dashboard.py
@@ -0,0 +1,51 @@
+"""Implement the "launch" command."""
+
+import threading
+import time
+import webbrowser
+from pathlib import Path
+
+import uvicorn
+
+from skore.cli import logger
+from skore.project import load
+from skore.ui.app import create_app
+
+
+class ProjectNotFound(Exception):
+ """Project was not found."""
+
+ project_path: Path
+
+
+def __open_browser(port: int):
+ time.sleep(0.5)
+ webbrowser.open(f"http://localhost:{port}")
+
+
+def __launch(project_name: str | Path, port: int, open_browser: bool):
+ """Launch the UI to visualize a project.
+
+ Parameters
+ ----------
+ project_name : Path-like
+ Name of the project to open, or a relative or absolute path.
+ port : int
+ Port at which to bind the UI server.
+ open_browser: bool
+ Whether to automatically open a browser tab showing the UI.
+ """
+ project = load(project_name)
+ app = create_app(project=project)
+
+ if open_browser:
+ threading.Thread(target=lambda: __open_browser(port=port)).start()
+
+ try:
+ # TODO: check port is free
+ logger.info(
+ f"Running skore UI from '{project_name}' at URL http://localhost:{port}"
+ )
+ uvicorn.run(app, port=port, log_level="error")
+ except KeyboardInterrupt:
+ logger.info("Closing skore UI")
diff --git a/src/skore/quickstart_command.py b/src/skore/cli/quickstart_command.py
similarity index 66%
rename from src/skore/quickstart_command.py
rename to src/skore/cli/quickstart_command.py
index 797650e6f..83dd76ee4 100644
--- a/src/skore/quickstart_command.py
+++ b/src/skore/cli/quickstart_command.py
@@ -1,14 +1,14 @@
"""Implement the "quickstart" command."""
-from skore import logger
-from skore.create_project import ProjectAlreadyExists, create_project
-from skore.dashboard.dashboard import __launch
+from skore.cli import logger
+from skore.cli.create_project import ProjectAlreadyExists, __create
+from skore.cli.launch_dashboard import __launch
def __quickstart():
"""Quickstart a Skore project.
- Create it if it does not exist, then launch the dashboard.
+ Create it if it does not exist, then launch the web UI.
Parameters
----------
@@ -16,8 +16,9 @@ def __quickstart():
Port at which to bind the UI server.
"""
project_name = "project.skore"
+
try:
- create_project(project_name=project_name)
+ __create(project_name=project_name)
except ProjectAlreadyExists:
logger.info(
f"Project file '{project_name}' already exists. Skipping creation step."
diff --git a/src/skore/dashboard/__init__.py b/src/skore/dashboard/__init__.py
deleted file mode 100644
index 8b5faf8eb..000000000
--- a/src/skore/dashboard/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""The dashboard to display stores."""
-
-from skore.dashboard.app import create_dashboard_app
-
-__all__ = [
- "create_dashboard_app",
-]
diff --git a/src/skore/dashboard/app.py b/src/skore/dashboard/app.py
deleted file mode 100644
index fdd6fbbfb..000000000
--- a/src/skore/dashboard/app.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""FastAPI factory used to create the dashboard to display stores."""
-
-from pathlib import Path
-
-from fastapi.staticfiles import StaticFiles
-
-from skore.api import create_api_app
-
-
-def create_dashboard_app():
- """FastAPI factory used to create the dashboard to display stores."""
- app = create_api_app()
-
- # Mount frontend from the static directory.
- app.mount(
- "/",
- StaticFiles(
- directory=(Path(__file__).parent / "static"),
- html=True,
- follow_symlink=True,
- ),
- name="static",
- )
-
- return app
diff --git a/src/skore/dashboard/dashboard.py b/src/skore/dashboard/dashboard.py
deleted file mode 100644
index cbdafcf76..000000000
--- a/src/skore/dashboard/dashboard.py
+++ /dev/null
@@ -1,74 +0,0 @@
-"""Implement the "launch" command."""
-
-import os
-import threading
-import time
-import webbrowser
-from pathlib import Path
-
-import uvicorn
-
-from skore import logger
-
-
-class ProjectNotFound(Exception):
- """Project was not found."""
-
- project_path: Path
-
-
-def __open_browser(port: int):
- time.sleep(0.5)
- webbrowser.open(f"http://localhost:{port}")
-
-
-def __launch(project_name: str | Path, port: int, open_browser: bool):
- """Launch dashboard to visualize a project.
-
- Parameters
- ----------
- project_name : Path-like
- Name of the project to be created, or a relative or absolute path.
- port : int
- Port at which to bind the UI server.
- open_browser: bool
- Whether to automatically open a browser tab showing the dashboard.
-
- Returns
- -------
- A tuple with the dashboard and the project directory path if succeeded,
- None if failed
- """
- if Path(project_name).exists():
- pass
- elif Path(project_name + ".skore").exists():
- project_name = project_name + ".skore"
- else:
- raise ProjectNotFound(
- f"Project '{project_name}' not found. "
- "Maybe you forget to create it? Please check the file name and try again."
- )
-
- # FIXME: Passing the project name through environment variables is smelly
- if os.environ.get("SKORE_ROOT") is None:
- os.environ["SKORE_ROOT"] = project_name
-
- logger.info(
- f"Running dashboard for project file '{project_name}' at URL http://localhost:{port}"
- )
-
- if open_browser:
- threading.Thread(target=lambda: __open_browser(port=port)).start()
-
- # TODO: Check beforehand that port is not already bound
- config = uvicorn.Config(
- app="skore.dashboard.app:create_dashboard_app",
- port=port,
- log_level="error",
- factory=True,
- )
- server = uvicorn.Server(config=config)
- try:
- server.run()
- except KeyboardInterrupt:
- logger.info("Closing dashboard")
diff --git a/src/skore/item/__init__.py b/src/skore/item/__init__.py
index a11fae011..3e9af9066 100644
--- a/src/skore/item/__init__.py
+++ b/src/skore/item/__init__.py
@@ -1,10 +1,19 @@
-"""Classes used to store data and metadata."""
+"""Item types for the skore package."""
-from skore.item.display_type import DisplayType
-from skore.item.item import Item, ItemMetadata
+from skore.item.item import Item
+from skore.item.item_repository import ItemRepository
+from skore.item.media_item import MediaItem
+from skore.item.numpy_array_item import NumpyArrayItem
+from skore.item.pandas_dataframe_item import PandasDataFrameItem
+from skore.item.primitive_item import PrimitiveItem
+from skore.item.sklearn_base_estimator_item import SklearnBaseEstimatorItem
__all__ = [
- "DisplayType",
"Item",
- "ItemMetadata",
+ "ItemRepository",
+ "MediaItem",
+ "NumpyArrayItem",
+ "PandasDataFrameItem",
+ "PrimitiveItem",
+ "SklearnBaseEstimatorItem",
]
diff --git a/src/skore/item/display_type.py b/src/skore/item/display_type.py
deleted file mode 100644
index 7b1d9036a..000000000
--- a/src/skore/item/display_type.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Implement a type-inference algorithm.
-
-This aims to simplify the insertion of data into an `Store`, by avoiding the need
-for the user to explicitly specify a type when adding a value.
-"""
-
-from __future__ import annotations
-
-import datetime
-import pathlib
-from enum import StrEnum, auto
-from typing import Any
-
-import altair.vegalite.v5.schema.core
-import matplotlib.figure
-import numpy
-import pandas
-import polars
-from sklearn.base import BaseEstimator
-
-
-class DisplayType(StrEnum):
- """Type used to customize the visualization of objects stored in a `Store`."""
-
- ANY = auto()
- ARRAY = auto()
- BOOLEAN = auto()
- DATAFRAME = auto()
- DATE = auto()
- DATETIME = auto()
- FILE = auto()
- HTML = auto()
- IMAGE = auto()
- INTEGER = auto()
- MARKDOWN = auto()
- MATPLOTLIB_FIGURE = auto()
- NUMBER = auto()
- NUMPY_ARRAY = auto()
- STRING = auto()
- VEGA = auto()
- SKLEARN_MODEL = auto()
-
- @staticmethod
- def infer(x: Any) -> DisplayType:
- """Infer the type of `x`.
-
- Notes
- -----
- If no match can be found the output is `DisplayType.ANY`.
- Strings are interpreted as Markdown by default.
- In general it is difficult to detect HTML or an image when given only a string,
- so for now we never infer these two types.
-
- Examples
- --------
- >>> DisplayType.infer(3)
- <DisplayType.INTEGER: 'integer'>
-
- >>> DisplayType.infer(None)
- <DisplayType.ANY: 'any'>
-
- >>> DisplayType.infer((1, "b"))
- <DisplayType.ANY: 'any'>
-
- >>> DisplayType.infer("hello")
- <DisplayType.MARKDOWN: 'markdown'>
- """
- TYPE_TO_DISPLAY_TYPE = {
- list: DisplayType.ARRAY,
- bool: DisplayType.BOOLEAN,
- pandas.DataFrame: DisplayType.DATAFRAME,
- polars.DataFrame: DisplayType.DATAFRAME,
- datetime.date: DisplayType.DATE,
- datetime.datetime: DisplayType.DATETIME,
- int: DisplayType.INTEGER,
- str: DisplayType.MARKDOWN,
- matplotlib.figure.Figure: DisplayType.MATPLOTLIB_FIGURE,
- float: DisplayType.NUMBER,
- numpy.ndarray: DisplayType.NUMPY_ARRAY,
- }
-
- if isinstance(x, altair.vegalite.v5.schema.core.TopLevelSpec):
- return DisplayType.VEGA
-
- # `Paths` are `PosixPath` or `WindowsPath` when instantiated
- if isinstance(x, pathlib.Path):
- return DisplayType.FILE
-
- if isinstance(x, BaseEstimator):
- return DisplayType.SKLEARN_MODEL
-
- # Exact match
- return TYPE_TO_DISPLAY_TYPE.get(type(x), DisplayType.ANY)
diff --git a/src/skore/item/item.py b/src/skore/item/item.py
index 721f1c360..970cb65d6 100644
--- a/src/skore/item/item.py
+++ b/src/skore/item/item.py
@@ -1,29 +1,69 @@
-"""Item class used to store data."""
+"""Base class for all items in the project."""
from __future__ import annotations
-from dataclasses import dataclass
-from typing import TYPE_CHECKING
+import inspect
+from abc import ABC, abstractmethod
+from datetime import UTC, datetime
+from functools import cached_property
+from typing import Any
-if TYPE_CHECKING:
- from datetime import datetime
- from typing import Any
- from skore.item.display_type import DisplayType
+class Item(ABC):
+ """
+ Abstract base class for all items in the project.
+ This class provides a common interface for all items, including
+ creation and update timestamps.
-@dataclass(kw_only=True, frozen=True)
-class ItemMetadata:
- """ItemMetadata class used to store metadata."""
+ Parameters
+ ----------
+ created_at : str | None, optional
+ The creation timestamp of the item. If None, the current time is used.
+ updated_at : str | None, optional
+ The last update timestamp of the item. If None, the current time is used.
- display_type: DisplayType
- created_at: datetime
- updated_at: datetime
+ Attributes
+ ----------
+ created_at : str
+ The creation timestamp of the item.
+ updated_at : str
+ The last update timestamp of the item.
+ """
+ def __init__(
+ self,
+ created_at: str | None = None,
+ updated_at: str | None = None,
+ ):
+ now = datetime.now(tz=UTC).isoformat()
-@dataclass(kw_only=True, frozen=True)
-class Item:
- """Item class used to store data and metadata."""
+ self.created_at = created_at or now
+ self.updated_at = updated_at or now
- data: Any
- metadata: ItemMetadata
+ @classmethod
+ @abstractmethod
+ def factory(cls) -> Item:
+ """
+ Create and return a new instance of the Item.
+
+ Returns
+ -------
+ Item
+ A new instance of the Item.
+ """
+
+ @cached_property
+ def __parameters__(self) -> dict[str, Any]:
+ """
+ Get the parameters of the Item instance.
+
+ Returns
+ -------
+ dict[str, Any]
+ A dictionary containing the parameters of the Item instance.
+ """
+ cls = self.__class__
+ cls_parameters = inspect.signature(cls).parameters
+
+ return {parameter: getattr(self, parameter) for parameter in cls_parameters}
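A minimal sketch of a concrete subclass (`ExampleItem` is hypothetical), showing how `__parameters__` mirrors the constructor signature so an item can later be rebuilt from storage:

    from skore.item.item import Item

    class ExampleItem(Item):
        def __init__(self, value, created_at=None, updated_at=None):
            super().__init__(created_at, updated_at)
            self.value = value

        @classmethod
        def factory(cls, value):
            return cls(value=value)

    item = ExampleItem.factory(42)
    item.__parameters__  # {"value": 42, "created_at": "...", "updated_at": "..."}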
diff --git a/src/skore/item/item_repository.py b/src/skore/item/item_repository.py
new file mode 100644
index 000000000..e01c0f6a4
--- /dev/null
+++ b/src/skore/item/item_repository.py
@@ -0,0 +1,111 @@
+"""ItemRepository for managing storage and retrieval of items.
+
+This module provides the ItemRepository class, which is responsible for
+storing, retrieving, and deleting items in a storage system.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from skore.item.item import Item
+ from skore.persistence.abstract_storage import AbstractStorage
+
+
+from skore.item.media_item import MediaItem
+from skore.item.numpy_array_item import NumpyArrayItem
+from skore.item.pandas_dataframe_item import PandasDataFrameItem
+from skore.item.primitive_item import PrimitiveItem
+from skore.item.sklearn_base_estimator_item import SklearnBaseEstimatorItem
+
+
+class ItemRepository:
+ """
+ A repository for managing storage and retrieval of items.
+
+ This class provides methods to get, put, and delete items from a storage system.
+ """
+
+ ITEM_CLASS_NAME_TO_ITEM_CLASS = {
+ "MediaItem": MediaItem,
+ "NumpyArrayItem": NumpyArrayItem,
+ "PandasDataFrameItem": PandasDataFrameItem,
+ "PrimitiveItem": PrimitiveItem,
+ "SklearnBaseEstimatorItem": SklearnBaseEstimatorItem,
+ }
+
+ def __init__(self, storage: AbstractStorage):
+ """
+ Initialize the ItemRepository with a storage system.
+
+ Parameters
+ ----------
+ storage : AbstractStorage
+ The storage system to be used by the repository.
+ """
+ self.storage = storage
+
+ def get_item(self, key) -> Item:
+ """
+ Retrieve an item from storage.
+
+ Parameters
+ ----------
+ key : Any
+ The key used to identify the item in storage.
+
+ Returns
+ -------
+ Item
+ The retrieved item.
+ """
+ value = self.storage[key]
+ item_class_name = value["item_class_name"]
+ item_class = ItemRepository.ITEM_CLASS_NAME_TO_ITEM_CLASS[item_class_name]
+ item = value["item"]
+
+ return item_class(**item)
+
+ def put_item(self, key, item: Item) -> None:
+ """
+ Store an item in storage.
+
+ Parameters
+ ----------
+ key : Any
+ The key to use for storing the item.
+ item : Item
+ The item to be stored.
+ """
+ item_parameters = item.__parameters__
+
+ if key in self.storage:
+ item_parameters["created_at"] = self.storage[key]["item"]["created_at"]
+
+ self.storage[key] = {
+ "item_class_name": item.__class__.__name__,
+ "item": item_parameters,
+ }
+
+ def delete_item(self, key):
+ """
+ Delete an item from storage.
+
+ Parameters
+ ----------
+ key : Any
+ The key of the item to be deleted.
+ """
+ del self.storage[key]
+
+ def keys(self) -> list[str]:
+ """
+ Get all keys of items stored in the repository.
+
+ Returns
+ -------
+ list[str]
+ A list of all keys in the storage.
+ """
+ return list(self.storage.keys())
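A usage sketch, assuming a plain dict is an acceptable stand-in for AbstractStorage (only mapping-style access is needed here); the key and value are illustrative:

    from skore.item.item_repository import ItemRepository
    from skore.item.primitive_item import PrimitiveItem

    repository = ItemRepository(storage={})   # dict used in place of an AbstractStorage
    repository.put_item("accuracy", PrimitiveItem.factory(0.87))

    item = repository.get_item("accuracy")    # rebuilt as a PrimitiveItem
    item.primitive                            # 0.87
    repository.keys()                         # ["accuracy"]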
diff --git a/src/skore/item/media_item.py b/src/skore/item/media_item.py
new file mode 100644
index 000000000..95d90d1be
--- /dev/null
+++ b/src/skore/item/media_item.py
@@ -0,0 +1,222 @@
+"""MediaItem.
+
+This module defines the MediaItem class, which represents media items.
+"""
+
+from __future__ import annotations
+
+from io import BytesIO
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from altair.vegalite.v5.schema.core import TopLevelSpec as Altair
+ from matplotlib.figure import Figure as Matplotlib
+ from PIL.Image import Image as Pillow
+
+from skore.item.item import Item
+
+
+class MediaItem(Item):
+ """
+ A class to represent a media item.
+
+ This class encapsulates various types of media along with metadata.
+ """
+
+ def __init__(
+ self,
+ media_bytes: bytes,
+ media_encoding: str,
+ media_type: str,
+ created_at: str | None = None,
+ updated_at: str | None = None,
+ ):
+ """
+ Initialize a MediaItem.
+
+ Parameters
+ ----------
+ media_bytes : bytes
+ The raw bytes of the media content.
+ media_encoding : str
+ The encoding of the media content.
+ media_type : str
+ The MIME type of the media content.
+ created_at : str, optional
+ The creation timestamp in ISO format.
+ updated_at : str, optional
+ The last update timestamp in ISO format.
+ """
+ super().__init__(created_at, updated_at)
+
+ self.media_bytes = media_bytes
+ self.media_encoding = media_encoding
+ self.media_type = media_type
+
+ @classmethod
+ def factory(cls, media, *args, **kwargs):
+ """
+ Create a new MediaItem instance.
+
+ This is a generic factory method that dispatches to specific
+ factory methods based on the type of media provided.
+
+ Parameters
+ ----------
+ media : Any
+ The media content to store.
+
+ Raises
+ ------
+ NotImplementedError
+ If the type of media is not supported.
+
+ Returns
+ -------
+ MediaItem
+ A new MediaItem instance.
+ """
+ media_mro_fullnames = {
+ f"{cls.__module__}.{cls.__name__}" for cls in media.__class__.__mro__
+ }
+
+ if "builtins.bytes" in media_mro_fullnames:
+ return cls.factory_bytes(media, *args, **kwargs)
+ if "builtins.str" in media_mro_fullnames:
+ return cls.factory_str(media, *args, **kwargs)
+ if "altair.vegalite.v5.schema.core.TopLevelSpec" in media_mro_fullnames:
+ return cls.factory_altair(media, *args, **kwargs)
+ if "matplotlib.figure.Figure" in media_mro_fullnames:
+ return cls.factory_matplotlib(media, *args, **kwargs)
+ if "PIL.Image.Image" in media_mro_fullnames:
+ return cls.factory_pillow(media, *args, **kwargs)
+
+ raise NotImplementedError(f"Type '{media.__class__}' is not yet supported")
+
+ @classmethod
+ def factory_bytes(
+ cls,
+ media: bytes,
+ media_encoding: str = "utf-8",
+ media_type: str = "application/octet-stream",
+ ) -> MediaItem:
+ """
+ Create a new MediaItem instance from bytes.
+
+ Parameters
+ ----------
+ media : bytes
+ The raw bytes of the media content.
+ media_encoding : str, optional
+ The encoding of the media content, by default "utf-8".
+ media_type : str, optional
+ The MIME type of the media content, by default "application/octet-stream".
+
+ Returns
+ -------
+ MediaItem
+ A new MediaItem instance.
+ """
+ return cls(
+ media_bytes=media,
+ media_encoding=media_encoding,
+ media_type=media_type,
+ )
+
+ @classmethod
+ def factory_str(cls, media: str, media_type: str = "text/markdown") -> MediaItem:
+ """
+ Create a new MediaItem instance from a string.
+
+ Parameters
+ ----------
+ media : str
+ The string content to store.
+ media_type : str, optional
+ The MIME type of the media content, by default "text/markdown".
+
+ Returns
+ -------
+ MediaItem
+ A new MediaItem instance.
+ """
+ media_bytes = media.encode("utf-8")
+
+ return cls(
+ media_bytes=media_bytes,
+ media_encoding="utf-8",
+ media_type=media_type,
+ )
+
+ @classmethod
+ def factory_altair(cls, media: Altair) -> MediaItem:
+ """
+ Create a new MediaItem instance from an Altair chart.
+
+ Parameters
+ ----------
+ media : Altair
+ The Altair chart to store.
+
+ Returns
+ -------
+ MediaItem
+ A new MediaItem instance.
+ """
+ media_bytes = media.to_json().encode("utf-8")
+
+ return cls(
+ media_bytes=media_bytes,
+ media_encoding="utf-8",
+ media_type="application/vnd.vega.v5+json",
+ )
+
+ @classmethod
+ def factory_matplotlib(cls, media: Matplotlib) -> MediaItem:
+ """
+ Create a new MediaItem instance from a Matplotlib figure.
+
+ Parameters
+ ----------
+ media : Matplotlib
+ The Matplotlib figure to store.
+
+ Returns
+ -------
+ MediaItem
+ A new MediaItem instance.
+ """
+ with BytesIO() as stream:
+ media.savefig(stream, format="svg")
+ media_bytes = stream.getvalue()
+
+ return cls(
+ media_bytes=media_bytes,
+ media_encoding="utf-8",
+ media_type="image/svg+xml",
+ )
+
+ @classmethod
+ def factory_pillow(cls, media: Pillow) -> MediaItem:
+ """
+ Create a new MediaItem instance from a Pillow image.
+
+ Parameters
+ ----------
+ media : Pillow
+ The Pillow image to store.
+
+ Returns
+ -------
+ MediaItem
+ A new MediaItem instance.
+ """
+ with BytesIO() as stream:
+ media.save(stream, format="png")
+ media_bytes = stream.getvalue()
+
+ return cls(
+ media_bytes=media_bytes,
+ media_encoding="utf-8",
+ media_type="image/png",
+ )
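A sketch of the type-based dispatch in `MediaItem.factory`; the string content is illustrative:

    import matplotlib.pyplot as plt

    from skore.item.media_item import MediaItem

    note = MediaItem.factory("# Training notes")   # str -> factory_str, "text/markdown"

    figure, ax = plt.subplots()
    plot = MediaItem.factory(figure)               # Figure -> factory_matplotlib
    plot.media_type                                # "image/svg+xml"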
diff --git a/src/skore/item/numpy_array_item.py b/src/skore/item/numpy_array_item.py
new file mode 100644
index 000000000..ad9b4b0d5
--- /dev/null
+++ b/src/skore/item/numpy_array_item.py
@@ -0,0 +1,96 @@
+"""NumpyArrayItem.
+
+This module defines the NumpyArrayItem class, which represents a NumPy array item.
+"""
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ import numpy
+
+from skore.item.item import Item
+
+
+class NumpyArrayItem(Item):
+ """
+ A class to represent a NumPy array item.
+
+ This class encapsulates a NumPy array along with its creation and update timestamps.
+
+ Attributes
+ ----------
+ array_list : list
+ The list representation of the NumPy array.
+ created_at : str
+ The timestamp when the item was created, in ISO format.
+ updated_at : str
+ The timestamp when the item was last updated, in ISO format.
+
+ Methods
+ -------
+ array : numpy.ndarray
+ Returns the NumPy array representation of the stored list.
+ factory(array: numpy.ndarray) : NumpyArrayItem
+ Creates a new NumpyArrayItem instance from a NumPy array.
+ """
+
+ def __init__(
+ self,
+ array_list: list,
+ created_at: str | None = None,
+ updated_at: str | None = None,
+ ):
+ """
+ Initialize a NumpyArrayItem.
+
+ Parameters
+ ----------
+ array_list : list
+ The list representation of the NumPy array.
+ created_at : str
+ The creation timestamp in ISO format.
+ updated_at : str
+ The last update timestamp in ISO format.
+ """
+ super().__init__(created_at, updated_at)
+
+ self.array_list = array_list
+
+ @cached_property
+ def array(self) -> numpy.ndarray:
+ """
+ Convert the stored list to a NumPy array.
+
+ Returns
+ -------
+ numpy.ndarray
+ The NumPy array representation of the stored list.
+ """
+ import numpy
+
+ return numpy.asarray(self.array_list)
+
+ @classmethod
+ def factory(cls, array: numpy.ndarray) -> NumpyArrayItem:
+ """
+ Create a new NumpyArrayItem instance from a NumPy array.
+
+ Parameters
+ ----------
+ array : numpy.ndarray
+ The NumPy array to store.
+
+ Returns
+ -------
+ NumpyArrayItem
+ A new NumpyArrayItem instance.
+ """
+ instance = cls(array_list=array.tolist())
+
+ # seed the cached property so the original array is reused as-is
+ instance.array = array
+
+ return instance
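A usage sketch showing both directions of the list round-trip; the array values are illustrative:

    import numpy

    from skore.item.numpy_array_item import NumpyArrayItem

    item = NumpyArrayItem.factory(numpy.array([1, 2, 3]))
    item.array_list                  # [1, 2, 3], what gets persisted
    item.array                       # the original ndarray, seeded by the factory

    restored = NumpyArrayItem(array_list=[1, 2, 3])
    restored.array                   # rebuilt lazily via numpy.asarray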
diff --git a/src/skore/item/pandas_dataframe_item.py b/src/skore/item/pandas_dataframe_item.py
new file mode 100644
index 000000000..5a4c85591
--- /dev/null
+++ b/src/skore/item/pandas_dataframe_item.py
@@ -0,0 +1,82 @@
+"""PandasDataFrameItem.
+
+This module defines the PandasDataFrameItem class,
+which represents a pandas DataFrame item.
+"""
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ import pandas
+
+from skore.item.item import Item
+
+
+class PandasDataFrameItem(Item):
+ """
+ A class to represent a pandas DataFrame item.
+
+ This class encapsulates a pandas DataFrame along with its
+ creation and update timestamps.
+ """
+
+ def __init__(
+ self,
+ dataframe_dict: dict,
+ created_at: str | None = None,
+ updated_at: str | None = None,
+ ):
+ """
+ Initialize a PandasDataFrameItem.
+
+ Parameters
+ ----------
+ dataframe_dict : dict
+ The dict representation of the dataframe.
+ created_at : str
+ The creation timestamp in ISO format.
+ updated_at : str
+ The last update timestamp in ISO format.
+ """
+ super().__init__(created_at, updated_at)
+
+ self.dataframe_dict = dataframe_dict
+
+ @cached_property
+ def dataframe(self) -> pandas.DataFrame:
+ """
+ Convert the stored dictionary to a pandas DataFrame.
+
+ Returns
+ -------
+ pandas.DataFrame
+ The pandas DataFrame representation of the stored dictionary.
+ """
+ import pandas
+
+ return pandas.DataFrame.from_dict(self.dataframe_dict, orient="tight")
+
+ @classmethod
+ def factory(cls, dataframe: pandas.DataFrame) -> PandasDataFrameItem:
+ """
+ Create a new PandasDataFrameItem instance from a pandas DataFrame.
+
+ Parameters
+ ----------
+ dataframe : pandas.DataFrame
+ The pandas DataFrame to store.
+
+ Returns
+ -------
+ PandasDataFrameItem
+ A new PandasDataFrameItem instance.
+ """
+ instance = cls(dataframe_dict=dataframe.to_dict(orient="tight"))
+
+ # seed the cached property so the original DataFrame is reused without a dict round trip
+ instance.dataframe = dataframe
+
+ return instance
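A similar round-trip sketch for PandasDataFrameItem, assuming a pandas version that supports the "tight" orient (which is what preserves index and column names):

    import pandas as pd

    from skore.item.pandas_dataframe_item import PandasDataFrameItem

    df = pd.DataFrame({"a": [1, 2], "b": [3.0, 4.0]}, index=["x", "y"])
    item = PandasDataFrameItem.factory(df)

    # dataframe_dict uses pandas' "tight" orient, so index and columns survive
    restored = PandasDataFrameItem(dataframe_dict=item.dataframe_dict).dataframe
    assert restored.equals(df)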
diff --git a/src/skore/item/primitive_item.py b/src/skore/item/primitive_item.py
new file mode 100644
index 000000000..e420e228b
--- /dev/null
+++ b/src/skore/item/primitive_item.py
@@ -0,0 +1,88 @@
+"""Define PrimitiveItem.
+
+PrimitiveItem represents a primitive value along with its creation and update timestamps.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from skore.item.item import Item
+
+if TYPE_CHECKING:
+ from typing import Union
+
+ Primitive = Union[
+ bool,
+ float,
+ int,
+ str,
+ list["Primitive"],
+ tuple["Primitive", ...],
+ dict[str | int | float, "Primitive"],
+ ]
+
+
+def is_primitive(obj: object) -> bool:
+ """Check if the object is a primitive."""
+ if isinstance(obj, (bool, float, int, str)):
+ return True
+ if isinstance(obj, (list, tuple)):
+ return all(is_primitive(item) for item in obj)
+ if isinstance(obj, dict):
+ return all(
+ isinstance(k, (bool, float, int, str)) and is_primitive(v)
+ for k, v in obj.items()
+ )
+ return False
+
+
+class PrimitiveItem(Item):
+ """
+ A class to represent a primitive item.
+
+ This class encapsulates a primitive value
+ along with its creation and update timestamps.
+ """
+
+ def __init__(
+ self,
+ primitive: Primitive,
+ created_at: str | None = None,
+ updated_at: str | None = None,
+ ):
+ """
+ Initialize a PrimitiveItem.
+
+ Parameters
+ ----------
+ primitive : Primitive
+ The primitive value to store.
+ created_at : str, optional
+ The creation timestamp as ISO format.
+ updated_at : str, optional
+ The last update timestamp as ISO format.
+ """
+ super().__init__(created_at, updated_at)
+
+ self.primitive = primitive
+
+ @classmethod
+ def factory(cls, primitive: Primitive) -> PrimitiveItem:
+ """
+ Create a new PrimitiveItem with the current timestamp.
+
+ Parameters
+ ----------
+ primitive : Primitive
+ The primitive value to store.
+
+ Returns
+ -------
+ PrimitiveItem
+ A new PrimitiveItem instance.
+ """
+ if not is_primitive(primitive):
+ raise ValueError(f"{primitive} is not Primitive.")
+
+ return cls(primitive=primitive)
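A short sketch of how is_primitive gates the factory, based only on the definitions above:

    from skore.item.primitive_item import PrimitiveItem, is_primitive

    is_primitive({"accuracy": 0.87, "folds": [1, 2, 3]})   # True: keys and values are primitive
    is_primitive({"model": object()})                      # False: values must be primitive too

    item = PrimitiveItem.factory(0.87)
    item.primitive                                         # 0.87

    # Non-primitive values are rejected at creation time.
    try:
        PrimitiveItem.factory(object())
    except ValueError as exc:
        print(exc)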
diff --git a/src/skore/item/sklearn_base_estimator_item.py b/src/skore/item/sklearn_base_estimator_item.py
new file mode 100644
index 000000000..917a7693d
--- /dev/null
+++ b/src/skore/item/sklearn_base_estimator_item.py
@@ -0,0 +1,92 @@
+"""SklearnBaseEstimatorItem.
+
+This module defines the SklearnBaseEstimatorItem class,
+which represents a scikit-learn BaseEstimator item.
+"""
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ import sklearn.base
+
+from skore.item.item import Item
+
+
+class SklearnBaseEstimatorItem(Item):
+ """
+ A class to represent a scikit-learn BaseEstimator item.
+
+ This class encapsulates a scikit-learn BaseEstimator along with its
+ creation and update timestamps.
+ """
+
+ def __init__(
+ self,
+ estimator_skops,
+ estimator_html_repr,
+ created_at: str | None = None,
+ updated_at: str | None = None,
+ ):
+ """
+ Initialize a SklearnBaseEstimatorItem.
+
+ Parameters
+ ----------
+ estimator_skops : Any
+ The skops representation of the scikit-learn estimator.
+ estimator_html_repr : str
+ The HTML representation of the scikit-learn estimator.
+ created_at : str, optional
+ The creation timestamp in ISO format.
+ updated_at : str, optional
+ The last update timestamp in ISO format.
+ """
+ super().__init__(created_at, updated_at)
+
+ self.estimator_skops = estimator_skops
+ self.estimator_html_repr = estimator_html_repr
+
+ @cached_property
+ def estimator(self) -> sklearn.base.BaseEstimator:
+ """
+ Convert the stored skops object to a scikit-learn BaseEstimator.
+
+ Returns
+ -------
+ sklearn.base.BaseEstimator
+ The scikit-learn BaseEstimator representation of the stored skops object.
+ """
+ import skops.io
+
+ return skops.io.loads(self.estimator_skops)
+
+ @classmethod
+ def factory(cls, estimator: sklearn.base.BaseEstimator) -> SklearnBaseEstimatorItem:
+ """
+ Create a SklearnBaseEstimatorItem instance from a scikit-learn BaseEstimator.
+
+ Parameters
+ ----------
+ estimator : sklearn.base.BaseEstimator
+ The scikit-learn BaseEstimator to store.
+
+ Returns
+ -------
+ SklearnBaseEstimatorItem
+ A new SklearnBaseEstimatorItem instance.
+ """
+ import sklearn.utils
+ import skops.io
+
+ instance = cls(
+ estimator_skops=skops.io.dumps(estimator),
+ estimator_html_repr=sklearn.utils.estimator_html_repr(estimator),
+ )
+
+ # seed the cached property so the original estimator is reused without deserialization
+ instance.estimator = estimator
+
+ return instance
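A usage sketch assuming scikit-learn and skops are installed (both are imported lazily by the class itself); the comments are illustrative:

    from sklearn.linear_model import LinearRegression

    from skore.item.sklearn_base_estimator_item import SklearnBaseEstimatorItem

    item = SklearnBaseEstimatorItem.factory(LinearRegression())
    item.estimator_html_repr        # HTML diagram used by the UI
    item.estimator                  # the original estimator, pre-seeded by factory()

    # Rebuilding from the skops payload goes through skops.io.loads.
    restored = SklearnBaseEstimatorItem(
        estimator_skops=item.estimator_skops,
        estimator_html_repr=item.estimator_html_repr,
    ).estimator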
diff --git a/src/skore/layout/__init__.py b/src/skore/layout/__init__.py
new file mode 100644
index 000000000..54c2e7010
--- /dev/null
+++ b/src/skore/layout/__init__.py
@@ -0,0 +1,6 @@
+"""Implement layout primitives and storage."""
+
+from skore.layout.layout import Layout
+from skore.layout.layout_repository import LayoutRepository
+
+__all__ = ["Layout", "LayoutRepository"]
diff --git a/src/skore/layout/layout.py b/src/skore/layout/layout.py
new file mode 100644
index 000000000..440fc1a77
--- /dev/null
+++ b/src/skore/layout/layout.py
@@ -0,0 +1,23 @@
+"""Layout models."""
+
+from dataclasses import dataclass
+from enum import StrEnum
+
+
+class LayoutItemSize(StrEnum):
+ """The size of a layout item."""
+
+ SMALL = "small"
+ MEDIUM = "medium"
+ LARGE = "large"
+
+
+@dataclass
+class LayoutItem:
+ """A layout item."""
+
+ key: str
+ size: LayoutItemSize
+
+
+Layout = list[LayoutItem]
diff --git a/src/skore/layout/layout_repository.py b/src/skore/layout/layout_repository.py
new file mode 100644
index 000000000..59f80e7e1
--- /dev/null
+++ b/src/skore/layout/layout_repository.py
@@ -0,0 +1,65 @@
+"""LayoutRepository."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from skore.layout import Layout
+ from skore.persistence.abstract_storage import AbstractStorage
+
+
+class LayoutRepository:
+ """
+ A repository for managing storage and retrieval of layouts.
+
+ This class provides methods to get, put, and delete layouts from a storage system.
+ """
+
+ def __init__(self, storage: AbstractStorage):
+ """
+ Initialize the LayoutRepository with a storage system.
+
+ Parameters
+ ----------
+ storage : AbstractStorage
+ The storage system to be used by the repository.
+ """
+ self.storage = storage
+
+ def get_layout(self) -> Layout:
+ """
+ Retrieve the layout from storage.
+
+ Returns
+ -------
+ Layout
+ The retrieved layout.
+ """
+ return self.storage["layout"]
+
+ def put_layout(self, layout: Layout):
+ """
+ Store a layout in storage.
+
+ Parameters
+ ----------
+ layout : Layout
+ The layout to be stored.
+ """
+ self.storage["layout"] = layout
+
+ def delete_layout(self):
+ """Delete the layout from storage."""
+ del self.storage["layout"]
+
+ def keys(self) -> list[str]:
+ """
+ Get all keys of items stored in the repository.
+
+ Returns
+ -------
+ list[str]
+ A list of all keys in the storage.
+ """
+ return list(self.storage.keys())
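Tying the two layout modules together, a sketch of how a layout might be stored and read back; InMemoryStorage, defined later in this diff, stands in for any AbstractStorage:

    from skore.layout import LayoutRepository
    from skore.layout.layout import LayoutItem, LayoutItemSize
    from skore.persistence.in_memory_storage import InMemoryStorage

    repository = LayoutRepository(storage=InMemoryStorage())
    repository.put_layout([LayoutItem(key="accuracy", size=LayoutItemSize.SMALL)])

    repository.get_layout()   # -> the single LayoutItem stored above
    repository.keys()         # ['layout']
    repository.delete_layout()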
diff --git a/src/skore/persistence/__init__.py b/src/skore/persistence/__init__.py
new file mode 100644
index 000000000..9f7ec6ae9
--- /dev/null
+++ b/src/skore/persistence/__init__.py
@@ -0,0 +1 @@
+"""Provide a set of persistence classes for storing and retrieving data."""
diff --git a/src/skore/persistence/abstract_storage.py b/src/skore/persistence/abstract_storage.py
new file mode 100644
index 000000000..f978a05c6
--- /dev/null
+++ b/src/skore/persistence/abstract_storage.py
@@ -0,0 +1,129 @@
+"""Abstract storage interface."""
+
+from abc import ABC, abstractmethod
+from typing import Any, Iterator
+
+
+class AbstractStorage(ABC):
+ """Persist data in a storage."""
+
+ @abstractmethod
+ def __getitem__(self, key: str) -> Any:
+ """
+ Get the item for the specified key.
+
+ Parameters
+ ----------
+ key : str
+ The key of the item to retrieve.
+
+ Returns
+ -------
+ Any
+ The value associated with the key.
+
+ Raises
+ ------
+ KeyError
+ If the key is not found in the storage.
+ """
+
+ @abstractmethod
+ def __setitem__(self, key: str, value: Any):
+ """
+ Set the item for the specified key.
+
+ Parameters
+ ----------
+ key : str
+ The key to associate with the value.
+ value : Any
+ The value to store.
+ """
+
+ @abstractmethod
+ def __delitem__(self, key: str):
+ """
+ Delete the item for the specified key.
+
+ Parameters
+ ----------
+ key : str
+ The key of the item to delete.
+
+ Raises
+ ------
+ KeyError
+ If the key is not found in the storage.
+ """
+
+ @abstractmethod
+ def keys(self) -> Iterator[str]:
+ """
+ Yield the keys in the storage.
+
+ Returns
+ -------
+ Iterator[str]
+ An iterator yielding all keys in the storage.
+ """
+
+ @abstractmethod
+ def values(self) -> Iterator[Any]:
+ """
+ Yield the values in the storage.
+
+ Returns
+ -------
+ Iterator[Any]
+ An iterator yielding all values in the storage.
+ """
+
+ @abstractmethod
+ def items(self) -> Iterator[tuple[str, Any]]:
+ """
+ Yield the pairs (key, value) in the storage.
+
+ Returns
+ -------
+ Iterator[tuple[str, Any]]
+ An iterator yielding all (key, value) pairs in the storage.
+ """
+
+ def __contains__(self, key: str) -> bool:
+ """
+ Return True if the storage has the specified key, else False.
+
+ Parameters
+ ----------
+ key : str
+ The key to check for existence in the storage.
+
+ Returns
+ -------
+ bool
+ True if the key is in the storage, else False.
+ """
+ return key in self.keys()
+
+ def __len__(self) -> int:
+ """
+ Return the number of items in the storage.
+
+ Returns
+ -------
+ int
+ The number of items in the storage.
+ """
+ return len(list(self.keys()))
+
+ def __iter__(self) -> Iterator[str]:
+ """
+ Yield the keys in the storage.
+
+ Returns
+ -------
+ Iterator[str]
+ An iterator yielding all keys in the storage.
+ """
+ return self.keys()
diff --git a/src/skore/persistence/disk_cache_storage.py b/src/skore/persistence/disk_cache_storage.py
new file mode 100644
index 000000000..9deeba869
--- /dev/null
+++ b/src/skore/persistence/disk_cache_storage.py
@@ -0,0 +1,140 @@
+"""In-memory storage."""
+
+from pathlib import Path
+from typing import Any, Iterator
+
+from diskcache import Cache
+
+from .abstract_storage import AbstractStorage
+
+
+class DirectoryDoesNotExist(Exception):
+ """Directory does not exist."""
+
+
+class DiskCacheStorage(AbstractStorage):
+ """
+ Disk-based storage implementation using diskcache.
+
+ This class provides a persistent storage solution using the diskcache library,
+ which allows for efficient caching of data on disk.
+
+ Parameters
+ ----------
+ directory : Path
+ The directory path where the cache will be stored.
+
+ Attributes
+ ----------
+ storage : Cache
+ The underlying diskcache Cache object.
+ """
+
+ def __init__(self, directory: Path):
+ """
+ Initialize the DiskCacheStorage with the specified directory.
+
+ Parameters
+ ----------
+ directory : Path
+ The directory path where the cache will be stored.
+ """
+ if not directory.exists():
+ raise DirectoryDoesNotExist(f"Directory {directory} does not exist.")
+ self.storage = Cache(directory)
+
+ def __getitem__(self, key: str) -> Any:
+ """
+ Retrieve an item from the storage.
+
+ Parameters
+ ----------
+ key : str
+ The key of the item to retrieve.
+
+ Returns
+ -------
+ Any
+ The value associated with the given key.
+
+ Raises
+ ------
+ KeyError
+ If the key is not found in the storage.
+ """
+ return self.storage[key]
+
+ def __setitem__(self, key: str, value: Any):
+ """
+ Set an item in the storage.
+
+ Parameters
+ ----------
+ key : str
+ The key to associate with the value.
+ value : Any
+ The value to store.
+ """
+ self.storage[key] = value
+
+ def __delitem__(self, key: str):
+ """
+ Delete an item from the storage.
+
+ Parameters
+ ----------
+ key : str
+ The key of the item to delete.
+
+ Raises
+ ------
+ KeyError
+ If the key is not found in the storage.
+ """
+ del self.storage[key]
+
+ def keys(self) -> Iterator[str]:
+ """
+ Get an iterator over the keys in the storage.
+
+ Returns
+ -------
+ Iterator[str]
+ An iterator yielding all keys in the storage.
+ """
+ return self.storage.iterkeys()
+
+ def values(self) -> Iterator[Any]:
+ """
+ Get an iterator over the values in the storage.
+
+ Returns
+ -------
+ Iterator[Any]
+ An iterator yielding all values in the storage.
+ """
+ for key in self.storage.iterkeys():
+ yield self.storage[key]
+
+ def items(self) -> Iterator[tuple[str, Any]]:
+ """
+ Get an iterator over the (key, value) pairs in the storage.
+
+ Returns
+ -------
+ Iterator[tuple[str, Any]]
+ An iterator yielding all (key, value) pairs in the storage.
+ """
+ for key in self.storage.iterkeys():
+ yield (key, self.storage[key])
+
+ def __repr__(self) -> str:
+ """
+ Return a string representation of the storage.
+
+ Returns
+ -------
+ str
+ A string representation of the storage.
+ """
+ return f"DiskCacheStorage(directory='{self.storage.directory}')"
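A small sketch of the disk-backed storage; the directory must exist beforehand, which is exactly what DirectoryDoesNotExist guards:

    from pathlib import Path
    from tempfile import mkdtemp

    from skore.persistence.disk_cache_storage import DiskCacheStorage

    storage = DiskCacheStorage(directory=Path(mkdtemp()))

    storage["answer"] = 42          # persisted on disk through diskcache
    storage["answer"]               # 42
    list(storage.items())           # [('answer', 42)]
    "answer" in storage             # True, provided by AbstractStorage.__contains__
    del storage["answer"]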
diff --git a/src/skore/persistence/in_memory_storage.py b/src/skore/persistence/in_memory_storage.py
new file mode 100644
index 000000000..3bfd7eb64
--- /dev/null
+++ b/src/skore/persistence/in_memory_storage.py
@@ -0,0 +1,111 @@
+"""In-memory storage."""
+
+from typing import Any, Iterator
+
+from .abstract_storage import AbstractStorage
+
+
+class InMemoryStorage(AbstractStorage):
+ """In-memory storage."""
+
+ def __init__(self):
+ """
+ Initialize an empty in-memory storage.
+
+ The storage is implemented as a dictionary.
+ """
+ self.storage = {}
+
+ def __getitem__(self, key: str) -> Any:
+ """
+ Get the item for the specified key.
+
+ Parameters
+ ----------
+ key : str
+ The key of the item to retrieve.
+
+ Returns
+ -------
+ Any
+ The value associated with the key.
+
+ Raises
+ ------
+ KeyError
+ If the key is not found in the storage.
+ """
+ return self.storage[key]
+
+ def __setitem__(self, key: str, value: Any):
+ """
+ Set the item for the specified key.
+
+ Parameters
+ ----------
+ key : str
+ The key to associate with the value.
+ value : Any
+ The value to store.
+ """
+ self.storage[key] = value
+
+ def __delitem__(self, key: str):
+ """
+ Delete the item for the specified key.
+
+ Parameters
+ ----------
+ key : str
+ The key of the item to delete.
+
+ Raises
+ ------
+ KeyError
+ If the key is not found in the storage.
+ """
+ del self.storage[key]
+
+ def keys(self) -> Iterator[str]:
+ """
+ Yield the keys.
+
+ Returns
+ -------
+ Iterator[str]
+ An iterator yielding all keys in the storage.
+ """
+ return iter(self.storage.keys())
+
+ def values(self) -> Iterator[Any]:
+ """
+ Yield the values.
+
+ Returns
+ -------
+ Iterator[Any]
+ An iterator yielding all values in the storage.
+ """
+ return iter(self.storage.values())
+
+ def items(self) -> Iterator[tuple[str, Any]]:
+ """
+ Yield the pairs (key, value).
+
+ Returns
+ -------
+ Iterator[tuple[str, Any]]
+ An iterator yielding all (key, value) pairs in the storage.
+ """
+ return iter(self.storage.items())
+
+ def __repr__(self) -> str:
+ """
+ Return a string representation of the storage.
+
+ Returns
+ -------
+ str
+ A string representation of the storage.
+ """
+ return "InMemoryStorage()"
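The in-memory variant behaves the same way and is convenient in tests; note that __len__, __iter__ and __contains__ all come from AbstractStorage:

    from skore.persistence.in_memory_storage import InMemoryStorage

    storage = InMemoryStorage()
    storage["note"] = "hello"

    len(storage)        # 1, computed by AbstractStorage.__len__ from keys()
    list(storage)       # ['note'], via AbstractStorage.__iter__
    "note" in storage   # True
    repr(storage)       # 'InMemoryStorage()'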
diff --git a/src/skore/project.py b/src/skore/project.py
new file mode 100644
index 000000000..54931d69e
--- /dev/null
+++ b/src/skore/project.py
@@ -0,0 +1,143 @@
+"""Define a Project."""
+
+from pathlib import Path
+from typing import Any
+
+import altair
+import matplotlib
+import numpy
+import pandas
+import PIL
+import sklearn
+
+from skore.item import Item
+from skore.item.item_repository import ItemRepository
+from skore.item.media_item import MediaItem
+from skore.item.numpy_array_item import NumpyArrayItem
+from skore.item.pandas_dataframe_item import PandasDataFrameItem
+from skore.item.primitive_item import PrimitiveItem, is_primitive
+from skore.item.sklearn_base_estimator_item import SklearnBaseEstimatorItem
+from skore.layout import Layout, LayoutRepository
+from skore.persistence.disk_cache_storage import DirectoryDoesNotExist, DiskCacheStorage
+
+
+def object_to_item(o: Any) -> Item:
+ """Transform an object into an Item."""
+ if is_primitive(o):
+ return PrimitiveItem.factory(o)
+ elif isinstance(o, pandas.DataFrame):
+ return PandasDataFrameItem.factory(o)
+ elif isinstance(o, numpy.ndarray):
+ return NumpyArrayItem.factory(o)
+ elif isinstance(o, sklearn.base.BaseEstimator):
+ return SklearnBaseEstimatorItem.factory(o)
+ elif isinstance(o, altair.vegalite.v5.schema.core.TopLevelSpec):
+ return MediaItem.factory_altair(o)
+ elif isinstance(o, matplotlib.figure.Figure):
+ return MediaItem.factory_matplotlib(o)
+ elif isinstance(o, PIL.Image.Image):
+ return MediaItem.factory_pillow(o)
+ else:
+ raise NotImplementedError(f"Type {o.__class__.__name__} is not supported yet.")
+
+
+class Project:
+ """A project is a collection of items that are stored in a storage."""
+
+ def __init__(
+ self,
+ item_repository: ItemRepository,
+ layout_repository: LayoutRepository,
+ ):
+ self.item_repository = item_repository
+ self.layout_repository = layout_repository
+
+ def put(self, key: str, value: Any):
+ """Add a value to the Project."""
+ item = object_to_item(value)
+ self.put_item(key, item)
+
+ def put_item(self, key: str, item: Item):
+ """Add an Item to the Project."""
+ self.item_repository.put_item(key, item)
+
+ def get(self, key: str) -> Any:
+ """Get the value corresponding to `key` from the Project."""
+ item = self.get_item(key)
+
+ if isinstance(item, PrimitiveItem):
+ return item.primitive
+ elif isinstance(item, NumpyArrayItem):
+ return item.array
+ elif isinstance(item, PandasDataFrameItem):
+ return item.dataframe
+ elif isinstance(item, SklearnBaseEstimatorItem):
+ return item.estimator
+ elif isinstance(item, MediaItem):
+ return item.media_bytes
+ else:
+ raise ValueError(f"Item {item} is not a known item type.")
+
+ def get_item(self, key: str) -> Item:
+ """Add the Item corresponding to `key` from the Project."""
+ return self.item_repository.get_item(key)
+
+ def list_keys(self) -> list[str]:
+ """List all keys in the Project."""
+ return self.item_repository.keys()
+
+ def delete_item(self, key: str):
+ """Delete an item from the Project."""
+ self.item_repository.delete_item(key)
+
+ def put_report_layout(self, layout: Layout):
+ """Add a report layout to the Project."""
+ self.layout_repository.put_layout(layout)
+
+ def get_report_layout(self) -> Layout:
+ """Get the report layout corresponding to `key` from the Project."""
+ try:
+ return self.layout_repository.get_layout()
+ except KeyError:
+ return []
+
+
+class ProjectLoadError(Exception):
+ """Failed to load project."""
+
+
+def load(project_name: str | Path) -> Project:
+ """Load an existing Project given a project name or path."""
+ # Transform the project name into a directory path:
+ # - resolve a relative path against the current working directory,
+ # - keep an absolute path as-is,
+ # - append the ".skore" extension if it is missing.
+
+ path = Path(project_name).resolve()
+
+ if path.suffix != ".skore":
+ path = path.parent / (path.name + ".skore")
+
+ if not Path(path).exists():
+ raise ProjectLoadError(f"Project '{path}' does not exist: did you create it?")
+
+ try:
+ # FIXME: should these hardcoded strings be factored out somewhere?
+ item_storage = DiskCacheStorage(directory=Path(path) / "items")
+ item_repository = ItemRepository(storage=item_storage)
+ layout_storage = DiskCacheStorage(directory=Path(path) / "layouts")
+ layout_repository = LayoutRepository(storage=layout_storage)
+ project = Project(
+ item_repository=item_repository,
+ layout_repository=layout_repository,
+ )
+ except DirectoryDoesNotExist as e:
+ missing_directory = e.args[0].split()[1]
+ raise ProjectLoadError(
+ f"Project '{path}' is corrupted: "
+ f"directory '{missing_directory}' should exist. "
+ "Consider re-creating the project."
+ ) from e
+
+ return project
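A sketch of the Project API end to end. Creating the ".skore" directory layout is not shown in this hunk, so the sketch assumes "project.skore/" already exists with "items/" and "layouts/" subdirectories; the comments are illustrative:

    import numpy as np
    from sklearn.linear_model import LinearRegression

    from skore.project import load

    # load() only opens an existing project; it does not create one.
    project = load("project")                 # ".skore" is appended automatically

    project.put("accuracy", 0.87)             # -> PrimitiveItem
    project.put("weights", np.zeros(3))       # -> NumpyArrayItem
    project.put("model", LinearRegression())  # -> SklearnBaseEstimatorItem

    project.get("accuracy")                   # 0.87
    project.list_keys()                       # the three keys above
    project.get_report_layout()               # [] until a layout is stored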
diff --git a/src/skore/registry.py b/src/skore/registry.py
deleted file mode 100644
index f73c9f1ee..000000000
--- a/src/skore/registry.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""Registry used to supervise stores."""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from skore.storage.uri import URI
-from skore.store import Store
-
-if TYPE_CHECKING:
- from typing import Generator
-
- from skore.storage import Storage
-
-
-def children(store: Store) -> Generator[Store, None, None]:
- """Yield recursively `store` children."""
- stores = {store.uri}
-
- for uri in store.storage:
- if ((uri := uri.parent) not in stores) and (store.uri in uri):
- stores.add(uri)
- yield Store(uri, store.storage)
-
-
-def parent(store: Store, /) -> Store:
- """Return `store` parent."""
- return Store(store.uri.parent, store.storage)
-
-
-def stores(storage: Storage, /) -> Generator[Store, None, None]:
- """Yield stores saved in `storage`."""
- stores = set()
-
- for uri in storage:
- if (uri := uri.parent) not in stores:
- stores.add(uri)
- yield Store(uri, storage)
-
-
-def find_store_by_uri(uri: URI, storage: Storage, /) -> Store | None:
- """Find a Store in `storage` given a URI.
-
- Returns None if no Store is found with the given URI.
- """
- for store in stores(storage):
- if uri == store.uri:
- return store
- return None
diff --git a/src/skore/schema/__init__.py b/src/skore/schema/__init__.py
deleted file mode 100644
index 6e031999e..000000000
--- a/src/skore/schema/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# noqa: D104
diff --git a/src/skore/schema/dashboard/__init__.py b/src/skore/schema/dashboard/__init__.py
deleted file mode 100644
index 272cc37ed..000000000
--- a/src/skore/schema/dashboard/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""JSON Schema for API versioning between skore and dashboard."""
-
-import json
-import pathlib
-
-__all__ = ["v0"]
-
-
-with open(pathlib.Path(__file__).parent / "v0.json") as f:
- v0 = json.load(f)
diff --git a/src/skore/schema/dashboard/v0.json b/src/skore/schema/dashboard/v0.json
deleted file mode 100644
index 2e87b7302..000000000
--- a/src/skore/schema/dashboard/v0.json
+++ /dev/null
@@ -1,353 +0,0 @@
-{
- "$schema": "https://json-schema.org/draft/2020-12/schema",
- "$id": "schema:dashboard:v0",
- "$defs": {
- "if:any": {
- "title": "Any",
- "description": "Any value in JSON format.",
- "if": { "properties": { "type": { "const": "any" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "any" },
- "data": true
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:array": {
- "title": "Array",
- "description": "Array, analogous to the python's list type.",
- "if": { "properties": { "type": { "const": "array" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "array" },
- "data": { "type": "array" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:boolean": {
- "title": "Boolean",
- "description": "Boolean, analogous to the python's bool type.",
- "if": { "properties": { "type": { "const": "boolean" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "boolean" },
- "data": { "type": "boolean" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:cv_results": {
- "title": "CV results",
- "description": "Result of a cross-validation, as output by `sklearn.model_selection.cross_validate`.",
- "if": { "properties": { "type": { "const": "cv_results" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "cv_results" },
- "data": {
- "type": "object",
- "properties": {
- "roc_curve_spec": { "$ref": "#/$defs/if:vega" },
- "cv_results_table": { "$ref": "#/$defs/if:dataframe" }
- },
- "required": [ "roc_curve_spec", "cv_results_table" ],
- "additionalProperties": false
- }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:dataframe": {
- "title": "DataFrame",
- "description": "DataFrame.",
- "if": { "properties": { "type": { "const": "dataframe" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "dataframe" },
- "data": { "type": "object" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:date": {
- "title": "Date",
- "description": "Date string representation in ISO 8601 format.",
- "if": { "properties": { "type": { "const": "date" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "date" },
- "data": {
- "type": "string",
- "format": "date"
- }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:datetime": {
- "title": "Date and time",
- "description": "Date and time string representation in ISO 8601 format.",
- "if": { "properties": { "type": { "const": "datetime" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "datetime" },
- "data": {
- "type": "string",
- "format": "date-time"
- }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:file": {
- "title": "File",
- "description": "File path in URI format, with user metadata and internal data",
- "if": { "properties": { "type": { "const": "file" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "file" },
- "data": {
- "type": "string",
- "format": "uri"
- },
- "metadata": true,
- "internal": true
- },
- "required": [ "type", "data", "metadata", "internal" ],
- "additionalProperties": false
- }
- },
- "if:image": {
- "title": "Image",
- "description": "Image in Base64 encoding with file type.",
- "if": { "properties": { "type": { "const": "image" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "image" },
- "data": {
- "type": "object",
- "properties": {
- "mime-type": {
- "enum": [ "image/svg+xml", "image/png", "image/jpeg", "image/webp" ]
- },
- "data": {
- "type": "string",
- "pattern": "^([a-z]|[A-Z]|\\d|\\+|\/)*=*$",
- "contentEncoding": "base64"
- }
- },
- "required": [ "mime-type", "data" ],
- "additionalProperties": false
- }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:markdown": {
- "title": "Markdown",
- "description": "Text in markdown format.",
- "if": { "properties": { "type": { "const": "markdown" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "markdown" },
- "data": { "type": "string", "contentMediaType": "text/markdown" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:matplotlib_figure": {
- "title": "Matplotlib figure",
- "description": "Matplotlib figure in SVG format.",
- "if": { "properties": { "type": { "const": "matplotlib_figure" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "matplotlib_figure" },
- "data": { "type": "string", "contentMediaType": "image/svg" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:number": {
- "title": "Number",
- "description": "Number, analogous to the python's float type.",
- "if": { "properties": { "type": { "const": "number" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "number" },
- "data": { "type": "number" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:html": {
- "title": "HTML",
- "description": "HTML string representation.",
- "if": { "properties": { "type": { "const": "html" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "html" },
- "data": {
- "type": "string",
- "contentMediaType": "text/html"
- }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:integer": {
- "title": "Integer",
- "description": "Integer, analogous to the python's int type",
- "if": { "properties": { "type": { "const": "integer" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "integer" },
- "data": { "type": "integer" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:string": {
- "title": "String",
- "description": "String, analogous to the python3's str type.",
- "if": { "properties": { "type": { "const": "string" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "string" },
- "data": { "type": "string" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:vega": {
- "title": "Vega",
- "description": "Vega chart object in vega's format, result of `vegachart.to_dict()`.",
- "if": { "properties": { "type": { "const": "vega" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "vega" },
- "data": { "$ref": "https://vega.github.io/schema/vega-lite/v5.json" }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- },
- "if:sklearn_model": {
- "title": "sklearn Model",
- "description": "A scikit learn fitted or not model serialized as its HTML representation.",
- "if": { "properties": { "type": { "const": "sklearn_model" } } },
- "then": {
- "type": "object",
- "properties": {
- "type": { "const": "html" },
- "data": {
- "type": "string",
- "contentMediaType": "text/html"
- }
- },
- "required": [ "type", "data" ],
- "additionalProperties": false
- }
- }
- },
-
- "type": "object",
- "properties": {
- "schema": { "const": "schema:dashboard:v0" },
- "uri": {
- "type": "string",
- "format": "uri-reference"
- },
- "payload": {
- "additionalProperties": {
- "allOf": [
- {
- "properties": {
- "type": {
- "enum": [
- "any",
- "array",
- "boolean",
- "cv_results",
- "dataframe",
- "date",
- "datetime",
- "file",
- "image",
- "markdown",
- "matplotlib_figure",
- "number",
- "html",
- "integer",
- "string",
- "vega",
- "sklearn_model"
- ]
- }
- },
- "required": ["type"]
- },
- { "$ref": "#/$defs/if:any" },
- { "$ref": "#/$defs/if:array" },
- { "$ref": "#/$defs/if:boolean" },
- { "$ref": "#/$defs/if:cv_results" },
- { "$ref": "#/$defs/if:dataframe" },
- { "$ref": "#/$defs/if:date" },
- { "$ref": "#/$defs/if:datetime" },
- { "$ref": "#/$defs/if:file" },
- { "$ref": "#/$defs/if:image" },
- { "$ref": "#/$defs/if:markdown" },
- { "$ref": "#/$defs/if:matplotlib_figure" },
- { "$ref": "#/$defs/if:number" },
- { "$ref": "#/$defs/if:html" },
- { "$ref": "#/$defs/if:integer" },
- { "$ref": "#/$defs/if:string" },
- { "$ref": "#/$defs/if:vega" }
- ]
- }
- },
- "layout": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "key": { "type": "string" },
- "size": { "enum": ["small", "medium", "large"] }
- },
- "required": ["key", "size"]
- }
- }
- },
- "required": [ "schema", "uri", "payload" ],
- "additionalProperties": false
-}
diff --git a/src/skore/storage/__init__.py b/src/skore/storage/__init__.py
deleted file mode 100644
index 4052607cd..000000000
--- a/src/skore/storage/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-"""Interface and implementations of storage."""
-
-from skore.storage.filesystem import FileSystem
-from skore.storage.non_persistent_storage import NonPersistentStorage
-from skore.storage.storage import Storage
-from skore.storage.uri import URI
-
-__all__ = [
- "FileSystem",
- "NonPersistentStorage",
- "Storage",
- "URI",
-]
diff --git a/src/skore/storage/filesystem.py b/src/skore/storage/filesystem.py
deleted file mode 100644
index 0cb763ea2..000000000
--- a/src/skore/storage/filesystem.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""Persistent storage over disk based on the diskcache library."""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from diskcache import Cache
-
-from skore.storage.storage import Storage
-
-if TYPE_CHECKING:
- from pathlib import Path
- from typing import Generator
-
- from skore.item import Item
- from skore.storage.storage import URI
-
-
-class FileSystem(Storage):
- """Persistent storage implementation over disk based on the diskcache library."""
-
- def __init__(self, *, directory: Path | None = None):
- self.cache = Cache(directory)
-
- def __contains__(self, key: URI) -> bool:
- """Return True if the storage has the specified key, else False."""
- return key in self.cache
-
- def __iter__(self) -> Generator[URI, None, None]:
- """Yield the keys."""
- yield from self.cache.iterkeys()
-
- def getitem(self, key: URI) -> Item:
- """Return the item for te specified key.
-
- Raises
- ------
- KeyError
- If the storage doesn't have the specified key.
- """
- return self.cache[key]
-
- def setitem(self, key: URI, item: Item):
- """Set the item for the specified key."""
- self.cache[key] = item
-
- def delitem(self, key: URI):
- """Delete the specified key and its item.
-
- Raises
- ------
- KeyError
- If the storage doesn't have the specified key.
- """
- del self.cache[key]
-
- def keys(self) -> Generator[URI, None, None]:
- """Yield the keys."""
- yield from self
-
- def items(self) -> Generator[tuple[URI, Item], None, None]:
- """Yield the pairs (key, item)."""
- for key in self.cache.iterkeys():
- yield (key, self.cache[key])
diff --git a/src/skore/storage/non_persistent_storage.py b/src/skore/storage/non_persistent_storage.py
deleted file mode 100644
index d2111000f..000000000
--- a/src/skore/storage/non_persistent_storage.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""Non-persistent storage over RAM based on dict."""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-from skore.storage.storage import Storage
-
-if TYPE_CHECKING:
- from typing import Generator
-
- from skore.item import Item
- from skore.storage.storage import URI
-
-
-class NonPersistentStorage(Storage):
- """Non-persistent storage over RAM based on dict class."""
-
- def __init__(self, *, content: dict = None):
- self.content = content or {}
-
- def __contains__(self, key: URI) -> bool:
- """Return True if the storage has the specified key, else False."""
- return key in self.content
-
- def __iter__(self) -> Generator[URI, None, None]:
- """Yield the keys."""
- yield from self.content.keys()
-
- def getitem(self, key: URI) -> Item:
- """Return the item for te specified key.
-
- Raises
- ------
- KeyError
- If the storage doesn't have the specified key.
- """
- return self.content[key]
-
- def setitem(self, key: URI, item: Item):
- """Set the item for the specified key."""
- self.content[key] = item
-
- def delitem(self, key: URI):
- """Delete the specified key and its item.
-
- Raises
- ------
- KeyError
- If the storage doesn't have the specified key.
- """
- del self.content[key]
-
- def keys(self) -> Generator[URI, None, None]:
- """Yield the keys."""
- yield from self
-
- def items(self) -> Generator[tuple[URI, Item], None, None]:
- """Yield the pairs (key, item)."""
- yield from self.content.items()
diff --git a/src/skore/storage/storage.py b/src/skore/storage/storage.py
deleted file mode 100644
index c26f3a9cc..000000000
--- a/src/skore/storage/storage.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""Storage interface used to store key-item pairs."""
-
-from __future__ import annotations
-
-from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from typing import Generator
-
- from skore.item import Item
- from skore.storage import URI
-
-
-class Storage(ABC):
- """Storage interface used to store key-item pairs."""
-
- @abstractmethod
- def __contains__(self, key: URI) -> bool:
- """Return True if the storage has the specified key, else False."""
-
- @abstractmethod
- def __iter__(self) -> Generator[URI, None, None]:
- """Yield the keys."""
-
- @abstractmethod
- def getitem(self, key: URI) -> Item:
- """Return the item for the specified key.
-
- Raises
- ------
- KeyError
- If the storage doesn't have the specified key.
- """
-
- @abstractmethod
- def setitem(self, key: URI, item: Item):
- """Set the item for the specified key."""
-
- @abstractmethod
- def delitem(self, key: URI):
- """Delete the specified key and its item.
-
- Raises
- ------
- KeyError
- If the storage doesn't have the specified key.
- """
-
- @abstractmethod
- def keys(self) -> Generator[URI, None, None]:
- """Yield the keys."""
-
- @abstractmethod
- def items(self) -> Generator[tuple[URI, Item], None, None]:
- """Yield the pairs (key, item)."""
diff --git a/src/skore/storage/uri.py b/src/skore/storage/uri.py
deleted file mode 100644
index 2e80b0182..000000000
--- a/src/skore/storage/uri.py
+++ /dev/null
@@ -1,129 +0,0 @@
-"""URI class used to manipulate PosixPath-like str."""
-
-from __future__ import annotations
-
-import itertools
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from pathlib import PosixPath
- from typing import Any
-
-
-class URI:
- """URI class used to manipulate PosixPath-like str.
-
- It is mainly based on `pathlib.PurePosixPath`.
- """
-
- def __init__(self, *segments: URI | PosixPath | str):
- """Initialize URI with segments.
-
- Parameters
- ----------
- *segments : URI | PosixPath | str
- URI, PosixPath or PosixPath-like str.
- The slashes ("/") can optionally be used to delimit segments in a string.
-
- Examples
- --------
- >>> URI("/", "r", "/", "o", "/", "o", "/", "t")
- URI("/r/o/o/t")
-
- >>> URI("/r/o", "/o/t")
- URI("/r/o/o/t")
-
- >>> URI("/r/o/o/t")
- URI("/r/o/o/t")
-
- >>> URI("r/o/o/t")
- URI("/r/o/o/t")
-
- >>> URI("r", "o", "o", "t")
- URI("/r/o/o/t")
-
- >>> URI("/")
- Traceback (most recent call last):
- ...
- ValueError: Expected a non-empty PosixPath-like string; got '('/',)'.
-
- >>> URI("")
- Traceback (most recent call last):
- ...
- ValueError: Expected a non-empty PosixPath-like string; got '('',)'.
- """
- self.__segments = tuple(
- filter(
- None,
- itertools.chain.from_iterable(
- str(segment).lower().split("/") for segment in segments
- ),
- )
- )
-
- if not self.__segments:
- raise ValueError(
- f"Expected a non-empty PosixPath-like string; got '{segments}'."
- )
-
- @property
- def segments(self):
- """Segments composing the URI."""
- return self.__segments
-
- @property
- def parent(self) -> URI:
- """The logical parent of the URI."""
- if len(self.__segments) < 2:
- raise ValueError(f"{repr(self)} has no parent.")
-
- return URI(*self.__segments[:-1])
-
- @property
- def stem(self) -> str:
- """The final URI segment."""
- return self.__segments[-1]
-
- def __truediv__(self, segment: URI | PosixPath | str):
- """Compose a new URI by appending segment to the URI."""
- return URI(*self.__segments, segment)
-
- def __str__(self) -> str:
- """Return str(self).
-
- The URI's string representation is equivalent to the string representation of
- an _absolute_ PosixPath directory. It means that there's a leading "/", and all
- subsequent segments are delimited by "/".
-
- Examples
- --------
- >>> str(URI("/r/o/o/t"))
- '/r/o/o/t'
-
- >>> str(URI("r", "o", "o", "t"))
- '/r/o/o/t'
-
- >>> str(URI("r/o/o/t"))
- '/r/o/o/t'
- """
- return f"/{'/'.join(self.__segments)}"
-
- def __repr__(self) -> str:
- """Return repr(self)."""
- return f'URI("{self}")'
-
- def __hash__(self) -> int:
- """Return hash(self)."""
- return hash(self.__segments)
-
- def __len__(self) -> int:
- """Return the number of segments of the URI."""
- return len(self.__segments)
-
- def __eq__(self, other: Any) -> bool:
- """Return self == other."""
- return isinstance(other, URI) and (self.__segments == other.segments)
-
- def __contains__(self, other: URI) -> bool:
- """Return True if self is relative to other, else False."""
- return self.__segments[: len(other.segments)] == other.segments
diff --git a/src/skore/store/__init__.py b/src/skore/store/__init__.py
deleted file mode 100644
index 02a779c2d..000000000
--- a/src/skore/store/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-"""Object used used to store pairs of (key, value) by URI over a storage."""
-
-from skore.store.store import Store
-
-__all__ = ["Store"]
diff --git a/src/skore/store/layout.py b/src/skore/store/layout.py
deleted file mode 100644
index 9e5305bb8..000000000
--- a/src/skore/store/layout.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Models to store state of a report's UI."""
-
-from enum import StrEnum, auto
-
-from pydantic import BaseModel
-
-
-class LayoutItemSize(StrEnum):
- """String enum to store report card size."""
-
- small = auto()
- medium = auto()
- large = auto()
-
-
-class LayoutItem(BaseModel):
- """A Report display some of its items.
-
- This class represents displayed item setting.
- """
-
- key: str
- size: LayoutItemSize
-
-
-Layout = list[LayoutItem]
diff --git a/src/skore/store/store.py b/src/skore/store/store.py
deleted file mode 100644
index a5a9f2fa0..000000000
--- a/src/skore/store/store.py
+++ /dev/null
@@ -1,231 +0,0 @@
-"""Object used to store pairs of (key, value) by URI over a storage."""
-
-from __future__ import annotations
-
-import dataclasses
-import os
-from datetime import UTC, datetime
-from pathlib import Path
-from typing import TYPE_CHECKING
-
-from pydantic import RootModel
-
-from skore.item import DisplayType, Item, ItemMetadata
-from skore.storage import URI, FileSystem
-from skore.store.layout import Layout
-
-if TYPE_CHECKING:
- from pathlib import PosixPath
- from typing import Any, Generator
-
- from skore.storage import Storage
-
-
-def _get_storage_path(SKORE_ROOT: str | None) -> Path:
- """Decide on the `Storage`'s location based on SKORE_ROOT."""
- if SKORE_ROOT is None:
- return Path.cwd() / ".datamander"
-
- if not Path(SKORE_ROOT).is_absolute():
- return Path.cwd() / SKORE_ROOT
-
- return Path(SKORE_ROOT)
-
-
-class Store:
- """Object used to store pairs of (key, value) by URI over a storage."""
-
- # FIXME find a better to isolate layout from users items
- LAYOUT_KEY = "__skore__layout__"
-
- def __init__(self, uri: URI | PosixPath | str, storage: Storage = None):
- self.uri = URI(uri)
- if storage is None:
- directory = _get_storage_path(os.environ.get("SKORE_ROOT"))
- self.storage = FileSystem(directory=directory)
- else:
- self.storage = storage
-
- def __eq__(self, other: Any):
- """Return self == other."""
- return (
- isinstance(other, Store)
- and (self.uri == other.uri)
- and (self.storage == other.storage)
- )
-
- def insert(
- self, key: str, value: Any, *, display_type: DisplayType | str | None = None
- ):
- """Insert the value for the specified key.
-
- Parameters
- ----------
- key : str
- value : Any
- display_type : DisplayType or str, optional
- The type used to display a representation of the value.
-
- Notes
- -----
- Key will be referenced in the storage in a flat pattern with "u/r/i/keyname".
-
- Raises
- ------
- KeyError
- If the store already has the specified key.
- """
- uri = self.uri / key
-
- if uri in self.storage:
- raise KeyError(
- key,
- f"Key '{key}' already exists in {self}; "
- "update or delete the key instead.",
- )
-
- now = datetime.now(tz=UTC).isoformat()
- display_type = (
- DisplayType(display_type) if display_type else DisplayType.infer(value)
- )
- item = Item(
- data=value,
- metadata=ItemMetadata(
- display_type=display_type,
- created_at=now,
- updated_at=now,
- ),
- )
-
- self.storage.setitem(uri, item)
-
- def read(self, key: str, *, metadata: bool = False) -> Any | tuple[Any, dict]:
- """Return the value for the specified key, optionally with its metadata.
-
- Raises
- ------
- KeyError
- If the store doesn't have the specified key.
- """
- try:
- item = self.storage.getitem(self.uri / key)
- except KeyError as e:
- raise KeyError(key, f"Key '{key}' does not exist in {self}.") from e
-
- return (
- item.data
- if not metadata
- else (item.data, dataclasses.asdict(item.metadata))
- )
-
- def update(
- self, key: str, value: Any, *, display_type: DisplayType | str | None = None
- ):
- """Update the value for the specified key.
-
- Parameters
- ----------
- key : str
- value : Any
- display_type : DisplayType or str, optional
- The type used to display a representation of the value.
-
- Raises
- ------
- KeyError
- If the store doesn't have the specified key.
- """
- uri = self.uri / key
-
- if uri not in self.storage:
- raise KeyError(key, f"Key '{key}' does not exist in {self}.")
-
- created_at = self.storage.getitem(uri).metadata.created_at
- updated_at = datetime.now(tz=UTC).isoformat()
- display_type = (
- DisplayType(display_type) if display_type else DisplayType.infer(value)
- )
- item = Item(
- data=value,
- metadata=ItemMetadata(
- display_type=display_type,
- created_at=created_at,
- updated_at=updated_at,
- ),
- )
-
- self.storage.delitem(uri)
- self.storage.setitem(uri, item)
-
- def delete(self, key: str):
- """Delete the specified key and its value.
-
- Raises
- ------
- KeyError
- If the store doesn't have the specified key.
- """
- try:
- self.storage.delitem(self.uri / key)
- except KeyError as e:
- raise KeyError(key, f"Key '{key}' does not exist in {self}.") from e
-
- def __iter__(self) -> Generator[str, None, None]:
- """Yield the keys."""
- yield from (key.stem for key in self.storage if key.parent == self.uri)
-
- def keys(self) -> Generator[str, None, None]:
- """Yield the keys."""
- yield from self
-
- def items(
- self, *, metadata: bool = False
- ) -> Generator[tuple[str, Any] | tuple[str, Any, dict], None, None]:
- """Yield the pairs(key, value), optionally with the value metadata."""
- for key, item in self.storage.items():
- if key.parent == self.uri:
- yield (
- (key.stem, item.data)
- if not metadata
- else (key.stem, item.data, dataclasses.asdict(item.metadata))
- )
-
- def get_layout(self) -> Layout:
- """Get the layout, or `[]` if the layout was never set."""
- try:
- layout: Layout = self.read(Store.LAYOUT_KEY) # type: ignore
- except KeyError:
- layout: Layout = []
- return layout
-
- def set_layout(self, layout: Layout) -> None:
- """Set the layout to `layout`.
-
- Raises
- ------
- KeyError
- If `layout` refers to a key which is not in the Store.
-
- pydantic.ValidationError
- If `layout` is malformed, e.g. if "size" is not a valid
- `LayoutItemSize`.
-
-
- Examples
- --------
- >>> Skore("my_test_root").set_layout([ # doctest: +SKIP
- ... {"key": "my_integer", "size": "small"},
- ... {"key": "my_string", "size": "medium"},
- ... {"key": "my_array", "size": "large"},
- ... ])
- """
- layout = RootModel[Layout].model_validate(layout).root
-
- for layout_item in layout:
- if layout_item.key not in self.keys():
- raise KeyError(f"Key '{layout_item.key}' is not in the store.")
-
- try:
- self.insert(Store.LAYOUT_KEY, layout)
- except KeyError:
- self.update(Store.LAYOUT_KEY, layout)
diff --git a/src/skore/ui/__init__.py b/src/skore/ui/__init__.py
new file mode 100644
index 000000000..d9c5a45df
--- /dev/null
+++ b/src/skore/ui/__init__.py
@@ -0,0 +1 @@
+"""Implement the report UI."""
diff --git a/src/skore/ui/app.py b/src/skore/ui/app.py
new file mode 100644
index 000000000..bb232500d
--- /dev/null
+++ b/src/skore/ui/app.py
@@ -0,0 +1,54 @@
+"""FastAPI factory used to create the API to interact with stores."""
+
+from fastapi import APIRouter, FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.staticfiles import StaticFiles
+
+from skore.project import Project, load
+
+from .dependencies import get_static_path
+from .report import router as report_router
+
+
+def create_app(project: Project | None = None) -> FastAPI:
+ """FastAPI factory used to create the API to interact with `stores`."""
+ app = FastAPI()
+
+ # Give the app access to the project
+ if not project:
+ project = load("project.skore")
+
+ app.state.project = project
+
+ # Enable CORS support on all routes, for all origins and methods.
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_credentials=True,
+ allow_methods=["*"],
+ allow_headers=["*"],
+ )
+
+ # Include routers from bottom to top.
+ # Include routers always after all routes have been defined/imported.
+ router = APIRouter(prefix="/api")
+ router.include_router(report_router)
+
+ # Include all sub routers.
+ app.include_router(router)
+
+ # Mount frontend from the static directory.
+ # This must come after the API routes so that it does not shadow them.
+ static_path = get_static_path()
+ if static_path.exists():
+ app.mount(
+ "/",
+ StaticFiles(
+ directory=static_path,
+ html=True,
+ follow_symlink=True,
+ ),
+ name="static",
+ )
+
+ return app
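One plausible way to serve the app locally; uvicorn is assumed to be available, and the actual CLI entry point is not part of this hunk:

    import uvicorn

    from skore.project import load
    from skore.ui.app import create_app

    # Open an existing project and serve the UI plus the /api routes on port 8000.
    app = create_app(project=load("project.skore"))
    uvicorn.run(app, host="127.0.0.1", port=8000)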
diff --git a/src/skore/ui/dependencies.py b/src/skore/ui/dependencies.py
new file mode 100644
index 000000000..b5bc64592
--- /dev/null
+++ b/src/skore/ui/dependencies.py
@@ -0,0 +1,17 @@
+"""Declare all injectable dependencies."""
+
+from pathlib import Path
+
+from fastapi.templating import Jinja2Templates
+
+__UI_MODULE_PATH = Path(__file__).resolve().parent
+
+
+def get_templates():
+ """Injectable template engine."""
+ return Jinja2Templates(directory=__UI_MODULE_PATH / "templates")
+
+
+def get_static_path():
+ """Injectable static path."""
+ return __UI_MODULE_PATH / "static"
diff --git a/src/skore/ui/report.py b/src/skore/ui/report.py
new file mode 100644
index 000000000..518daf66c
--- /dev/null
+++ b/src/skore/ui/report.py
@@ -0,0 +1,116 @@
+"""The definition of API routes to list project items and get them."""
+
+import base64
+from dataclasses import asdict, dataclass
+from pathlib import Path
+from typing import Annotated, Any
+
+from fastapi import APIRouter, Request
+from fastapi.params import Depends
+from fastapi.templating import Jinja2Templates
+
+from skore.item.media_item import MediaItem
+from skore.item.numpy_array_item import NumpyArrayItem
+from skore.item.pandas_dataframe_item import PandasDataFrameItem
+from skore.item.primitive_item import PrimitiveItem
+from skore.item.sklearn_base_estimator_item import SklearnBaseEstimatorItem
+from skore.layout import Layout
+from skore.project import Project
+
+from .dependencies import get_static_path, get_templates
+
+router = APIRouter()
+
+
+@dataclass
+class SerializedProject:
+ """Serialized project, to be sent to the frontend."""
+
+ layout: Layout
+ items: dict[str, dict[str, Any]]
+
+
+def __serialize_project(project: Project) -> SerializedProject:
+ try:
+ layout = project.get_report_layout()
+ except KeyError:
+ layout = []
+
+ items = {}
+ for key in project.list_keys():
+ item = project.get_item(key)
+
+ media_type = None
+ if isinstance(item, PrimitiveItem):
+ value = item.primitive
+ media_type = "text/markdown"
+ elif isinstance(item, NumpyArrayItem):
+ value = item.array_list
+ media_type = "text/markdown"
+ elif isinstance(item, PandasDataFrameItem):
+ value = item.dataframe_dict
+ media_type = "application/vnd.dataframe+json"
+ elif isinstance(item, SklearnBaseEstimatorItem):
+ value = item.estimator_html_repr
+ media_type = "application/vnd.sklearn.estimator+html"
+ elif isinstance(item, MediaItem):
+ value = base64.b64encode(item.media_bytes).decode()
+ media_type = item.media_type
+ else:
+ raise ValueError(f"Item {item} is not a known item type.")
+
+ items[key] = {
+ "media_type": media_type,
+ "value": value,
+ "updated_at": item.updated_at,
+ "created_at": item.created_at,
+ }
+
+ return SerializedProject(layout=layout, items=items)
+
+
+@router.get("/items")
+async def get_items(request: Request):
+ """Serialize a project and send it."""
+ project = request.app.state.project
+ return __serialize_project(project)
+
+
+@router.post("/report/share")
+async def share_store(
+ request: Request,
+ layout: Layout,
+ templates: Annotated[Jinja2Templates, Depends(get_templates)],
+ static_path: Annotated[Path, Depends(get_static_path)],
+):
+ """Serve an inlined shareable HTML page."""
+ project = request.app.state.project
+
+ # Get static assets to inject them into the report template
+ def read_asset_content(filename: str):
+ with open(static_path / filename) as f:
+ return f.read()
+
+ script_content = read_asset_content("skore.umd.cjs")
+ styles_content = read_asset_content("style.css")
+
+ # Fill the Jinja context
+ context = {
+ "project": asdict(__serialize_project(project)),
+ "layout": [{"key": item.key, "size": item.size} for item in layout],
+ "script": script_content,
+ "styles": styles_content,
+ }
+
+ # Render the template and send the result
+ return templates.TemplateResponse(
+ request=request, name="share.html.jinja", context=context
+ )
+
+
+@router.put("/report/layout", status_code=201)
+async def set_report_layout(request: Request, layout: Layout):
+ """Set the report layout."""
+ project = request.app.state.project
+ project.put_report_layout(layout)
+ return __serialize_project(project)
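A sketch of how these routes could be exercised with FastAPI's TestClient (httpx required); my_project is a placeholder for any loaded Project instance, and the response shapes are paraphrased from __serialize_project:

    from fastapi.testclient import TestClient

    from skore.ui.app import create_app

    client = TestClient(create_app(project=my_project))  # my_project: placeholder Project

    client.get("/api/items").json()
    # -> {"layout": [...], "items": {"<key>": {"media_type": ..., "value": ...,
    #                                          "created_at": ..., "updated_at": ...}}}

    client.put(
        "/api/report/layout",
        json=[{"key": "accuracy", "size": "small"}],
    )                                                     # 201, stores the layout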
diff --git a/src/skore/api/routes/templates/share.html.jinja b/src/skore/ui/templates/share.html.jinja
similarity index 83%
rename from src/skore/api/routes/templates/share.html.jinja
rename to src/skore/ui/templates/share.html.jinja
index e59be442d..eebcfa8d9 100644
--- a/src/skore/api/routes/templates/share.html.jinja
+++ b/src/skore/ui/templates/share.html.jinja
@@ -9,8 +9,8 @@
:skore. {{ uri }}
-