Skip to content

Commit

Permalink
Allow downloading more content from a webpage and index it hoarder-ap…
Browse files Browse the repository at this point in the history
…p#215

Added a worker that downloads videos from crawled pages, enabled via environment variables
Refactored the crawler code slightly
Added a new video asset type
Updated the documentation
  • Loading branch information
kamtschatka committed Jul 21, 2024
1 parent 73e8a12 commit a702812
Show file tree
Hide file tree
Showing 16 changed files with 446 additions and 81 deletions.
31 changes: 25 additions & 6 deletions apps/web/app/api/assets/[assetId]/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,29 @@ export async function GET(
assetId: params.assetId,
});

return new Response(asset, {
status: 200,
headers: {
"Content-type": metadata.contentType,
},
});
const range = request.headers.get("Range");
if (range) {
const parts = range.replace(/bytes=/, "").split("-");
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : asset.length - 1;

const chunk = asset.subarray(start, end + 1);
return new Response(chunk, {
status: 206, // Partial Content
headers: {
"Content-Range": `bytes ${start}-${end}/${asset.length}`,
"Accept-Ranges": "bytes",
"Content-Length": chunk.length.toString(),
"Content-type": metadata.contentType,
},
});
} else {
return new Response(asset, {
status: 200,
headers: {
"Content-Length": asset.length.toString(),
"Content-type": metadata.contentType,
},
});
}
}
19 changes: 19 additions & 0 deletions apps/web/components/dashboard/preview/LinkContentSection.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,20 @@ function CachedContentSection({ link }: { link: ZBookmarkedLink }) {
return <ScrollArea className="h-full">{content}</ScrollArea>;
}

/** Renders the downloaded video asset of a bookmarked link, centered and scaled to fit its container. */
function VideoSection({ link }: { link: ZBookmarkedLink }) {
  // The asset route streams the stored video (with Range support for seeking).
  const videoUrl = `/api/assets/${link.videoAssetId}`;
  return (
    <div className="relative h-full w-full overflow-hidden">
      <div className="absolute inset-0 h-full w-full">
        {/* eslint-disable-next-line jsx-a11y/media-has-caption -- captions not (yet) available */}
        <video className="m-auto max-h-full max-w-full" controls>
          <source src={videoUrl} />
          Not supported by your browser
        </video>
      </div>
    </div>
  );
}

export default function LinkContentSection({
bookmark,
}: {
Expand All @@ -76,6 +90,8 @@ export default function LinkContentSection({
content = <CachedContentSection link={bookmark.content} />;
} else if (section === "archive") {
content = <FullPageArchiveSection link={bookmark.content} />;
} else if (section === "video") {
content = <VideoSection link={bookmark.content} />;
} else {
content = <ScreenshotSection link={bookmark.content} />;
}
Expand All @@ -101,6 +117,9 @@ export default function LinkContentSection({
>
Archive
</SelectItem>
<SelectItem value="video" disabled={!bookmark.content.videoAssetId}>
Video
</SelectItem>
</SelectGroup>
</SelectContent>
</Select>
Expand Down
45 changes: 13 additions & 32 deletions apps/workers/crawlerWorker.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@ import { Readability } from "@mozilla/readability";
import { Mutex } from "async-mutex";
import Database from "better-sqlite3";
import DOMPurify from "dompurify";
import { eq, ExtractTablesWithRelations } from "drizzle-orm";
import { SQLiteTransaction } from "drizzle-orm/sqlite-core";
import { eq } from "drizzle-orm";
import { execa } from "execa";
import { isShuttingDown } from "exit";
import { JSDOM } from "jsdom";
Expand Down Expand Up @@ -35,6 +34,8 @@ import {
bookmarks,
} from "@hoarder/db/schema";
import { DequeuedJob, Runner } from "@hoarder/queue";
import { db } from "@hoarder/db";
import { bookmarkAssets, bookmarkLinks, bookmarks } from "@hoarder/db/schema";
import {
ASSET_TYPES,
IMAGE_ASSET_TYPES,
Expand All @@ -50,9 +51,14 @@ import {
LinkCrawlerQueue,
OpenAIQueue,
triggerSearchReindex,
triggerVideoWorker,
ZCrawlLinkRequest,
zCrawlLinkRequestSchema,
} from "@hoarder/shared/queues";
import { BookmarkTypes } from "@hoarder/shared/types/bookmarks";
import { DBAssetTypes } from "@hoarder/shared/utils/bookmarkUtils";

import { getBookmarkDetails, updateAsset } from "./workerUtils";

const metascraperParser = metascraper([
metascraperAmazon(),
Expand Down Expand Up @@ -200,33 +206,6 @@ async function changeBookmarkStatus(
.where(eq(bookmarkLinks.id, bookmarkId));
}

/**
 * Loads a bookmark (with its link and assets) and returns the pieces the
 * crawler worker needs: the URL, owner, and the ids of any previously stored
 * screenshot / banner-image / full-page-archive assets.
 *
 * @param bookmarkId - id of the bookmark row to look up.
 * @returns url, userId and the (possibly undefined) asset ids per asset type.
 * @throws Error if the bookmark does not exist or is not a link bookmark.
 */
async function getBookmarkDetails(bookmarkId: string) {
  const bookmark = await db.query.bookmarks.findFirst({
    where: eq(bookmarks.id, bookmarkId),
    with: {
      link: true,
      assets: true,
    },
  });

  if (!bookmark || !bookmark.link) {
    throw new Error("The bookmark either doesn't exist or is not a link");
  }

  // Each asset type occurs at most once per bookmark; pick the first match.
  const findAssetId = (assetType: AssetTypes) =>
    bookmark.assets.find((a) => a.assetType === assetType)?.id;

  return {
    url: bookmark.link.url,
    userId: bookmark.userId,
    screenshotAssetId: findAssetId(AssetTypes.LINK_SCREENSHOT),
    imageAssetId: findAssetId(AssetTypes.LINK_BANNER_IMAGE),
    fullPageArchiveAssetId: findAssetId(AssetTypes.LINK_FULL_PAGE_ARCHIVE),
  };
}

/**
* This provides some "basic" protection from malicious URLs. However, all of those
* can be easily circumvented by pointing dns of origin to localhost, or with
Expand Down Expand Up @@ -551,14 +530,14 @@ async function crawlAndParseUrl(
screenshotAssetId,
oldScreenshotAssetId,
bookmarkId,
AssetTypes.LINK_SCREENSHOT,
DBAssetTypes.LINK_SCREENSHOT,
txn,
);
await updateAsset(
imageAssetId,
oldImageAssetId,
bookmarkId,
AssetTypes.LINK_BANNER_IMAGE,
DBAssetTypes.LINK_BANNER_IMAGE,
txn,
);
});
Expand All @@ -583,7 +562,7 @@ async function crawlAndParseUrl(
fullPageArchiveAssetId,
oldFullPageArchiveAssetId,
bookmarkId,
AssetTypes.LINK_FULL_PAGE_ARCHIVE,
DBAssetTypes.LINK_FULL_PAGE_ARCHIVE,
txn,
);
});
Expand Down Expand Up @@ -656,6 +635,8 @@ async function runCrawler(job: DequeuedJob<ZCrawlLinkRequest>) {

// Update the search index
await triggerSearchReindex(bookmarkId);
// Trigger a potential download of a video from the URL
await triggerVideoWorker(bookmarkId, url);

// Do the archival as a separate last step as it has the potential for failure
await archivalLogic();
Expand Down
6 changes: 4 additions & 2 deletions apps/workers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,21 @@ import { CrawlerWorker } from "./crawlerWorker";
import { shutdownPromise } from "./exit";
import { OpenAiWorker } from "./openaiWorker";
import { SearchIndexingWorker } from "./searchWorker";
import { VideoWorker } from "./videoWorker";

async function main() {
logger.info(`Workers version: ${serverConfig.serverVersion ?? "not set"}`);
runQueueDBMigrations();

const [crawler, openai, search] = [
const [crawler, openai, search, video] = [
await CrawlerWorker.build(),
OpenAiWorker.build(),
SearchIndexingWorker.build(),
await VideoWorker.build(),
];

await Promise.any([
Promise.all([crawler.run(), openai.run(), search.run()]),
Promise.all([crawler.run(), openai.run(), search.run(), video?.run()]),
shutdownPromise,
]);
}
Expand Down
1 change: 1 addition & 0 deletions apps/workers/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
"puppeteer-extra-plugin-stealth": "^2.11.2",
"tsx": "^4.7.1",
"typescript": "^5.3.3",
"yt-dlp-wrap": "^2.3.12",
"zod": "^3.22.4"
},
"devDependencies": {
Expand Down
Loading

0 comments on commit a702812

Please sign in to comment.