Allow downloading more content from a webpage and index it #215 #265

Closed · wants to merge 6 commits
31 changes: 25 additions & 6 deletions apps/web/app/api/assets/[assetId]/route.ts
@@ -16,10 +16,29 @@ export async function GET(
assetId: params.assetId,
});

return new Response(asset, {
status: 200,
headers: {
"Content-type": metadata.contentType,
},
});
const range = request.headers.get("Range");
if (range) {
const parts = range.replace(/bytes=/, "").split("-");
const start = parseInt(parts[0], 10);
const end = parts[1] ? parseInt(parts[1], 10) : asset.length - 1;

const chunk = asset.subarray(start, end + 1);
return new Response(chunk, {
status: 206, // Partial Content
headers: {
"Content-Range": `bytes ${start}-${end}/${asset.length}`,
"Accept-Ranges": "bytes",
"Content-Length": chunk.length.toString(),
"Content-type": metadata.contentType,
},
});
} else {
return new Response(asset, {
status: 200,
headers: {
"Content-Length": asset.length.toString(),
"Content-type": metadata.contentType,
},
});
}
}
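
The Range branch above is what enables seeking in the new video player: the browser sends `Range: bytes=start-end` (or `bytes=start-`) and expects a 206 response with a matching `Content-Range`. A minimal sketch of exercising the endpoint by hand; the asset ID is a placeholder, not from this PR:

```ts
// Fetch the first kilobyte of an asset and inspect the range headers.
// "some-asset-id" is a placeholder, not a real ID.
const res = await fetch("/api/assets/some-asset-id", {
  headers: { Range: "bytes=0-1023" },
});

console.log(res.status); // 206 Partial Content
console.log(res.headers.get("Content-Range")); // e.g. "bytes 0-1023/52428800"
const chunk = new Uint8Array(await res.arrayBuffer()); // 1024 bytes
```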
19 changes: 19 additions & 0 deletions apps/web/components/dashboard/preview/LinkContentSection.tsx
@@ -60,6 +60,20 @@ function CachedContentSection({ link }: { link: ZBookmarkedLink }) {
return <ScrollArea className="h-full">{content}</ScrollArea>;
}

function VideoSection({ link }: { link: ZBookmarkedLink }) {
return (
<div className="relative h-full w-full overflow-hidden">
<div className="absolute inset-0 h-full w-full">
{/* eslint-disable-next-line jsx-a11y/media-has-caption -- captions not (yet) available */}
<video className="m-auto max-h-full max-w-full" controls>
<source src={`/api/assets/${link.videoAssetId}`} />
Not supported by your browser
</video>
</div>
</div>
);
}

export default function LinkContentSection({
bookmark,
}: {
@@ -76,6 +90,8 @@ export default function LinkContentSection({
content = <CachedContentSection link={bookmark.content} />;
} else if (section === "archive") {
content = <FullPageArchiveSection link={bookmark.content} />;
} else if (section === "video") {
content = <VideoSection link={bookmark.content} />;
} else {
content = <ScreenshotSection link={bookmark.content} />;
}
@@ -101,6 +117,9 @@
>
Archive
</SelectItem>
<SelectItem value="video" disabled={!bookmark.content.videoAssetId}>
Video
</SelectItem>
</SelectGroup>
</SelectContent>
</Select>
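
`VideoSection` and the new `Video` select item assume the bookmarked link exposes a `videoAssetId`. A hypothetical sketch of the field this UI relies on; the real `ZBookmarkedLink` schema lives in the shared package and may differ:

```ts
// Hypothetical shape assumed by VideoSection; only the relevant field is shown.
interface ZBookmarkedLink {
  videoAssetId?: string | null;
  // ...other bookmark-link fields omitted
}
```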
94 changes: 17 additions & 77 deletions apps/workers/crawlerWorker.ts
@@ -23,23 +23,16 @@ import AdblockerPlugin from "puppeteer-extra-plugin-adblocker";
import StealthPlugin from "puppeteer-extra-plugin-stealth";
import { withTimeout } from "utils";

import type { ZCrawlLinkRequest } from "@hoarder/shared/queues";
import { db, HoarderDBTransaction } from "@hoarder/db";
import {
assets,
AssetTypes,
bookmarkAssets,
bookmarkLinks,
bookmarks,
} from "@hoarder/db/schema";
import { db } from "@hoarder/db";
import { bookmarkAssets, bookmarkLinks, bookmarks } from "@hoarder/db/schema";
import { DequeuedJob, Runner } from "@hoarder/queue";
import {
ASSET_TYPES,
deleteAsset,
IMAGE_ASSET_TYPES,
newAssetId,
saveAsset,
saveAssetFromFile,
silentDeleteAsset,
SUPPORTED_UPLOAD_ASSET_TYPES,
} from "@hoarder/shared/assetdb";
import serverConfig from "@hoarder/shared/config";
@@ -48,9 +41,14 @@ import {
LinkCrawlerQueue,
OpenAIQueue,
triggerSearchReindex,
triggerVideoWorker,
ZCrawlLinkRequest,
zCrawlLinkRequestSchema,
} from "@hoarder/shared/queues";
import { BookmarkTypes } from "@hoarder/shared/types/bookmarks";
import { DBAssetTypes } from "@hoarder/shared/utils/bookmarkUtils";

import { getBookmarkDetails, updateAsset } from "./workerUtils";

const metascraperParser = metascraper([
metascraperAmazon(),
@@ -198,33 +196,6 @@ async function changeBookmarkStatus(
.where(eq(bookmarkLinks.id, bookmarkId));
}

async function getBookmarkDetails(bookmarkId: string) {
const bookmark = await db.query.bookmarks.findFirst({
where: eq(bookmarks.id, bookmarkId),
with: {
link: true,
assets: true,
},
});

if (!bookmark || !bookmark.link) {
throw new Error("The bookmark either doesn't exist or not a link");
}
return {
url: bookmark.link.url,
userId: bookmark.userId,
screenshotAssetId: bookmark.assets.find(
(a) => a.assetType == AssetTypes.LINK_SCREENSHOT,
)?.id,
imageAssetId: bookmark.assets.find(
(a) => a.assetType == AssetTypes.LINK_BANNER_IMAGE,
)?.id,
fullPageArchiveAssetId: bookmark.assets.find(
(a) => a.assetType == AssetTypes.LINK_FULL_PAGE_ARCHIVE,
)?.id,
};
}

/**
* This provides some "basic" protection from malicious URLs. However, all of those
* can be easily circumvented by pointing dns of origin to localhost, or with
Expand Down Expand Up @@ -549,26 +520,22 @@ async function crawlAndParseUrl(
screenshotAssetId,
oldScreenshotAssetId,
bookmarkId,
AssetTypes.LINK_SCREENSHOT,
DBAssetTypes.LINK_SCREENSHOT,
txn,
);
await updateAsset(
imageAssetId,
oldImageAssetId,
bookmarkId,
AssetTypes.LINK_BANNER_IMAGE,
DBAssetTypes.LINK_BANNER_IMAGE,
txn,
);
});

// Delete the old assets if any
await Promise.all([
oldScreenshotAssetId
? deleteAsset({ userId, assetId: oldScreenshotAssetId }).catch(() => ({}))
: {},
oldImageAssetId
? deleteAsset({ userId, assetId: oldImageAssetId }).catch(() => ({}))
: {},
silentDeleteAsset(userId, oldScreenshotAssetId),
silentDeleteAsset(userId, oldImageAssetId),
]);

return async () => {
@@ -585,14 +552,12 @@
fullPageArchiveAssetId,
oldFullPageArchiveAssetId,
bookmarkId,
AssetTypes.LINK_FULL_PAGE_ARCHIVE,
DBAssetTypes.LINK_FULL_PAGE_ARCHIVE,
txn,
);
});
if (oldFullPageArchiveAssetId) {
await deleteAsset({ userId, assetId: oldFullPageArchiveAssetId }).catch(
() => ({}),
);
await silentDeleteAsset(userId, oldFullPageArchiveAssetId);
}
}
};
@@ -659,35 +624,10 @@ async function runCrawler(job: DequeuedJob<ZCrawlLinkRequest>) {
}

// Update the search index
triggerSearchReindex(bookmarkId);
await triggerSearchReindex(bookmarkId);
// Trigger a potential download of a video from the URL
await triggerVideoWorker(bookmarkId, url);

// Do the archival as a separate last step as it has the potential for failure
await archivalLogic();
}

/**
* Removes the old asset and adds a new one instead
* @param newAssetId the new assetId to add
* @param oldAssetId the old assetId to remove (if it exists)
* @param bookmarkId the id of the bookmark the asset belongs to
* @param assetType the type of the asset
* @param txn the transaction where this update should happen in
*/
async function updateAsset(
newAssetId: string | null,
oldAssetId: string | undefined,
bookmarkId: string,
assetType: AssetTypes,
txn: HoarderDBTransaction,
) {
if (newAssetId) {
if (oldAssetId) {
await txn.delete(assets).where(eq(assets.id, oldAssetId));
}
await txn.insert(assets).values({
id: newAssetId,
assetType,
bookmarkId,
});
}
}
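
`getBookmarkDetails` and `updateAsset` now live in `./workerUtils`, and the inline `deleteAsset(...).catch(() => ({}))` calls are replaced by `silentDeleteAsset`. Its body isn't shown in this diff; a sketch consistent with the call sites it replaces:

```ts
import { deleteAsset } from "@hoarder/shared/assetdb";

// Sketch of silentDeleteAsset, inferred from the deleted call sites:
// skip missing asset IDs and swallow deletion failures.
export async function silentDeleteAsset(
  userId: string,
  assetId: string | undefined,
) {
  if (assetId) {
    await deleteAsset({ userId, assetId }).catch(() => ({}));
  }
}
```

`triggerVideoWorker` is likewise not shown here; presumably it enqueues a download job for the new video worker, mirroring `triggerSearchReindex`.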
6 changes: 4 additions & 2 deletions apps/workers/index.ts
@@ -8,19 +8,21 @@ import { CrawlerWorker } from "./crawlerWorker";
import { shutdownPromise } from "./exit";
import { OpenAiWorker } from "./openaiWorker";
import { SearchIndexingWorker } from "./searchWorker";
import { VideoWorker } from "./videoWorker";

async function main() {
logger.info(`Workers version: ${serverConfig.serverVersion ?? "not set"}`);
runQueueDBMigrations();

const [crawler, openai, search] = [
const [crawler, openai, search, video] = [
await CrawlerWorker.build(),
OpenAiWorker.build(),
SearchIndexingWorker.build(),
await VideoWorker.build(),
];

await Promise.any([
Promise.all([crawler.run(), openai.run(), search.run()]),
Promise.all([crawler.run(), openai.run(), search.run(), video?.run()]),
shutdownPromise,
]);
}
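
`video?.run()` is optional-chained while the other workers are not, which suggests `VideoWorker.build()` can resolve to `undefined` (for example, when video downloading is disabled). A hypothetical sketch of that pattern; the config flag name is an assumption, not from this PR:

```ts
import serverConfig from "@hoarder/shared/config";

// Hypothetical builder that opts out when the feature is disabled.
export class VideoWorker {
  static async build(): Promise<VideoWorker | undefined> {
    if (!serverConfig.crawler.downloadVideo) {
      return undefined; // matches the video?.run() call in main()
    }
    return new VideoWorker();
  }

  async run(): Promise<void> {
    // consume queued video download jobs
  }
}
```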
2 changes: 1 addition & 1 deletion apps/workers/openaiWorker.ts
@@ -397,5 +397,5 @@ async function runOpenAI(job: DequeuedJob<ZOpenAIRequest>) {
await connectTags(bookmarkId, tags, bookmark.userId);

// Update the search index
triggerSearchReindex(bookmarkId);
await triggerSearchReindex(bookmarkId);
}
1 change: 1 addition & 0 deletions apps/workers/package.json
@@ -36,6 +36,7 @@
"puppeteer-extra-plugin-stealth": "^2.11.2",
"tsx": "^4.7.1",
"typescript": "^5.3.3",
"yt-dlp-wrap": "^2.3.12",
"zod": "^3.22.4"
},
"devDependencies": {
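
`yt-dlp-wrap` wraps the `yt-dlp` binary behind a small Node API. A minimal sketch of how the video worker might use it; the binary location, format selector, and output path are assumptions, not taken from this PR:

```ts
import YTDlpWrap from "yt-dlp-wrap";

// Fetch the yt-dlp binary once (helper shipped with yt-dlp-wrap),
// then download the best available mp4 for a URL.
await YTDlpWrap.downloadFromGithub("./yt-dlp");

const ytDlpWrap = new YTDlpWrap("./yt-dlp");
await ytDlpWrap.execPromise([
  "https://example.com/some-video", // placeholder URL
  "-f", "best[ext=mp4]",
  "-o", "/tmp/video.mp4",
]);
```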