about summary refs log tree commit diff stats
path: root/packages/trpc/routers
diff options
context:
space:
mode:
Diffstat (limited to 'packages/trpc/routers')
-rw-r--r--  packages/trpc/routers/_app.ts              2
-rw-r--r--  packages/trpc/routers/admin.test.ts      265
-rw-r--r--  packages/trpc/routers/admin.ts           248
-rw-r--r--  packages/trpc/routers/apiKeys.ts           5
-rw-r--r--  packages/trpc/routers/assets.ts          160
-rw-r--r--  packages/trpc/routers/bookmarks.test.ts  192
-rw-r--r--  packages/trpc/routers/bookmarks.ts       574
-rw-r--r--  packages/trpc/routers/config.ts           10
-rw-r--r--  packages/trpc/routers/feeds.test.ts      154
-rw-r--r--  packages/trpc/routers/importSessions.test.ts  233
-rw-r--r--  packages/trpc/routers/importSessions.ts  109
-rw-r--r--  packages/trpc/routers/lists.test.ts      382
-rw-r--r--  packages/trpc/routers/lists.ts             2
-rw-r--r--  packages/trpc/routers/tags.ts              1
-rw-r--r--  packages/trpc/routers/users.test.ts      111
-rw-r--r--  packages/trpc/routers/users.ts            46
-rw-r--r--  packages/trpc/routers/webhooks.test.ts    22
17 files changed, 2048 insertions(+), 468 deletions(-)
diff --git a/packages/trpc/routers/_app.ts b/packages/trpc/routers/_app.ts
index bae69130..9e20bb7e 100644
--- a/packages/trpc/routers/_app.ts
+++ b/packages/trpc/routers/_app.ts
@@ -4,6 +4,7 @@ import { apiKeysAppRouter } from "./apiKeys";
import { assetsAppRouter } from "./assets";
import { backupsAppRouter } from "./backups";
import { bookmarksAppRouter } from "./bookmarks";
+import { configAppRouter } from "./config";
import { feedsAppRouter } from "./feeds";
import { highlightsAppRouter } from "./highlights";
import { importSessionsRouter } from "./importSessions";
@@ -35,6 +36,7 @@ export const appRouter = router({
invites: invitesAppRouter,
publicBookmarks: publicBookmarks,
subscriptions: subscriptionsRouter,
+ config: configAppRouter,
});
// export type definition of API
export type AppRouter = typeof appRouter;
diff --git a/packages/trpc/routers/admin.test.ts b/packages/trpc/routers/admin.test.ts
new file mode 100644
index 00000000..2f80d9c0
--- /dev/null
+++ b/packages/trpc/routers/admin.test.ts
@@ -0,0 +1,265 @@
+import { eq } from "drizzle-orm";
+import { assert, beforeEach, describe, expect, test } from "vitest";
+
+import { bookmarkLinks, users } from "@karakeep/db/schema";
+import { BookmarkTypes } from "@karakeep/shared/types/bookmarks";
+
+import type { CustomTestContext } from "../testUtils";
+import { buildTestContext, getApiCaller } from "../testUtils";
+
+beforeEach<CustomTestContext>(async (context) => {
+ const testContext = await buildTestContext(true);
+ Object.assign(context, testContext);
+});
+
+describe("Admin Routes", () => {
+ describe("getBookmarkDebugInfo", () => {
+ test<CustomTestContext>("admin can access bookmark debug info for link bookmark", async ({
+ apiCallers,
+ db,
+ }) => {
+ // Create an admin user
+ const adminUser = await db
+ .insert(users)
+ .values({
+ name: "Admin User",
+ email: "admin@test.com",
+ role: "admin",
+ })
+ .returning();
+ const adminApi = getApiCaller(
+ db,
+ adminUser[0].id,
+ adminUser[0].email,
+ "admin",
+ );
+
+ // Create a bookmark as a regular user
+ const bookmark = await apiCallers[0].bookmarks.createBookmark({
+ url: "https://example.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Update the bookmark link with some metadata
+ await db
+ .update(bookmarkLinks)
+ .set({
+ crawlStatus: "success",
+ crawlStatusCode: 200,
+ crawledAt: new Date(),
+ htmlContent: "<html><body>Test content</body></html>",
+ title: "Test Title",
+ description: "Test Description",
+ })
+ .where(eq(bookmarkLinks.id, bookmark.id));
+
+ // Admin should be able to access debug info
+ const debugInfo = await adminApi.admin.getBookmarkDebugInfo({
+ bookmarkId: bookmark.id,
+ });
+
+ expect(debugInfo.id).toEqual(bookmark.id);
+ expect(debugInfo.type).toEqual(BookmarkTypes.LINK);
+ expect(debugInfo.linkInfo).toBeDefined();
+ assert(debugInfo.linkInfo);
+ expect(debugInfo.linkInfo.url).toEqual("https://example.com");
+ expect(debugInfo.linkInfo.crawlStatus).toEqual("success");
+ expect(debugInfo.linkInfo.crawlStatusCode).toEqual(200);
+ expect(debugInfo.linkInfo.hasHtmlContent).toEqual(true);
+ expect(debugInfo.linkInfo.htmlContentPreview).toBeDefined();
+ expect(debugInfo.linkInfo.htmlContentPreview).toContain("Test content");
+ });
+
+ test<CustomTestContext>("admin can access bookmark debug info for text bookmark", async ({
+ apiCallers,
+ db,
+ }) => {
+ // Create an admin user
+ const adminUser = await db
+ .insert(users)
+ .values({
+ name: "Admin User",
+ email: "admin@test.com",
+ role: "admin",
+ })
+ .returning();
+ const adminApi = getApiCaller(
+ db,
+ adminUser[0].id,
+ adminUser[0].email,
+ "admin",
+ );
+
+ // Create a text bookmark
+ const bookmark = await apiCallers[0].bookmarks.createBookmark({
+ text: "This is a test text bookmark",
+ type: BookmarkTypes.TEXT,
+ });
+
+ // Admin should be able to access debug info
+ const debugInfo = await adminApi.admin.getBookmarkDebugInfo({
+ bookmarkId: bookmark.id,
+ });
+
+ expect(debugInfo.id).toEqual(bookmark.id);
+ expect(debugInfo.type).toEqual(BookmarkTypes.TEXT);
+ expect(debugInfo.textInfo).toBeDefined();
+ assert(debugInfo.textInfo);
+ expect(debugInfo.textInfo.hasText).toEqual(true);
+ });
+
+ test<CustomTestContext>("admin can see bookmark tags in debug info", async ({
+ apiCallers,
+ db,
+ }) => {
+ // Create an admin user
+ const adminUser = await db
+ .insert(users)
+ .values({
+ name: "Admin User",
+ email: "admin@test.com",
+ role: "admin",
+ })
+ .returning();
+ const adminApi = getApiCaller(
+ db,
+ adminUser[0].id,
+ adminUser[0].email,
+ "admin",
+ );
+
+ // Create a bookmark with tags
+ const bookmark = await apiCallers[0].bookmarks.createBookmark({
+ url: "https://example.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Add tags to the bookmark
+ await apiCallers[0].bookmarks.updateTags({
+ bookmarkId: bookmark.id,
+ attach: [{ tagName: "test-tag-1" }, { tagName: "test-tag-2" }],
+ detach: [],
+ });
+
+ // Admin should be able to see tags in debug info
+ const debugInfo = await adminApi.admin.getBookmarkDebugInfo({
+ bookmarkId: bookmark.id,
+ });
+
+ expect(debugInfo.tags).toHaveLength(2);
+ expect(debugInfo.tags.map((t) => t.name).sort()).toEqual([
+ "test-tag-1",
+ "test-tag-2",
+ ]);
+ expect(debugInfo.tags[0].attachedBy).toEqual("human");
+ });
+
+ test<CustomTestContext>("non-admin user cannot access bookmark debug info", async ({
+ apiCallers,
+ }) => {
+ // Create a bookmark
+ const bookmark = await apiCallers[0].bookmarks.createBookmark({
+ url: "https://example.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Non-admin user should not be able to access debug info
+ // The admin procedure itself will throw FORBIDDEN
+ await expect(() =>
+ apiCallers[0].admin.getBookmarkDebugInfo({ bookmarkId: bookmark.id }),
+ ).rejects.toThrow(/FORBIDDEN/);
+ });
+
+ test<CustomTestContext>("debug info includes asset URLs with signed tokens", async ({
+ apiCallers,
+ db,
+ }) => {
+ // Create an admin user
+ const adminUser = await db
+ .insert(users)
+ .values({
+ name: "Admin User",
+ email: "admin@test.com",
+ role: "admin",
+ })
+ .returning();
+ const adminApi = getApiCaller(
+ db,
+ adminUser[0].id,
+ adminUser[0].email,
+ "admin",
+ );
+
+ // Create a bookmark
+ const bookmark = await apiCallers[0].bookmarks.createBookmark({
+ url: "https://example.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Get debug info
+ const debugInfo = await adminApi.admin.getBookmarkDebugInfo({
+ bookmarkId: bookmark.id,
+ });
+
+ // Check that assets array is present
+ expect(debugInfo.assets).toBeDefined();
+ expect(Array.isArray(debugInfo.assets)).toBe(true);
+
+ // If there are assets, check that they have signed URLs
+ if (debugInfo.assets.length > 0) {
+ const asset = debugInfo.assets[0];
+ expect(asset.url).toBeDefined();
+ expect(asset.url).toContain("/api/public/assets/");
+ expect(asset.url).toContain("token=");
+ }
+ });
+
+ test<CustomTestContext>("debug info truncates HTML content preview", async ({
+ apiCallers,
+ db,
+ }) => {
+ // Create an admin user
+ const adminUser = await db
+ .insert(users)
+ .values({
+ name: "Admin User",
+ email: "admin@test.com",
+ role: "admin",
+ })
+ .returning();
+ const adminApi = getApiCaller(
+ db,
+ adminUser[0].id,
+ adminUser[0].email,
+ "admin",
+ );
+
+ // Create a bookmark
+ const bookmark = await apiCallers[0].bookmarks.createBookmark({
+ url: "https://example.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Create a large HTML content
+ const largeContent = "<html><body>" + "x".repeat(2000) + "</body></html>";
+ await db
+ .update(bookmarkLinks)
+ .set({
+ htmlContent: largeContent,
+ })
+ .where(eq(bookmarkLinks.id, bookmark.id));
+
+ // Get debug info
+ const debugInfo = await adminApi.admin.getBookmarkDebugInfo({
+ bookmarkId: bookmark.id,
+ });
+
+ // Check that HTML preview is truncated to 1000 characters
+ assert(debugInfo.linkInfo);
+ expect(debugInfo.linkInfo.htmlContentPreview).toBeDefined();
+ expect(debugInfo.linkInfo.htmlContentPreview!.length).toBeLessThanOrEqual(
+ 1000,
+ );
+ });
+ });
+});
diff --git a/packages/trpc/routers/admin.ts b/packages/trpc/routers/admin.ts
index 463d2ddf..f64e071a 100644
--- a/packages/trpc/routers/admin.ts
+++ b/packages/trpc/routers/admin.ts
@@ -9,7 +9,9 @@ import {
AssetPreprocessingQueue,
FeedQueue,
LinkCrawlerQueue,
+ LowPriorityCrawlerQueue,
OpenAIQueue,
+ QueuePriority,
SearchIndexingQueue,
triggerSearchReindex,
VideoWorkerQueue,
@@ -17,6 +19,7 @@ import {
zAdminMaintenanceTaskSchema,
} from "@karakeep/shared-server";
import serverConfig from "@karakeep/shared/config";
+import logger from "@karakeep/shared/logger";
import { PluginManager, PluginType } from "@karakeep/shared/plugins";
import { getSearchClient } from "@karakeep/shared/search";
import {
@@ -24,9 +27,11 @@ import {
updateUserSchema,
zAdminCreateUserSchema,
} from "@karakeep/shared/types/admin";
+import { BookmarkTypes } from "@karakeep/shared/types/bookmarks";
import { generatePasswordSalt, hashPassword } from "../auth";
import { adminProcedure, router } from "../index";
+import { Bookmark } from "../models/bookmarks";
import { User } from "../models/users";
export const adminAppRouter = router({
@@ -86,6 +91,7 @@ export const adminAppRouter = router({
const [
// Crawls
queuedCrawls,
+ queuedLowPriorityCrawls,
[{ value: pendingCrawls }],
[{ value: failedCrawls }],
@@ -114,6 +120,7 @@ export const adminAppRouter = router({
] = await Promise.all([
// Crawls
LinkCrawlerQueue.stats(),
+ LowPriorityCrawlerQueue.stats(),
ctx.db
.select({ value: count() })
.from(bookmarkLinks)
@@ -165,7 +172,11 @@ export const adminAppRouter = router({
return {
crawlStats: {
- queued: queuedCrawls.pending + queuedCrawls.pending_retry,
+ queued:
+ queuedCrawls.pending +
+ queuedCrawls.pending_retry +
+ queuedLowPriorityCrawls.pending +
+ queuedLowPriorityCrawls.pending_retry,
pending: pendingCrawls,
failed: failedCrawls,
},
@@ -201,7 +212,7 @@ export const adminAppRouter = router({
recrawlLinks: adminProcedure
.input(
z.object({
- crawlStatus: z.enum(["success", "failure", "all"]),
+ crawlStatus: z.enum(["success", "failure", "pending", "all"]),
runInference: z.boolean(),
}),
)
@@ -217,10 +228,15 @@ export const adminAppRouter = router({
await Promise.all(
bookmarkIds.map((b) =>
- LinkCrawlerQueue.enqueue({
- bookmarkId: b.id,
- runInference: input.runInference,
- }),
+ LowPriorityCrawlerQueue.enqueue(
+ {
+ bookmarkId: b.id,
+ runInference: input.runInference,
+ },
+ {
+ priority: QueuePriority.Low,
+ },
+ ),
),
);
}),
@@ -233,7 +249,13 @@ export const adminAppRouter = router({
},
});
- await Promise.all(bookmarkIds.map((b) => triggerSearchReindex(b.id)));
+ await Promise.all(
+ bookmarkIds.map((b) =>
+ triggerSearchReindex(b.id, {
+ priority: QueuePriority.Low,
+ }),
+ ),
+ );
}),
reprocessAssetsFixMode: adminProcedure.mutation(async ({ ctx }) => {
const bookmarkIds = await ctx.db.query.bookmarkAssets.findMany({
@@ -244,10 +266,15 @@ export const adminAppRouter = router({
await Promise.all(
bookmarkIds.map((b) =>
- AssetPreprocessingQueue.enqueue({
- bookmarkId: b.id,
- fixMode: true,
- }),
+ AssetPreprocessingQueue.enqueue(
+ {
+ bookmarkId: b.id,
+ fixMode: true,
+ },
+ {
+ priority: QueuePriority.Low,
+ },
+ ),
),
);
}),
@@ -255,7 +282,7 @@ export const adminAppRouter = router({
.input(
z.object({
type: z.enum(["tag", "summarize"]),
- status: z.enum(["success", "failure", "all"]),
+ status: z.enum(["success", "failure", "pending", "all"]),
}),
)
.mutation(async ({ input, ctx }) => {
@@ -277,7 +304,12 @@ export const adminAppRouter = router({
await Promise.all(
bookmarkIds.map((b) =>
- OpenAIQueue.enqueue({ bookmarkId: b.id, type: input.type }),
+ OpenAIQueue.enqueue(
+ { bookmarkId: b.id, type: input.type },
+ {
+ priority: QueuePriority.Low,
+ },
+ ),
),
);
}),
@@ -537,4 +569,194 @@ export const adminAppRouter = router({
queue: queueStatus,
};
}),
+ getBookmarkDebugInfo: adminProcedure
+ .input(z.object({ bookmarkId: z.string() }))
+ .output(
+ z.object({
+ id: z.string(),
+ type: z.enum([
+ BookmarkTypes.LINK,
+ BookmarkTypes.TEXT,
+ BookmarkTypes.ASSET,
+ ]),
+ source: z
+ .enum([
+ "api",
+ "web",
+ "extension",
+ "cli",
+ "mobile",
+ "singlefile",
+ "rss",
+ "import",
+ ])
+ .nullable(),
+ createdAt: z.date(),
+ modifiedAt: z.date().nullable(),
+ title: z.string().nullable(),
+ summary: z.string().nullable(),
+ taggingStatus: z.enum(["pending", "failure", "success"]).nullable(),
+ summarizationStatus: z
+ .enum(["pending", "failure", "success"])
+ .nullable(),
+ userId: z.string(),
+ linkInfo: z
+ .object({
+ url: z.string(),
+ crawlStatus: z.enum(["pending", "failure", "success"]),
+ crawlStatusCode: z.number().nullable(),
+ crawledAt: z.date().nullable(),
+ hasHtmlContent: z.boolean(),
+ hasContentAsset: z.boolean(),
+ htmlContentPreview: z.string().nullable(),
+ })
+ .nullable(),
+ textInfo: z
+ .object({
+ hasText: z.boolean(),
+ sourceUrl: z.string().nullable(),
+ })
+ .nullable(),
+ assetInfo: z
+ .object({
+ assetType: z.enum(["image", "pdf"]),
+ hasContent: z.boolean(),
+ fileName: z.string().nullable(),
+ })
+ .nullable(),
+ tags: z.array(
+ z.object({
+ id: z.string(),
+ name: z.string(),
+ attachedBy: z.enum(["ai", "human"]),
+ }),
+ ),
+ assets: z.array(
+ z.object({
+ id: z.string(),
+ assetType: z.string(),
+ size: z.number(),
+ url: z.string().nullable(),
+ }),
+ ),
+ }),
+ )
+ .query(async ({ input, ctx }) => {
+ logger.info(
+ `[admin] Admin ${ctx.user.id} accessed debug info for bookmark ${input.bookmarkId}`,
+ );
+
+ return await Bookmark.buildDebugInfo(ctx, input.bookmarkId);
+ }),
+ adminRecrawlBookmark: adminProcedure
+ .input(z.object({ bookmarkId: z.string() }))
+ .mutation(async ({ input, ctx }) => {
+ // Verify bookmark exists and is a link
+ const bookmark = await ctx.db.query.bookmarks.findFirst({
+ where: eq(bookmarks.id, input.bookmarkId),
+ });
+
+ if (!bookmark) {
+ throw new TRPCError({
+ code: "NOT_FOUND",
+ message: "Bookmark not found",
+ });
+ }
+
+ if (bookmark.type !== BookmarkTypes.LINK) {
+ throw new TRPCError({
+ code: "BAD_REQUEST",
+ message: "Only link bookmarks can be recrawled",
+ });
+ }
+
+ await LowPriorityCrawlerQueue.enqueue(
+ {
+ bookmarkId: input.bookmarkId,
+ },
+ {
+ priority: QueuePriority.Low,
+ groupId: "admin",
+ },
+ );
+ }),
+ adminReindexBookmark: adminProcedure
+ .input(z.object({ bookmarkId: z.string() }))
+ .mutation(async ({ input, ctx }) => {
+ // Verify bookmark exists
+ const bookmark = await ctx.db.query.bookmarks.findFirst({
+ where: eq(bookmarks.id, input.bookmarkId),
+ });
+
+ if (!bookmark) {
+ throw new TRPCError({
+ code: "NOT_FOUND",
+ message: "Bookmark not found",
+ });
+ }
+
+ await triggerSearchReindex(input.bookmarkId, {
+ priority: QueuePriority.Low,
+ groupId: "admin",
+ });
+ }),
+ adminRetagBookmark: adminProcedure
+ .input(z.object({ bookmarkId: z.string() }))
+ .mutation(async ({ input, ctx }) => {
+ // Verify bookmark exists
+ const bookmark = await ctx.db.query.bookmarks.findFirst({
+ where: eq(bookmarks.id, input.bookmarkId),
+ });
+
+ if (!bookmark) {
+ throw new TRPCError({
+ code: "NOT_FOUND",
+ message: "Bookmark not found",
+ });
+ }
+
+ await OpenAIQueue.enqueue(
+ {
+ bookmarkId: input.bookmarkId,
+ type: "tag",
+ },
+ {
+ priority: QueuePriority.Low,
+ groupId: "admin",
+ },
+ );
+ }),
+ adminResummarizeBookmark: adminProcedure
+ .input(z.object({ bookmarkId: z.string() }))
+ .mutation(async ({ input, ctx }) => {
+ // Verify bookmark exists and is a link
+ const bookmark = await ctx.db.query.bookmarks.findFirst({
+ where: eq(bookmarks.id, input.bookmarkId),
+ });
+
+ if (!bookmark) {
+ throw new TRPCError({
+ code: "NOT_FOUND",
+ message: "Bookmark not found",
+ });
+ }
+
+ if (bookmark.type !== BookmarkTypes.LINK) {
+ throw new TRPCError({
+ code: "BAD_REQUEST",
+ message: "Only link bookmarks can be summarized",
+ });
+ }
+
+ await OpenAIQueue.enqueue(
+ {
+ bookmarkId: input.bookmarkId,
+ type: "summarize",
+ },
+ {
+ priority: QueuePriority.Low,
+ groupId: "admin",
+ },
+ );
+ }),
});
diff --git a/packages/trpc/routers/apiKeys.ts b/packages/trpc/routers/apiKeys.ts
index 763bc23a..90de824a 100644
--- a/packages/trpc/routers/apiKeys.ts
+++ b/packages/trpc/routers/apiKeys.ts
@@ -1,5 +1,5 @@
import { TRPCError } from "@trpc/server";
-import { and, eq } from "drizzle-orm";
+import { and, desc, eq } from "drizzle-orm";
import { z } from "zod";
import { apiKeys } from "@karakeep/db/schema";
@@ -83,6 +83,7 @@ export const apiKeysAppRouter = router({
name: z.string(),
createdAt: z.date(),
keyId: z.string(),
+ lastUsedAt: z.date().nullish(),
}),
),
}),
@@ -94,8 +95,10 @@ export const apiKeysAppRouter = router({
id: true,
name: true,
createdAt: true,
+ lastUsedAt: true,
keyId: true,
},
+ orderBy: desc(apiKeys.createdAt),
});
return { keys: resp };
}),
diff --git a/packages/trpc/routers/assets.ts b/packages/trpc/routers/assets.ts
index 7be85446..c75f1e2e 100644
--- a/packages/trpc/routers/assets.ts
+++ b/packages/trpc/routers/assets.ts
@@ -1,57 +1,20 @@
-import { TRPCError } from "@trpc/server";
-import { and, desc, eq, sql } from "drizzle-orm";
import { z } from "zod";
-import { assets, bookmarks } from "@karakeep/db/schema";
-import { deleteAsset } from "@karakeep/shared/assetdb";
import {
zAssetSchema,
zAssetTypesSchema,
} from "@karakeep/shared/types/bookmarks";
-import { authedProcedure, Context, router } from "../index";
-import {
- isAllowedToAttachAsset,
- isAllowedToDetachAsset,
- mapDBAssetTypeToUserType,
- mapSchemaAssetTypeToDB,
-} from "../lib/attachments";
+import { authedProcedure, router } from "../index";
+import { Asset } from "../models/assets";
import { ensureBookmarkOwnership } from "./bookmarks";
-export const ensureAssetOwnership = async (opts: {
- ctx: Context;
- assetId: string;
-}) => {
- const asset = await opts.ctx.db.query.assets.findFirst({
- where: eq(bookmarks.id, opts.assetId),
- });
- if (!opts.ctx.user) {
- throw new TRPCError({
- code: "UNAUTHORIZED",
- message: "User is not authorized",
- });
- }
- if (!asset) {
- throw new TRPCError({
- code: "NOT_FOUND",
- message: "Asset not found",
- });
- }
- if (asset.userId != opts.ctx.user.id) {
- throw new TRPCError({
- code: "FORBIDDEN",
- message: "User is not allowed to access resource",
- });
- }
- return asset;
-};
-
export const assetsAppRouter = router({
list: authedProcedure
.input(
z.object({
limit: z.number().min(1).max(100).default(20),
- cursor: z.number().nullish(), // page number
+ cursor: z.number().nullish(),
}),
)
.output(
@@ -71,29 +34,10 @@ export const assetsAppRouter = router({
}),
)
.query(async ({ input, ctx }) => {
- const page = input.cursor ?? 1;
- const [results, totalCount] = await Promise.all([
- ctx.db
- .select()
- .from(assets)
- .where(eq(assets.userId, ctx.user.id))
- .orderBy(desc(assets.size))
- .limit(input.limit)
- .offset((page - 1) * input.limit),
- ctx.db
- .select({ count: sql<number>`count(*)` })
- .from(assets)
- .where(eq(assets.userId, ctx.user.id)),
- ]);
-
- return {
- assets: results.map((a) => ({
- ...a,
- assetType: mapDBAssetTypeToUserType(a.assetType),
- })),
- nextCursor: page * input.limit < totalCount[0].count ? page + 1 : null,
- totalCount: totalCount[0].count,
- };
+ return await Asset.list(ctx, {
+ limit: input.limit,
+ cursor: input.cursor ?? null,
+ });
}),
attachAsset: authedProcedure
.input(
@@ -108,29 +52,7 @@ export const assetsAppRouter = router({
.output(zAssetSchema)
.use(ensureBookmarkOwnership)
.mutation(async ({ input, ctx }) => {
- await ensureAssetOwnership({ ctx, assetId: input.asset.id });
- if (!isAllowedToAttachAsset(input.asset.assetType)) {
- throw new TRPCError({
- code: "BAD_REQUEST",
- message: "You can't attach this type of asset",
- });
- }
- const [updatedAsset] = await ctx.db
- .update(assets)
- .set({
- assetType: mapSchemaAssetTypeToDB(input.asset.assetType),
- bookmarkId: input.bookmarkId,
- })
- .where(
- and(eq(assets.id, input.asset.id), eq(assets.userId, ctx.user.id)),
- )
- .returning();
-
- return {
- id: updatedAsset.id,
- assetType: mapDBAssetTypeToUserType(updatedAsset.assetType),
- fileName: updatedAsset.fileName,
- };
+ return await Asset.attachAsset(ctx, input);
}),
replaceAsset: authedProcedure
.input(
@@ -143,41 +65,7 @@ export const assetsAppRouter = router({
.output(z.void())
.use(ensureBookmarkOwnership)
.mutation(async ({ input, ctx }) => {
- await Promise.all([
- ensureAssetOwnership({ ctx, assetId: input.oldAssetId }),
- ensureAssetOwnership({ ctx, assetId: input.newAssetId }),
- ]);
- const [oldAsset] = await ctx.db
- .select()
- .from(assets)
- .where(
- and(eq(assets.id, input.oldAssetId), eq(assets.userId, ctx.user.id)),
- )
- .limit(1);
- if (
- !isAllowedToAttachAsset(mapDBAssetTypeToUserType(oldAsset.assetType))
- ) {
- throw new TRPCError({
- code: "BAD_REQUEST",
- message: "You can't attach this type of asset",
- });
- }
-
- await ctx.db.transaction(async (tx) => {
- await tx.delete(assets).where(eq(assets.id, input.oldAssetId));
- await tx
- .update(assets)
- .set({
- bookmarkId: input.bookmarkId,
- assetType: oldAsset.assetType,
- })
- .where(eq(assets.id, input.newAssetId));
- });
-
- await deleteAsset({
- userId: ctx.user.id,
- assetId: input.oldAssetId,
- }).catch(() => ({}));
+ await Asset.replaceAsset(ctx, input);
}),
detachAsset: authedProcedure
.input(
@@ -189,34 +77,6 @@ export const assetsAppRouter = router({
.output(z.void())
.use(ensureBookmarkOwnership)
.mutation(async ({ input, ctx }) => {
- await ensureAssetOwnership({ ctx, assetId: input.assetId });
- const [oldAsset] = await ctx.db
- .select()
- .from(assets)
- .where(
- and(eq(assets.id, input.assetId), eq(assets.userId, ctx.user.id)),
- );
- if (
- !isAllowedToDetachAsset(mapDBAssetTypeToUserType(oldAsset.assetType))
- ) {
- throw new TRPCError({
- code: "BAD_REQUEST",
- message: "You can't deattach this type of asset",
- });
- }
- const result = await ctx.db
- .delete(assets)
- .where(
- and(
- eq(assets.id, input.assetId),
- eq(assets.bookmarkId, input.bookmarkId),
- ),
- );
- if (result.changes == 0) {
- throw new TRPCError({ code: "NOT_FOUND" });
- }
- await deleteAsset({ userId: ctx.user.id, assetId: input.assetId }).catch(
- () => ({}),
- );
+ await Asset.detachAsset(ctx, input);
}),
});
diff --git a/packages/trpc/routers/bookmarks.test.ts b/packages/trpc/routers/bookmarks.test.ts
index c272e015..aaee5447 100644
--- a/packages/trpc/routers/bookmarks.test.ts
+++ b/packages/trpc/routers/bookmarks.test.ts
@@ -331,6 +331,198 @@ describe("Bookmark Routes", () => {
).rejects.toThrow(/You must provide either a tagId or a tagName/);
});
+ test<CustomTestContext>("update tags - comprehensive edge cases", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0].bookmarks;
+
+ // Create two bookmarks
+ const bookmark1 = await api.createBookmark({
+ url: "https://bookmark1.com",
+ type: BookmarkTypes.LINK,
+ });
+ const bookmark2 = await api.createBookmark({
+ url: "https://bookmark2.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Test 1: Attach tags by name to bookmark1 (creates new tags)
+ await api.updateTags({
+ bookmarkId: bookmark1.id,
+ attach: [{ tagName: "existing-tag" }, { tagName: "shared-tag" }],
+ detach: [],
+ });
+
+ let b1 = await api.getBookmark({ bookmarkId: bookmark1.id });
+ expect(b1.tags.map((t) => t.name).sort()).toEqual([
+ "existing-tag",
+ "shared-tag",
+ ]);
+
+ const existingTagId = b1.tags.find((t) => t.name === "existing-tag")!.id;
+ const sharedTagId = b1.tags.find((t) => t.name === "shared-tag")!.id;
+
+ // Test 2: Attach existing tag by ID to bookmark2 (tag already exists in DB from bookmark1)
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [{ tagId: existingTagId }],
+ detach: [],
+ });
+
+ let b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ expect(b2.tags.map((t) => t.name)).toEqual(["existing-tag"]);
+
+ // Test 3: Attach existing tag by NAME to bookmark2 (tag already exists in DB)
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [{ tagName: "shared-tag" }],
+ detach: [],
+ });
+
+ b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ expect(b2.tags.map((t) => t.name).sort()).toEqual([
+ "existing-tag",
+ "shared-tag",
+ ]);
+
+ // Test 4: Re-attaching the same tag (idempotency) - should be no-op
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [{ tagId: existingTagId }],
+ detach: [],
+ });
+
+ b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ expect(b2.tags.map((t) => t.name).sort()).toEqual([
+ "existing-tag",
+ "shared-tag",
+ ]);
+
+ // Test 5: Detach non-existent tag by name (should be no-op)
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [],
+ detach: [{ tagName: "non-existent-tag" }],
+ });
+
+ b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ expect(b2.tags.map((t) => t.name).sort()).toEqual([
+ "existing-tag",
+ "shared-tag",
+ ]);
+
+ // Test 6: Mixed attach/detach with pre-existing tags
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [{ tagName: "new-tag" }, { tagId: sharedTagId }], // sharedTagId already attached
+ detach: [{ tagName: "existing-tag" }],
+ });
+
+ b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ expect(b2.tags.map((t) => t.name).sort()).toEqual([
+ "new-tag",
+ "shared-tag",
+ ]);
+
+ // Test 7: Detach by ID and re-attach by name in same operation
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [{ tagName: "new-tag" }], // Already exists, should be idempotent
+ detach: [{ tagId: sharedTagId }],
+ });
+
+ b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ expect(b2.tags.map((t) => t.name).sort()).toEqual(["new-tag"]);
+
+ // Verify bookmark1 still has its original tags (operations on bookmark2 didn't affect it)
+ b1 = await api.getBookmark({ bookmarkId: bookmark1.id });
+ expect(b1.tags.map((t) => t.name).sort()).toEqual([
+ "existing-tag",
+ "shared-tag",
+ ]);
+
+ // Test 8: Attach same tag multiple times in one operation (deduplication)
+ await api.updateTags({
+ bookmarkId: bookmark1.id,
+ attach: [{ tagName: "duplicate-test" }, { tagName: "duplicate-test" }],
+ detach: [],
+ });
+
+ b1 = await api.getBookmark({ bookmarkId: bookmark1.id });
+ const duplicateTagCount = b1.tags.filter(
+ (t) => t.name === "duplicate-test",
+ ).length;
+ expect(duplicateTagCount).toEqual(1); // Should only be attached once
+ });
+
+ test<CustomTestContext>("updateTags with attachedBy field", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0].bookmarks;
+ const bookmark = await api.createBookmark({
+ url: "https://bookmark.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ // Test 1: Attach tags with different attachedBy values
+ await api.updateTags({
+ bookmarkId: bookmark.id,
+ attach: [
+ { tagName: "ai-tag", attachedBy: "ai" },
+ { tagName: "human-tag", attachedBy: "human" },
+ { tagName: "default-tag" }, // Should default to "human"
+ ],
+ detach: [],
+ });
+
+ let b = await api.getBookmark({ bookmarkId: bookmark.id });
+ expect(b.tags.length).toEqual(3);
+
+ const aiTag = b.tags.find((t) => t.name === "ai-tag");
+ const humanTag = b.tags.find((t) => t.name === "human-tag");
+ const defaultTag = b.tags.find((t) => t.name === "default-tag");
+
+ expect(aiTag?.attachedBy).toEqual("ai");
+ expect(humanTag?.attachedBy).toEqual("human");
+ expect(defaultTag?.attachedBy).toEqual("human");
+
+ // Test 2: Attach existing tag by ID with different attachedBy
+ // First detach the ai-tag
+ await api.updateTags({
+ bookmarkId: bookmark.id,
+ attach: [],
+ detach: [{ tagId: aiTag!.id }],
+ });
+
+ // Re-attach the same tag but as human
+ await api.updateTags({
+ bookmarkId: bookmark.id,
+ attach: [{ tagId: aiTag!.id, attachedBy: "human" }],
+ detach: [],
+ });
+
+ b = await api.getBookmark({ bookmarkId: bookmark.id });
+ const reAttachedTag = b.tags.find((t) => t.id === aiTag!.id);
+ expect(reAttachedTag?.attachedBy).toEqual("human");
+
+ // Test 3: Attach existing tag by name with AI attachedBy
+ const bookmark2 = await api.createBookmark({
+ url: "https://bookmark2.com",
+ type: BookmarkTypes.LINK,
+ });
+
+ await api.updateTags({
+ bookmarkId: bookmark2.id,
+ attach: [{ tagName: "ai-tag", attachedBy: "ai" }],
+ detach: [],
+ });
+
+ const b2 = await api.getBookmark({ bookmarkId: bookmark2.id });
+ const aiTagOnB2 = b2.tags.find((t) => t.name === "ai-tag");
+ expect(aiTagOnB2?.attachedBy).toEqual("ai");
+ expect(aiTagOnB2?.id).toEqual(aiTag!.id); // Should be the same tag
+ });
+
test<CustomTestContext>("update bookmark text", async ({ apiCallers }) => {
const api = apiCallers[0].bookmarks;
const createdBookmark = await api.createBookmark({
diff --git a/packages/trpc/routers/bookmarks.ts b/packages/trpc/routers/bookmarks.ts
index 15ded2bd..782566cf 100644
--- a/packages/trpc/routers/bookmarks.ts
+++ b/packages/trpc/routers/bookmarks.ts
@@ -1,6 +1,5 @@
import { experimental_trpcMiddleware, TRPCError } from "@trpc/server";
import { and, eq, gt, inArray, lt, or } from "drizzle-orm";
-import invariant from "tiny-invariant";
import { z } from "zod";
import type { ZBookmarkContent } from "@karakeep/shared/types/bookmarks";
@@ -15,11 +14,14 @@ import {
bookmarkTexts,
customPrompts,
tagsOnBookmarks,
+ users,
} from "@karakeep/db/schema";
import {
AssetPreprocessingQueue,
LinkCrawlerQueue,
+ LowPriorityCrawlerQueue,
OpenAIQueue,
+ QueuePriority,
QuotaService,
triggerRuleEngineOnEvent,
triggerSearchReindex,
@@ -28,7 +30,7 @@ import {
import { SUPPORTED_BOOKMARK_ASSET_TYPES } from "@karakeep/shared/assetdb";
import serverConfig from "@karakeep/shared/config";
import { InferenceClientFactory } from "@karakeep/shared/inference";
-import { buildSummaryPrompt } from "@karakeep/shared/prompts";
+import { buildSummaryPrompt } from "@karakeep/shared/prompts.server";
import { EnqueueOptions } from "@karakeep/shared/queueing";
import { FilterQuery, getSearchClient } from "@karakeep/shared/search";
import { parseSearchQuery } from "@karakeep/shared/searchQueryParser";
@@ -49,9 +51,8 @@ import { normalizeTagName } from "@karakeep/shared/utils/tag";
import type { AuthedContext } from "../index";
import { authedProcedure, createRateLimitMiddleware, router } from "../index";
import { getBookmarkIdsFromMatcher } from "../lib/search";
+import { Asset } from "../models/assets";
import { BareBookmark, Bookmark } from "../models/bookmarks";
-import { ImportSession } from "../models/importSessions";
-import { ensureAssetOwnership } from "./assets";
export const ensureBookmarkOwnership = experimental_trpcMiddleware<{
ctx: AuthedContext;
@@ -121,173 +122,173 @@ export const bookmarksAppRouter = router({
// This doesn't 100% protect from duplicates because of races, but it's more than enough for this usecase.
const alreadyExists = await attemptToDedupLink(ctx, input.url);
if (alreadyExists) {
- if (input.importSessionId) {
- const session = await ImportSession.fromId(
- ctx,
- input.importSessionId,
- );
- await session.attachBookmark(alreadyExists.id);
- }
return { ...alreadyExists, alreadyExists: true };
}
}
- // Check user quota
- const quotaResult = await QuotaService.canCreateBookmark(
- ctx.db,
- ctx.user.id,
- );
- if (!quotaResult.result) {
- throw new TRPCError({
- code: "FORBIDDEN",
- message: quotaResult.error,
- });
- }
-
- const bookmark = await ctx.db.transaction(async (tx) => {
- const bookmark = (
- await tx
- .insert(bookmarks)
- .values({
- userId: ctx.user.id,
- title: input.title,
- type: input.type,
- archived: input.archived,
- favourited: input.favourited,
- note: input.note,
- summary: input.summary,
- createdAt: input.createdAt,
- source: input.source,
- })
- .returning()
- )[0];
+ const bookmark = await ctx.db.transaction(
+ async (tx) => {
+ // Check user quota
+ const quotaResult = await QuotaService.canCreateBookmark(
+ tx,
+ ctx.user.id,
+ );
+ if (!quotaResult.result) {
+ throw new TRPCError({
+ code: "FORBIDDEN",
+ message: quotaResult.error,
+ });
+ }
+ const bookmark = (
+ await tx
+ .insert(bookmarks)
+ .values({
+ userId: ctx.user.id,
+ title: input.title,
+ type: input.type,
+ archived: input.archived,
+ favourited: input.favourited,
+ note: input.note,
+ summary: input.summary,
+ createdAt: input.createdAt,
+ source: input.source,
+ // Only links currently support summarization. Let's set the status to null for other types for now.
+ summarizationStatus:
+ input.type === BookmarkTypes.LINK ? "pending" : null,
+ })
+ .returning()
+ )[0];
- let content: ZBookmarkContent;
+ let content: ZBookmarkContent;
- switch (input.type) {
- case BookmarkTypes.LINK: {
- const link = (
- await tx
- .insert(bookmarkLinks)
+ switch (input.type) {
+ case BookmarkTypes.LINK: {
+ const link = (
+ await tx
+ .insert(bookmarkLinks)
+ .values({
+ id: bookmark.id,
+ url: input.url.trim(),
+ })
+ .returning()
+ )[0];
+ if (input.precrawledArchiveId) {
+ await Asset.ensureOwnership(ctx, input.precrawledArchiveId);
+ await tx
+ .update(assets)
+ .set({
+ bookmarkId: bookmark.id,
+ assetType: AssetTypes.LINK_PRECRAWLED_ARCHIVE,
+ })
+ .where(
+ and(
+ eq(assets.id, input.precrawledArchiveId),
+ eq(assets.userId, ctx.user.id),
+ ),
+ );
+ }
+ content = {
+ type: BookmarkTypes.LINK,
+ ...link,
+ };
+ break;
+ }
+ case BookmarkTypes.TEXT: {
+ const text = (
+ await tx
+ .insert(bookmarkTexts)
+ .values({
+ id: bookmark.id,
+ text: input.text,
+ sourceUrl: input.sourceUrl,
+ })
+ .returning()
+ )[0];
+ content = {
+ type: BookmarkTypes.TEXT,
+ text: text.text ?? "",
+ sourceUrl: text.sourceUrl,
+ };
+ break;
+ }
+ case BookmarkTypes.ASSET: {
+ const [asset] = await tx
+ .insert(bookmarkAssets)
.values({
id: bookmark.id,
- url: input.url.trim(),
+ assetType: input.assetType,
+ assetId: input.assetId,
+ content: null,
+ metadata: null,
+ fileName: input.fileName ?? null,
+ sourceUrl: null,
})
- .returning()
- )[0];
- if (input.precrawledArchiveId) {
- await ensureAssetOwnership({
- ctx,
- assetId: input.precrawledArchiveId,
- });
+ .returning();
+ const uploadedAsset = await Asset.fromId(ctx, input.assetId);
+ uploadedAsset.ensureOwnership();
+ if (
+ !uploadedAsset.asset.contentType ||
+ !SUPPORTED_BOOKMARK_ASSET_TYPES.has(
+ uploadedAsset.asset.contentType,
+ )
+ ) {
+ throw new TRPCError({
+ code: "BAD_REQUEST",
+ message: "Unsupported asset type",
+ });
+ }
await tx
.update(assets)
.set({
bookmarkId: bookmark.id,
- assetType: AssetTypes.LINK_PRECRAWLED_ARCHIVE,
+ assetType: AssetTypes.BOOKMARK_ASSET,
})
.where(
and(
- eq(assets.id, input.precrawledArchiveId),
+ eq(assets.id, input.assetId),
eq(assets.userId, ctx.user.id),
),
);
+ content = {
+ type: BookmarkTypes.ASSET,
+ assetType: asset.assetType,
+ assetId: asset.assetId,
+ };
+ break;
}
- content = {
- type: BookmarkTypes.LINK,
- ...link,
- };
- break;
}
- case BookmarkTypes.TEXT: {
- const text = (
- await tx
- .insert(bookmarkTexts)
- .values({
- id: bookmark.id,
- text: input.text,
- sourceUrl: input.sourceUrl,
- })
- .returning()
- )[0];
- content = {
- type: BookmarkTypes.TEXT,
- text: text.text ?? "",
- sourceUrl: text.sourceUrl,
- };
- break;
- }
- case BookmarkTypes.ASSET: {
- const [asset] = await tx
- .insert(bookmarkAssets)
- .values({
- id: bookmark.id,
- assetType: input.assetType,
- assetId: input.assetId,
- content: null,
- metadata: null,
- fileName: input.fileName ?? null,
- sourceUrl: null,
- })
- .returning();
- const uploadedAsset = await ensureAssetOwnership({
- ctx,
- assetId: input.assetId,
- });
- if (
- !uploadedAsset.contentType ||
- !SUPPORTED_BOOKMARK_ASSET_TYPES.has(uploadedAsset.contentType)
- ) {
- throw new TRPCError({
- code: "BAD_REQUEST",
- message: "Unsupported asset type",
- });
- }
- await tx
- .update(assets)
- .set({
- bookmarkId: bookmark.id,
- assetType: AssetTypes.BOOKMARK_ASSET,
- })
- .where(
- and(
- eq(assets.id, input.assetId),
- eq(assets.userId, ctx.user.id),
- ),
- );
- content = {
- type: BookmarkTypes.ASSET,
- assetType: asset.assetType,
- assetId: asset.assetId,
- };
- break;
- }
- }
- return {
- alreadyExists: false,
- tags: [] as ZBookmarkTags[],
- assets: [],
- content,
- ...bookmark,
- };
- });
-
- if (input.importSessionId) {
- const session = await ImportSession.fromId(ctx, input.importSessionId);
- await session.attachBookmark(bookmark.id);
- }
+ return {
+ alreadyExists: false,
+ tags: [] as ZBookmarkTags[],
+ assets: [],
+ content,
+ ...bookmark,
+ };
+ },
+ {
+ behavior: "immediate",
+ },
+ );
const enqueueOpts: EnqueueOptions = {
// The lower the priority number, the sooner the job will be processed
- priority: input.crawlPriority === "low" ? 50 : 0,
+ priority:
+ input.crawlPriority === "low"
+ ? QueuePriority.Low
+ : QueuePriority.Default,
groupId: ctx.user.id,
};
switch (bookmark.content.type) {
case BookmarkTypes.LINK: {
// The crawling job triggers openai when it's done
- await LinkCrawlerQueue.enqueue(
+ // Use a separate queue for low priority crawling to avoid impacting main queue parallelism
+ const crawlerQueue =
+ input.crawlPriority === "low"
+ ? LowPriorityCrawlerQueue
+ : LinkCrawlerQueue;
+ await crawlerQueue.enqueue(
{
bookmarkId: bookmark.id,
},
@@ -317,22 +318,24 @@ export const bookmarksAppRouter = router({
}
}
- await triggerRuleEngineOnEvent(
- bookmark.id,
- [
- {
- type: "bookmarkAdded",
- },
- ],
- enqueueOpts,
- );
- await triggerSearchReindex(bookmark.id, enqueueOpts);
- await triggerWebhook(
- bookmark.id,
- "created",
- /* userId */ undefined,
- enqueueOpts,
- );
+ await Promise.all([
+ triggerRuleEngineOnEvent(
+ bookmark.id,
+ [
+ {
+ type: "bookmarkAdded",
+ },
+ ],
+ enqueueOpts,
+ ),
+ triggerSearchReindex(bookmark.id, enqueueOpts),
+ triggerWebhook(
+ bookmark.id,
+ "created",
+ /* userId */ undefined,
+ enqueueOpts,
+ ),
+ ]);
return bookmark;
}),
@@ -487,13 +490,14 @@ export const bookmarksAppRouter = router({
})),
);
}
- // Trigger re-indexing and webhooks
- await triggerSearchReindex(input.bookmarkId, {
- groupId: ctx.user.id,
- });
- await triggerWebhook(input.bookmarkId, "edited", ctx.user.id, {
- groupId: ctx.user.id,
- });
+ await Promise.all([
+ triggerSearchReindex(input.bookmarkId, {
+ groupId: ctx.user.id,
+ }),
+ triggerWebhook(input.bookmarkId, "edited", ctx.user.id, {
+ groupId: ctx.user.id,
+ }),
+ ]);
return updatedBookmark;
}),
@@ -532,12 +536,14 @@ export const bookmarksAppRouter = router({
),
);
});
- await triggerSearchReindex(input.bookmarkId, {
- groupId: ctx.user.id,
- });
- await triggerWebhook(input.bookmarkId, "edited", ctx.user.id, {
- groupId: ctx.user.id,
- });
+ await Promise.all([
+ triggerSearchReindex(input.bookmarkId, {
+ groupId: ctx.user.id,
+ }),
+ triggerWebhook(input.bookmarkId, "edited", ctx.user.id, {
+ groupId: ctx.user.id,
+ }),
+ ]);
}),
deleteBookmark: authedProcedure
@@ -559,24 +565,20 @@ export const bookmarksAppRouter = router({
z.object({
bookmarkId: z.string(),
archiveFullPage: z.boolean().optional().default(false),
+ storePdf: z.boolean().optional().default(false),
}),
)
.use(ensureBookmarkOwnership)
.mutation(async ({ input, ctx }) => {
- await ctx.db
- .update(bookmarkLinks)
- .set({
- crawlStatus: "pending",
- crawlStatusCode: null,
- })
- .where(eq(bookmarkLinks.id, input.bookmarkId));
- await LinkCrawlerQueue.enqueue(
+ await LowPriorityCrawlerQueue.enqueue(
{
bookmarkId: input.bookmarkId,
archiveFullPage: input.archiveFullPage,
+ storePdf: input.storePdf,
},
{
groupId: ctx.user.id,
+ priority: QueuePriority.Low,
},
);
}),
@@ -711,36 +713,109 @@ export const bookmarksAppRouter = router({
)
.use(ensureBookmarkOwnership)
.mutation(async ({ input, ctx }) => {
- const res = await ctx.db.transaction(async (tx) => {
- // Detaches
- const idsToRemove: string[] = [];
- if (input.detach.length > 0) {
- const namesToRemove: string[] = [];
- input.detach.forEach((detachInfo) => {
- if (detachInfo.tagId) {
- idsToRemove.push(detachInfo.tagId);
- }
- if (detachInfo.tagName) {
- namesToRemove.push(detachInfo.tagName);
- }
- });
+ // Helper function to fetch tag IDs and their names from a list of tag identifiers
+ const fetchTagIdsWithNames = async (
+ tagIdentifiers: { tagId?: string; tagName?: string }[],
+ ): Promise<{ id: string; name: string }[]> => {
+ const tagIds = tagIdentifiers.flatMap((t) =>
+ t.tagId ? [t.tagId] : [],
+ );
+ const tagNames = tagIdentifiers.flatMap((t) =>
+ t.tagName ? [t.tagName] : [],
+ );
- if (namesToRemove.length > 0) {
- (
- await tx.query.bookmarkTags.findMany({
- where: and(
- eq(bookmarkTags.userId, ctx.user.id),
- inArray(bookmarkTags.name, namesToRemove),
- ),
- columns: {
- id: true,
- },
- })
- ).forEach((tag) => {
- idsToRemove.push(tag.id);
- });
+ // Fetch tag IDs in parallel
+ const [byIds, byNames] = await Promise.all([
+ tagIds.length > 0
+ ? ctx.db
+ .select({ id: bookmarkTags.id, name: bookmarkTags.name })
+ .from(bookmarkTags)
+ .where(
+ and(
+ eq(bookmarkTags.userId, ctx.user.id),
+ inArray(bookmarkTags.id, tagIds),
+ ),
+ )
+ : Promise.resolve([]),
+ tagNames.length > 0
+ ? ctx.db
+ .select({ id: bookmarkTags.id, name: bookmarkTags.name })
+ .from(bookmarkTags)
+ .where(
+ and(
+ eq(bookmarkTags.userId, ctx.user.id),
+ inArray(bookmarkTags.name, tagNames),
+ ),
+ )
+ : Promise.resolve([]),
+ ]);
+
+ // Union results and deduplicate by tag ID
+ const seen = new Set<string>();
+ const results: { id: string; name: string }[] = [];
+
+ for (const tag of [...byIds, ...byNames]) {
+ if (!seen.has(tag.id)) {
+ seen.add(tag.id);
+ results.push({ id: tag.id, name: tag.name });
}
+ }
+
+ return results;
+ };
+
+ // Normalize tag names and create new tags outside transaction to reduce transaction duration
+ const normalizedAttachTags = input.attach.map((tag) => ({
+ tagId: tag.tagId,
+ tagName: tag.tagName ? normalizeTagName(tag.tagName) : undefined,
+ attachedBy: tag.attachedBy,
+ }));
+
+ {
+ // Create new tags
+ const toAddTagNames = normalizedAttachTags
+ .flatMap((i) => (i.tagName ? [i.tagName] : []))
+ .filter((n) => n.length > 0); // drop empty results
+
+ if (toAddTagNames.length > 0) {
+ await ctx.db
+ .insert(bookmarkTags)
+ .values(
+ toAddTagNames.map((name) => ({ name, userId: ctx.user.id })),
+ )
+ .onConflictDoNothing();
+ }
+ }
+
+      // Fetch tag IDs for attachment/detachment now that we know they all exist
+ const [attachTagsWithNames, detachTagsWithNames] = await Promise.all([
+ fetchTagIdsWithNames(normalizedAttachTags),
+ fetchTagIdsWithNames(input.detach),
+ ]);
+
+ // Build the attachedBy map from the fetched results
+ const tagIdToAttachedBy = new Map<string, "ai" | "human">();
+
+ for (const fetchedTag of attachTagsWithNames) {
+ // Find the corresponding input tag
+ const inputTag = normalizedAttachTags.find(
+ (t) =>
+ (t.tagId && t.tagId === fetchedTag.id) ||
+ (t.tagName && t.tagName === fetchedTag.name),
+ );
+
+ if (inputTag) {
+ tagIdToAttachedBy.set(fetchedTag.id, inputTag.attachedBy);
+ }
+ }
+
+ // Extract just the IDs for the transaction
+ const allIdsToAttach = attachTagsWithNames.map((t) => t.id);
+ const idsToRemove = detachTagsWithNames.map((t) => t.id);
+ const res = await ctx.db.transaction(async (tx) => {
+ // Detaches
+ if (idsToRemove.length > 0) {
await tx
.delete(tagsOnBookmarks)
.where(
@@ -751,67 +826,21 @@ export const bookmarksAppRouter = router({
);
}
- if (input.attach.length == 0) {
- return {
- bookmarkId: input.bookmarkId,
- attached: [],
- detached: idsToRemove,
- };
- }
-
- const toAddTagNames = input.attach
- .flatMap((i) => (i.tagName ? [i.tagName] : []))
- .map(normalizeTagName) // strip leading #
- .filter((n) => n.length > 0); // drop empty results
-
- const toAddTagIds = input.attach.flatMap((i) =>
- i.tagId ? [i.tagId] : [],
- );
-
- // New Tags
- if (toAddTagNames.length > 0) {
+ // Attach tags
+ if (allIdsToAttach.length > 0) {
await tx
- .insert(bookmarkTags)
+ .insert(tagsOnBookmarks)
.values(
- toAddTagNames.map((name) => ({ name, userId: ctx.user.id })),
+ allIdsToAttach.map((i) => ({
+ tagId: i,
+ bookmarkId: input.bookmarkId,
+ attachedBy: tagIdToAttachedBy.get(i) ?? "human",
+ })),
)
- .onConflictDoNothing()
- .returning();
+ .onConflictDoNothing();
}
- // If there is nothing to add, the "or" statement will become useless and
- // the query below will simply select all the existing tags for this user and assign them to the bookmark
- invariant(toAddTagNames.length > 0 || toAddTagIds.length > 0);
- const allIds = (
- await tx.query.bookmarkTags.findMany({
- where: and(
- eq(bookmarkTags.userId, ctx.user.id),
- or(
- toAddTagIds.length > 0
- ? inArray(bookmarkTags.id, toAddTagIds)
- : undefined,
- toAddTagNames.length > 0
- ? inArray(bookmarkTags.name, toAddTagNames)
- : undefined,
- ),
- ),
- columns: {
- id: true,
- },
- })
- ).map((t) => t.id);
-
- await tx
- .insert(tagsOnBookmarks)
- .values(
- allIds.map((i) => ({
- tagId: i,
- bookmarkId: input.bookmarkId,
- attachedBy: "human" as const,
- userId: ctx.user.id,
- })),
- )
- .onConflictDoNothing();
+ // Update bookmark modified timestamp
await tx
.update(bookmarks)
.set({ modifiedAt: new Date() })
@@ -824,7 +853,7 @@ export const bookmarksAppRouter = router({
return {
bookmarkId: input.bookmarkId,
- attached: allIds,
+ attached: allIdsToAttach,
detached: idsToRemove,
};
});
@@ -958,8 +987,15 @@ Author: ${bookmark.author ?? ""}
},
});
+ const userSettings = await ctx.db.query.users.findFirst({
+ where: eq(users.id, ctx.user.id),
+ columns: {
+ inferredTagLang: true,
+ },
+ });
+
const summaryPrompt = await buildSummaryPrompt(
- serverConfig.inference.inferredTagLang,
+ userSettings?.inferredTagLang ?? serverConfig.inference.inferredTagLang,
prompts.map((p) => p.text),
bookmarkDetails,
serverConfig.inference.contextLength,
@@ -981,12 +1017,14 @@ Author: ${bookmark.author ?? ""}
summary: summary.response,
})
.where(eq(bookmarks.id, input.bookmarkId));
- await triggerSearchReindex(input.bookmarkId, {
- groupId: ctx.user.id,
- });
- await triggerWebhook(input.bookmarkId, "edited", ctx.user.id, {
- groupId: ctx.user.id,
- });
+ await Promise.all([
+ triggerSearchReindex(input.bookmarkId, {
+ groupId: ctx.user.id,
+ }),
+ triggerWebhook(input.bookmarkId, "edited", ctx.user.id, {
+ groupId: ctx.user.id,
+ }),
+ ]);
return {
bookmarkId: input.bookmarkId,
diff --git a/packages/trpc/routers/config.ts b/packages/trpc/routers/config.ts
new file mode 100644
index 00000000..8d09a2ce
--- /dev/null
+++ b/packages/trpc/routers/config.ts
@@ -0,0 +1,10 @@
+import { clientConfig } from "@karakeep/shared/config";
+import { zClientConfigSchema } from "@karakeep/shared/types/config";
+
+import { publicProcedure, router } from "../index";
+
+export const configAppRouter = router({
+ clientConfig: publicProcedure
+ .output(zClientConfigSchema)
+ .query(() => clientConfig),
+});
diff --git a/packages/trpc/routers/feeds.test.ts b/packages/trpc/routers/feeds.test.ts
new file mode 100644
index 00000000..e80aab0a
--- /dev/null
+++ b/packages/trpc/routers/feeds.test.ts
@@ -0,0 +1,154 @@
+import { beforeEach, describe, expect, test } from "vitest";
+
+import type { CustomTestContext } from "../testUtils";
+import { defaultBeforeEach } from "../testUtils";
+
+beforeEach<CustomTestContext>(defaultBeforeEach(true));
+
+describe("Feed Routes", () => {
+ test<CustomTestContext>("create feed", async ({ apiCallers }) => {
+ const api = apiCallers[0].feeds;
+ const newFeed = await api.create({
+ name: "Test Feed",
+ url: "https://example.com/feed.xml",
+ enabled: true,
+ });
+
+ expect(newFeed).toBeDefined();
+ expect(newFeed.name).toEqual("Test Feed");
+ expect(newFeed.url).toEqual("https://example.com/feed.xml");
+ expect(newFeed.enabled).toBe(true);
+ });
+
+ test<CustomTestContext>("update feed", async ({ apiCallers }) => {
+ const api = apiCallers[0].feeds;
+
+ // First, create a feed to update
+ const createdFeed = await api.create({
+ name: "Test Feed",
+ url: "https://example.com/feed.xml",
+ enabled: true,
+ });
+
+ // Update it
+ const updatedFeed = await api.update({
+ feedId: createdFeed.id,
+ name: "Updated Feed",
+ url: "https://updated-example.com/feed.xml",
+ enabled: false,
+ });
+
+ expect(updatedFeed.name).toEqual("Updated Feed");
+ expect(updatedFeed.url).toEqual("https://updated-example.com/feed.xml");
+ expect(updatedFeed.enabled).toBe(false);
+
+ // Test updating a non-existent feed
+ await expect(() =>
+ api.update({
+ feedId: "non-existent-id",
+ name: "Fail",
+ url: "https://fail.com",
+ enabled: true,
+ }),
+ ).rejects.toThrow(/Feed not found/);
+ });
+
+ test<CustomTestContext>("list feeds", async ({ apiCallers }) => {
+ const api = apiCallers[0].feeds;
+
+ // Create a couple of feeds
+ await api.create({
+ name: "Feed 1",
+ url: "https://example1.com/feed.xml",
+ enabled: true,
+ });
+ await api.create({
+ name: "Feed 2",
+ url: "https://example2.com/feed.xml",
+ enabled: true,
+ });
+
+ const result = await api.list();
+ expect(result.feeds).toBeDefined();
+ expect(result.feeds.length).toBeGreaterThanOrEqual(2);
+ expect(result.feeds.some((f) => f.name === "Feed 1")).toBe(true);
+ expect(result.feeds.some((f) => f.name === "Feed 2")).toBe(true);
+ });
+
+ test<CustomTestContext>("delete feed", async ({ apiCallers }) => {
+ const api = apiCallers[0].feeds;
+
+ // Create a feed to delete
+ const createdFeed = await api.create({
+ name: "Test Feed",
+ url: "https://example.com/feed.xml",
+ enabled: true,
+ });
+
+ // Delete it
+ await api.delete({ feedId: createdFeed.id });
+
+ // Verify it's deleted
+ await expect(() =>
+ api.update({
+ feedId: createdFeed.id,
+ name: "Updated",
+ url: "https://updated.com",
+ enabled: true,
+ }),
+ ).rejects.toThrow(/Feed not found/);
+ });
+
+ test<CustomTestContext>("privacy for feeds", async ({ apiCallers }) => {
+ const user1Feed = await apiCallers[0].feeds.create({
+ name: "User 1 Feed",
+ url: "https://user1-feed.com/feed.xml",
+ enabled: true,
+ });
+ const user2Feed = await apiCallers[1].feeds.create({
+ name: "User 2 Feed",
+ url: "https://user2-feed.com/feed.xml",
+ enabled: true,
+ });
+
+ // User 1 should not access User 2's feed
+ await expect(() =>
+ apiCallers[0].feeds.delete({ feedId: user2Feed.id }),
+ ).rejects.toThrow(/User is not allowed to access resource/);
+ await expect(() =>
+ apiCallers[0].feeds.update({
+ feedId: user2Feed.id,
+ name: "Fail",
+ url: "https://fail.com",
+ enabled: true,
+ }),
+ ).rejects.toThrow(/User is not allowed to access resource/);
+
+ // List should only show the correct user's feeds
+ const user1List = await apiCallers[0].feeds.list();
+ expect(user1List.feeds.some((f) => f.id === user1Feed.id)).toBe(true);
+ expect(user1List.feeds.some((f) => f.id === user2Feed.id)).toBe(false);
+ });
+
+ test<CustomTestContext>("feed limit enforcement", async ({ apiCallers }) => {
+ const api = apiCallers[0].feeds;
+
+ // Create 1000 feeds (the maximum)
+ for (let i = 0; i < 1000; i++) {
+ await api.create({
+ name: `Feed ${i}`,
+ url: `https://example${i}.com/feed.xml`,
+ enabled: true,
+ });
+ }
+
+ // The 1001st feed should fail
+ await expect(() =>
+ api.create({
+ name: "Feed 1001",
+ url: "https://example1001.com/feed.xml",
+ enabled: true,
+ }),
+ ).rejects.toThrow(/Maximum number of RSS feeds \(1000\) reached/);
+ });
+});
diff --git a/packages/trpc/routers/importSessions.test.ts b/packages/trpc/routers/importSessions.test.ts
index 9ef0de6f..f257ad3b 100644
--- a/packages/trpc/routers/importSessions.test.ts
+++ b/packages/trpc/routers/importSessions.test.ts
@@ -1,12 +1,13 @@
-import { eq } from "drizzle-orm";
import { beforeEach, describe, expect, test } from "vitest";
import { z } from "zod";
-import { bookmarks } from "@karakeep/db/schema";
import {
- BookmarkTypes,
- zNewBookmarkRequestSchema,
-} from "@karakeep/shared/types/bookmarks";
+ bookmarkLinks,
+ bookmarks,
+ bookmarkTexts,
+ importStagingBookmarks,
+} from "@karakeep/db/schema";
+import { BookmarkTypes } from "@karakeep/shared/types/bookmarks";
import {
zCreateImportSessionRequestSchema,
zDeleteImportSessionRequestSchema,
@@ -20,17 +21,6 @@ import { defaultBeforeEach } from "../testUtils";
beforeEach<CustomTestContext>(defaultBeforeEach(true));
describe("ImportSessions Routes", () => {
- async function createTestBookmark(api: APICallerType, sessionId: string) {
- const newBookmarkInput: z.infer<typeof zNewBookmarkRequestSchema> = {
- type: BookmarkTypes.TEXT,
- text: "Test bookmark text",
- importSessionId: sessionId,
- };
- const createdBookmark =
- await api.bookmarks.createBookmark(newBookmarkInput);
- return createdBookmark.id;
- }
-
async function createTestList(api: APICallerType) {
const newListInput: z.infer<typeof zNewBookmarkListSchema> = {
name: "Test Import List",
@@ -98,8 +88,15 @@ describe("ImportSessions Routes", () => {
const session = await api.importSessions.createImportSession({
name: "Test Import Session",
});
- await createTestBookmark(api, session.id);
- await createTestBookmark(api, session.id);
+
+ // Stage bookmarks using the staging flow
+ await api.importSessions.stageImportedBookmarks({
+ importSessionId: session.id,
+ bookmarks: [
+ { type: "text", content: "Test bookmark 1", tags: [], listIds: [] },
+ { type: "text", content: "Test bookmark 2", tags: [], listIds: [] },
+ ],
+ });
const statsInput: z.infer<typeof zGetImportSessionStatsRequestSchema> = {
importSessionId: session.id,
@@ -110,7 +107,7 @@ describe("ImportSessions Routes", () => {
expect(stats).toMatchObject({
id: session.id,
name: "Test Import Session",
- status: "in_progress",
+ status: "staging",
totalBookmarks: 2,
pendingBookmarks: 2,
completedBookmarks: 0,
@@ -119,31 +116,191 @@ describe("ImportSessions Routes", () => {
});
});
- test<CustomTestContext>("marks text-only imports as completed when tagging succeeds", async ({
+ test<CustomTestContext>("stats reflect crawl and tagging status for completed staging bookmarks", async ({
apiCallers,
db,
}) => {
const api = apiCallers[0];
+
const session = await api.importSessions.createImportSession({
- name: "Text Import Session",
+ name: "Test Import Session",
});
- const bookmarkId = await createTestBookmark(api, session.id);
- await db
- .update(bookmarks)
- .set({ taggingStatus: "success" })
- .where(eq(bookmarks.id, bookmarkId));
+ // Create bookmarks with different crawl/tag statuses
+ const user = (await db.query.users.findFirst())!;
+
+ // 1. Link bookmark: crawl success, tag success -> completed
+ const [completedLinkBookmark] = await db
+ .insert(bookmarks)
+ .values({
+ userId: user.id,
+ type: BookmarkTypes.LINK,
+ taggingStatus: "success",
+ })
+ .returning();
+ await db.insert(bookmarkLinks).values({
+ id: completedLinkBookmark.id,
+ url: "https://example.com/1",
+ crawlStatus: "success",
+ });
+
+ // 2. Link bookmark: crawl pending, tag success -> processing
+ const [crawlPendingBookmark] = await db
+ .insert(bookmarks)
+ .values({
+ userId: user.id,
+ type: BookmarkTypes.LINK,
+ taggingStatus: "success",
+ })
+ .returning();
+ await db.insert(bookmarkLinks).values({
+ id: crawlPendingBookmark.id,
+ url: "https://example.com/2",
+ crawlStatus: "pending",
+ });
+
+ // 3. Text bookmark: tag pending -> processing
+ const [tagPendingBookmark] = await db
+ .insert(bookmarks)
+ .values({
+ userId: user.id,
+ type: BookmarkTypes.TEXT,
+ taggingStatus: "pending",
+ })
+ .returning();
+ await db.insert(bookmarkTexts).values({
+ id: tagPendingBookmark.id,
+ text: "Test text",
+ });
+
+ // 4. Link bookmark: crawl failure -> failed
+ const [crawlFailedBookmark] = await db
+ .insert(bookmarks)
+ .values({
+ userId: user.id,
+ type: BookmarkTypes.LINK,
+ taggingStatus: "success",
+ })
+ .returning();
+ await db.insert(bookmarkLinks).values({
+ id: crawlFailedBookmark.id,
+ url: "https://example.com/3",
+ crawlStatus: "failure",
+ });
+
+ // 5. Text bookmark: tag failure -> failed
+ const [tagFailedBookmark] = await db
+ .insert(bookmarks)
+ .values({
+ userId: user.id,
+ type: BookmarkTypes.TEXT,
+ taggingStatus: "failure",
+ })
+ .returning();
+ await db.insert(bookmarkTexts).values({
+ id: tagFailedBookmark.id,
+ text: "Test text 2",
+ });
+
+ // 6. Text bookmark: tag success (no crawl needed) -> completed
+ const [completedTextBookmark] = await db
+ .insert(bookmarks)
+ .values({
+ userId: user.id,
+ type: BookmarkTypes.TEXT,
+ taggingStatus: "success",
+ })
+ .returning();
+ await db.insert(bookmarkTexts).values({
+ id: completedTextBookmark.id,
+ text: "Test text 3",
+ });
+
+ // Create staging bookmarks in different states
+ // Note: With the new import worker design, items stay in "processing" until
+ // crawl/tag is done. Only then do they move to "completed".
+ await db.insert(importStagingBookmarks).values([
+ // Staging pending -> pendingBookmarks
+ {
+ importSessionId: session.id,
+ type: "text",
+ content: "pending staging",
+ status: "pending",
+ },
+ // Staging processing (no bookmark yet) -> processingBookmarks
+ {
+ importSessionId: session.id,
+ type: "text",
+ content: "processing staging",
+ status: "processing",
+ },
+ // Staging failed -> failedBookmarks
+ {
+ importSessionId: session.id,
+ type: "text",
+ content: "failed staging",
+ status: "failed",
+ },
+ // Staging completed + crawl/tag success -> completedBookmarks
+ {
+ importSessionId: session.id,
+ type: "link",
+ url: "https://example.com/1",
+ status: "completed",
+ resultBookmarkId: completedLinkBookmark.id,
+ },
+ // Staging processing + crawl pending -> processingBookmarks (waiting for crawl)
+ {
+ importSessionId: session.id,
+ type: "link",
+ url: "https://example.com/2",
+ status: "processing",
+ resultBookmarkId: crawlPendingBookmark.id,
+ },
+ // Staging processing + tag pending -> processingBookmarks (waiting for tag)
+ {
+ importSessionId: session.id,
+ type: "text",
+ content: "tag pending",
+ status: "processing",
+ resultBookmarkId: tagPendingBookmark.id,
+ },
+ // Staging completed + crawl failure -> completedBookmarks (failure is terminal)
+ {
+ importSessionId: session.id,
+ type: "link",
+ url: "https://example.com/3",
+ status: "completed",
+ resultBookmarkId: crawlFailedBookmark.id,
+ },
+ // Staging completed + tag failure -> completedBookmarks (failure is terminal)
+ {
+ importSessionId: session.id,
+ type: "text",
+ content: "tag failed",
+ status: "completed",
+ resultBookmarkId: tagFailedBookmark.id,
+ },
+ // Staging completed + tag success (text, no crawl) -> completedBookmarks
+ {
+ importSessionId: session.id,
+ type: "text",
+ content: "completed text",
+ status: "completed",
+ resultBookmarkId: completedTextBookmark.id,
+ },
+ ]);
const stats = await api.importSessions.getImportSessionStats({
importSessionId: session.id,
});
expect(stats).toMatchObject({
- completedBookmarks: 1,
- pendingBookmarks: 0,
- failedBookmarks: 0,
- totalBookmarks: 1,
- status: "completed",
+ totalBookmarks: 9,
+ pendingBookmarks: 1, // staging pending
+ processingBookmarks: 3, // staging processing (no bookmark) + crawl pending + tag pending
+ completedBookmarks: 4, // link success + text success + crawl failure + tag failure
+ failedBookmarks: 1, // staging failed
});
});
@@ -215,7 +372,7 @@ describe("ImportSessions Routes", () => {
).rejects.toThrow("Import session not found");
});
- test<CustomTestContext>("cannot attach other user's bookmark", async ({
+ test<CustomTestContext>("cannot stage other user's session", async ({
apiCallers,
}) => {
const api1 = apiCallers[0];
@@ -228,7 +385,17 @@ describe("ImportSessions Routes", () => {
// User 1 tries to attach User 2's bookmark
await expect(
- createTestBookmark(api2, session.id), // User 2's bookmark
+ api2.importSessions.stageImportedBookmarks({
+ importSessionId: session.id,
+ bookmarks: [
+ {
+ type: "text",
+ content: "Test bookmark",
+ tags: [],
+ listIds: [],
+ },
+ ],
+ }),
).rejects.toThrow("Import session not found");
});
});
diff --git a/packages/trpc/routers/importSessions.ts b/packages/trpc/routers/importSessions.ts
index 4bdc4f29..62263bdd 100644
--- a/packages/trpc/routers/importSessions.ts
+++ b/packages/trpc/routers/importSessions.ts
@@ -1,5 +1,8 @@
+import { experimental_trpcMiddleware } from "@trpc/server";
+import { and, eq, gt } from "drizzle-orm";
import { z } from "zod";
+import { importStagingBookmarks } from "@karakeep/db/schema";
import {
zCreateImportSessionRequestSchema,
zDeleteImportSessionRequestSchema,
@@ -9,9 +12,26 @@ import {
zListImportSessionsResponseSchema,
} from "@karakeep/shared/types/importSessions";
+import type { AuthedContext } from "../index";
import { authedProcedure, router } from "../index";
import { ImportSession } from "../models/importSessions";
+const ensureImportSessionAccess = experimental_trpcMiddleware<{
+ ctx: AuthedContext;
+ input: { importSessionId: string };
+}>().create(async (opts) => {
+ const importSession = await ImportSession.fromId(
+ opts.ctx,
+ opts.input.importSessionId,
+ );
+ return opts.next({
+ ctx: {
+ ...opts.ctx,
+ importSession,
+ },
+ });
+});
+
export const importSessionsRouter = router({
createImportSession: authedProcedure
.input(zCreateImportSessionRequestSchema)
@@ -45,4 +65,93 @@ export const importSessionsRouter = router({
await session.delete();
return { success: true };
}),
+
+ stageImportedBookmarks: authedProcedure
+ .input(
+ z.object({
+ importSessionId: z.string(),
+ bookmarks: z
+ .array(
+ z.object({
+ type: z.enum(["link", "text", "asset"]),
+ url: z.string().optional(),
+ title: z.string().optional(),
+ content: z.string().optional(),
+ note: z.string().optional(),
+ tags: z.array(z.string()).default([]),
+ listIds: z.array(z.string()).default([]),
+ sourceAddedAt: z.date().optional(),
+ }),
+ )
+ .max(50),
+ }),
+ )
+ .use(ensureImportSessionAccess)
+ .mutation(async ({ input, ctx }) => {
+ await ctx.importSession.stageBookmarks(input.bookmarks);
+ }),
+
+ finalizeImportStaging: authedProcedure
+ .input(z.object({ importSessionId: z.string() }))
+ .use(ensureImportSessionAccess)
+ .mutation(async ({ ctx }) => {
+ await ctx.importSession.finalize();
+ }),
+
+ pauseImportSession: authedProcedure
+ .input(z.object({ importSessionId: z.string() }))
+ .use(ensureImportSessionAccess)
+ .mutation(async ({ ctx }) => {
+ await ctx.importSession.pause();
+ }),
+
+ resumeImportSession: authedProcedure
+ .input(z.object({ importSessionId: z.string() }))
+ .use(ensureImportSessionAccess)
+ .mutation(async ({ ctx }) => {
+ await ctx.importSession.resume();
+ }),
+
+ getImportSessionResults: authedProcedure
+ .input(
+ z.object({
+ importSessionId: z.string(),
+ filter: z
+ .enum(["all", "accepted", "rejected", "skipped_duplicate", "pending"])
+ .optional(),
+ cursor: z.string().optional(),
+ limit: z.number().default(50),
+ }),
+ )
+ .use(ensureImportSessionAccess)
+ .query(async ({ ctx, input }) => {
+ const results = await ctx.db
+ .select()
+ .from(importStagingBookmarks)
+ .where(
+ and(
+ eq(
+ importStagingBookmarks.importSessionId,
+ ctx.importSession.session.id,
+ ),
+ input.filter && input.filter !== "all"
+ ? input.filter === "pending"
+ ? eq(importStagingBookmarks.status, "pending")
+ : eq(importStagingBookmarks.result, input.filter)
+ : undefined,
+ input.cursor
+ ? gt(importStagingBookmarks.id, input.cursor)
+ : undefined,
+ ),
+ )
+ .orderBy(importStagingBookmarks.id)
+ .limit(input.limit + 1);
+
+ // Return with pagination info
+ const hasMore = results.length > input.limit;
+ return {
+ items: results.slice(0, input.limit),
+ nextCursor: hasMore ? results[input.limit - 1].id : null,
+ };
+ }),
});
diff --git a/packages/trpc/routers/lists.test.ts b/packages/trpc/routers/lists.test.ts
index 8797b35e..214df32a 100644
--- a/packages/trpc/routers/lists.test.ts
+++ b/packages/trpc/routers/lists.test.ts
@@ -594,3 +594,385 @@ describe("recursive delete", () => {
expect(lists.lists.find((l) => l.id === child.id)).toBeUndefined();
});
});
+
+describe("Nested smart lists", () => {
+ test<CustomTestContext>("smart list can reference another smart list", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create a bookmark that is favourited
+ const bookmark1 = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Favourited bookmark",
+ });
+ await api.bookmarks.updateBookmark({
+ bookmarkId: bookmark1.id,
+ favourited: true,
+ });
+
+ // Create a bookmark that is not favourited
+ const bookmark2 = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Non-favourited bookmark",
+ });
+
+ // Create a smart list that matches favourited bookmarks
+ await api.lists.create({
+ name: "Favourites",
+ type: "smart",
+ query: "is:fav",
+ icon: "⭐",
+ });
+
+ // Create a smart list that references the first smart list
+ const smartListB = await api.lists.create({
+ name: "From Favourites",
+ type: "smart",
+ query: "list:Favourites",
+ icon: "📋",
+ });
+
+ // Get bookmarks from the nested smart list
+ const bookmarksInSmartListB = await api.bookmarks.getBookmarks({
+ listId: smartListB.id,
+ });
+
+ // Should contain the favourited bookmark
+ expect(bookmarksInSmartListB.bookmarks.length).toBe(1);
+ expect(bookmarksInSmartListB.bookmarks[0].id).toBe(bookmark1.id);
+
+ // Verify bookmark2 is not in the nested smart list
+ expect(
+ bookmarksInSmartListB.bookmarks.find((b) => b.id === bookmark2.id),
+ ).toBeUndefined();
+ });
+
+ test<CustomTestContext>("nested smart lists with multiple levels", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create a bookmark that is archived
+ const bookmark = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Archived bookmark",
+ });
+ await api.bookmarks.updateBookmark({
+ bookmarkId: bookmark.id,
+ archived: true,
+ });
+
+ // Create smart list A: matches archived bookmarks
+ await api.lists.create({
+ name: "Archived",
+ type: "smart",
+ query: "is:archived",
+ icon: "đŸ“Ļ",
+ });
+
+ // Create smart list B: references list A
+ await api.lists.create({
+ name: "Level1",
+ type: "smart",
+ query: "list:Archived",
+ icon: "1ī¸âƒŖ",
+ });
+
+ // Create smart list C: references list B (3 levels deep)
+ const smartListC = await api.lists.create({
+ name: "Level2",
+ type: "smart",
+ query: "list:Level1",
+ icon: "2ī¸âƒŖ",
+ });
+
+ // Get bookmarks from the deepest nested smart list
+ const bookmarksInSmartListC = await api.bookmarks.getBookmarks({
+ listId: smartListC.id,
+ });
+
+ // Should contain the archived bookmark
+ expect(bookmarksInSmartListC.bookmarks.length).toBe(1);
+ expect(bookmarksInSmartListC.bookmarks[0].id).toBe(bookmark.id);
+ });
+
+ test<CustomTestContext>("smart list with inverse reference to another smart list", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create two bookmarks
+ const favouritedBookmark = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Favourited bookmark",
+ });
+ await api.bookmarks.updateBookmark({
+ bookmarkId: favouritedBookmark.id,
+ favourited: true,
+ });
+
+ const normalBookmark = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Normal bookmark",
+ });
+
+ // Create a smart list that matches favourited bookmarks
+ await api.lists.create({
+ name: "Favourites",
+ type: "smart",
+ query: "is:fav",
+ icon: "⭐",
+ });
+
+ // Create a smart list with negative reference to Favourites
+ const notInFavourites = await api.lists.create({
+ name: "Not In Favourites",
+ type: "smart",
+ query: "-list:Favourites",
+ icon: "❌",
+ });
+
+ // Get bookmarks from the smart list
+ const bookmarksNotInFav = await api.bookmarks.getBookmarks({
+ listId: notInFavourites.id,
+ });
+
+ // Should contain only the non-favourited bookmark
+ expect(bookmarksNotInFav.bookmarks.length).toBe(1);
+ expect(bookmarksNotInFav.bookmarks[0].id).toBe(normalBookmark.id);
+ });
+
+ test<CustomTestContext>("circular reference between smart lists returns empty", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create a bookmark
+ const bookmark = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Test bookmark",
+ });
+ await api.bookmarks.updateBookmark({
+ bookmarkId: bookmark.id,
+ favourited: true,
+ });
+
+ // Create smart list A that references smart list B
+ const smartListA = await api.lists.create({
+ name: "ListA",
+ type: "smart",
+ query: "list:ListB",
+ icon: "đŸ…°ī¸",
+ });
+
+ // Create smart list B that references smart list A (circular!)
+ await api.lists.create({
+ name: "ListB",
+ type: "smart",
+ query: "list:ListA",
+ icon: "đŸ…ąī¸",
+ });
+
+ // Querying ListA should return empty because of the circular reference
+ const bookmarksInListA = await api.bookmarks.getBookmarks({
+ listId: smartListA.id,
+ });
+
+ // Should be empty due to circular reference detection
+ expect(bookmarksInListA.bookmarks.length).toBe(0);
+ });
+
+ test<CustomTestContext>("self-referencing smart list returns empty", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create a bookmark
+ await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Test bookmark",
+ });
+
+ // Create a smart list that references itself
+ const selfRefList = await api.lists.create({
+ name: "SelfRef",
+ type: "smart",
+ query: "list:SelfRef",
+ icon: "🔄",
+ });
+
+ // Querying should return empty because of self-reference
+ const bookmarks = await api.bookmarks.getBookmarks({
+ listId: selfRefList.id,
+ });
+
+ expect(bookmarks.bookmarks.length).toBe(0);
+ });
+
+ test<CustomTestContext>("three-way circular reference returns empty", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create a bookmark
+ await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Test bookmark",
+ });
+
+ // Create three smart lists with circular references: A -> B -> C -> A
+ const listA = await api.lists.create({
+ name: "CircularA",
+ type: "smart",
+ query: "list:CircularB",
+ icon: "đŸ…°ī¸",
+ });
+
+ await api.lists.create({
+ name: "CircularB",
+ type: "smart",
+ query: "list:CircularC",
+ icon: "đŸ…ąī¸",
+ });
+
+ await api.lists.create({
+ name: "CircularC",
+ type: "smart",
+ query: "list:CircularA",
+ icon: "ÂŠī¸",
+ });
+
+ // Querying any of them should return empty due to circular reference
+ const bookmarksInListA = await api.bookmarks.getBookmarks({
+ listId: listA.id,
+ });
+
+ expect(bookmarksInListA.bookmarks.length).toBe(0);
+ });
+
+ test<CustomTestContext>("smart list traversal above max visited lists returns empty", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ const bookmark = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Depth test bookmark",
+ });
+
+ const manualList = await api.lists.create({
+ name: "DepthBaseManual",
+ type: "manual",
+ icon: "📋",
+ });
+ await api.lists.addToList({
+ listId: manualList.id,
+ bookmarkId: bookmark.id,
+ });
+
+ const maxVisitedLists = 30;
+ const overLimitChainLength = maxVisitedLists + 1;
+
+ for (let i = overLimitChainLength; i >= 2; i--) {
+ await api.lists.create({
+ name: `DepthL${i}`,
+ type: "smart",
+ query:
+ i === overLimitChainLength
+ ? "list:DepthBaseManual"
+ : `list:DepthL${i + 1}`,
+ icon: "D",
+ });
+ }
+
+ const depthRoot = await api.lists.create({
+ name: "DepthL1",
+ type: "smart",
+ query: "list:DepthL2",
+ icon: "D",
+ });
+
+ const bookmarksInRoot = await api.bookmarks.getBookmarks({
+ listId: depthRoot.id,
+ });
+
+ expect(bookmarksInRoot.bookmarks.length).toBe(0);
+ });
+
+ test<CustomTestContext>("smart list references non-existent list returns empty", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create a bookmark
+ await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Test bookmark",
+ });
+
+ // Create a smart list that references a non-existent list
+ const smartList = await api.lists.create({
+ name: "RefNonExistent",
+ type: "smart",
+ query: "list:NonExistentList",
+ icon: "❓",
+ });
+
+ // Should return empty since the referenced list doesn't exist
+ const bookmarks = await api.bookmarks.getBookmarks({
+ listId: smartList.id,
+ });
+
+ expect(bookmarks.bookmarks.length).toBe(0);
+ });
+
+ test<CustomTestContext>("smart list can reference manual list", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0];
+
+ // Create bookmarks
+ const bookmark1 = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Bookmark in manual list",
+ });
+ const bookmark2 = await api.bookmarks.createBookmark({
+ type: BookmarkTypes.TEXT,
+ text: "Bookmark not in list",
+ });
+
+ // Create a manual list and add bookmark1
+ const manualList = await api.lists.create({
+ name: "ManualList",
+ type: "manual",
+ icon: "📋",
+ });
+ await api.lists.addToList({
+ listId: manualList.id,
+ bookmarkId: bookmark1.id,
+ });
+
+ // Create a smart list that references the manual list
+ const smartList = await api.lists.create({
+ name: "SmartRefManual",
+ type: "smart",
+ query: "list:ManualList",
+ icon: "🔗",
+ });
+
+ // Get bookmarks from the smart list
+ const bookmarksInSmartList = await api.bookmarks.getBookmarks({
+ listId: smartList.id,
+ });
+
+ // Should contain only bookmark1
+ expect(bookmarksInSmartList.bookmarks.length).toBe(1);
+ expect(bookmarksInSmartList.bookmarks[0].id).toBe(bookmark1.id);
+
+ // Verify bookmark2 is not in the smart list
+ expect(
+ bookmarksInSmartList.bookmarks.find((b) => b.id === bookmark2.id),
+ ).toBeUndefined();
+ });
+});
diff --git a/packages/trpc/routers/lists.ts b/packages/trpc/routers/lists.ts
index 296679f3..bca3dc53 100644
--- a/packages/trpc/routers/lists.ts
+++ b/packages/trpc/routers/lists.ts
@@ -302,6 +302,7 @@ export const listsAppRouter = router({
id: z.string(),
name: z.string(),
email: z.string().nullable(),
+ image: z.string().nullable(),
}),
}),
),
@@ -310,6 +311,7 @@ export const listsAppRouter = router({
id: z.string(),
name: z.string(),
email: z.string().nullable(),
+ image: z.string().nullable(),
})
.nullable(),
}),
diff --git a/packages/trpc/routers/tags.ts b/packages/trpc/routers/tags.ts
index d4cfbe8c..5713c192 100644
--- a/packages/trpc/routers/tags.ts
+++ b/packages/trpc/routers/tags.ts
@@ -102,6 +102,7 @@ export const tagsAppRouter = router({
.query(async ({ ctx, input }) => {
return await Tag.getAll(ctx, {
nameContains: input.nameContains,
+ ids: input.ids,
attachedBy: input.attachedBy,
sortBy: input.sortBy,
pagination: input.limit
diff --git a/packages/trpc/routers/users.test.ts b/packages/trpc/routers/users.test.ts
index a2f2be9f..d8ec90f9 100644
--- a/packages/trpc/routers/users.test.ts
+++ b/packages/trpc/routers/users.test.ts
@@ -158,6 +158,18 @@ describe("User Routes", () => {
backupsEnabled: false,
backupsFrequency: "weekly",
backupsRetentionDays: 30,
+
+ // Reader settings
+ readerFontFamily: null,
+ readerFontSize: null,
+ readerLineHeight: null,
+
+ // AI Settings
+ autoSummarizationEnabled: null,
+ autoTaggingEnabled: null,
+ curatedTagIds: null,
+ inferredTagLang: null,
+ tagStyle: "titlecase-spaces",
});
// Update settings
@@ -166,6 +178,17 @@ describe("User Routes", () => {
backupsEnabled: true,
backupsFrequency: "daily",
backupsRetentionDays: 7,
+
+ // Reader settings
+ readerFontFamily: "serif",
+ readerFontSize: 12,
+ readerLineHeight: 1.5,
+
+ // AI Settings
+ autoSummarizationEnabled: true,
+ autoTaggingEnabled: true,
+ inferredTagLang: "en",
+ tagStyle: "lowercase-underscores",
});
// Verify updated settings
@@ -177,6 +200,18 @@ describe("User Routes", () => {
backupsEnabled: true,
backupsFrequency: "daily",
backupsRetentionDays: 7,
+
+ // Reader settings
+ readerFontFamily: "serif",
+ readerFontSize: 12,
+ readerLineHeight: 1.5,
+
+ // AI Settings
+ autoSummarizationEnabled: true,
+ autoTaggingEnabled: true,
+ curatedTagIds: null,
+ inferredTagLang: "en",
+ tagStyle: "lowercase-underscores",
});
// Test invalid update (e.g., empty input, if schema enforces it)
@@ -915,6 +950,81 @@ describe("User Routes", () => {
});
});
+ describe("Update Avatar", () => {
+ test<CustomTestContext>("updateAvatar - promotes unknown asset", async ({
+ db,
+ unauthedAPICaller,
+ }) => {
+ const user = await unauthedAPICaller.users.create({
+ name: "Avatar Reject",
+ email: "avatar-reject@test.com",
+ password: "pass1234",
+ confirmPassword: "pass1234",
+ });
+ const caller = getApiCaller(db, user.id, user.email, user.role || "user");
+
+ await db.insert(assets).values({
+ id: "avatar-asset-2",
+ assetType: AssetTypes.UNKNOWN,
+ userId: user.id,
+ contentType: "image/png",
+ size: 12,
+ fileName: "avatar.png",
+ bookmarkId: null,
+ });
+
+ await caller.users.updateAvatar({ assetId: "avatar-asset-2" });
+
+ const updatedAsset = await db
+ .select()
+ .from(assets)
+ .where(eq(assets.id, "avatar-asset-2"))
+ .then((rows) => rows[0]);
+
+ expect(updatedAsset?.assetType).toBe(AssetTypes.AVATAR);
+ });
+
+ test<CustomTestContext>("updateAvatar - deletes avatar asset", async ({
+ db,
+ unauthedAPICaller,
+ }) => {
+ const user = await unauthedAPICaller.users.create({
+ name: "Avatar Delete",
+ email: "avatar-delete@test.com",
+ password: "pass1234",
+ confirmPassword: "pass1234",
+ });
+ const caller = getApiCaller(db, user.id, user.email, user.role || "user");
+
+ await db.insert(assets).values({
+ id: "avatar-asset-3",
+ assetType: AssetTypes.UNKNOWN,
+ userId: user.id,
+ contentType: "image/png",
+ size: 12,
+ fileName: "avatar.png",
+ bookmarkId: null,
+ });
+
+ await caller.users.updateAvatar({ assetId: "avatar-asset-3" });
+ await caller.users.updateAvatar({ assetId: null });
+
+ const updatedUser = await db
+ .select()
+ .from(users)
+ .where(eq(users.id, user.id))
+ .then((rows) => rows[0]);
+ const remainingAsset = await db
+ .select()
+ .from(assets)
+ .where(eq(assets.id, "avatar-asset-3"))
+ .then((rows) => rows[0]);
+
+ expect(updatedUser?.image).toBeNull();
+ expect(remainingAsset).toBeUndefined();
+ });
+ });
+
describe("Who Am I", () => {
test<CustomTestContext>("whoami - returns user info", async ({
db,
@@ -1008,6 +1118,7 @@ describe("User Routes", () => {
"resend@test.com",
"Test User",
expect.any(String), // token
+ undefined, // redirectUrl
);
});
diff --git a/packages/trpc/routers/users.ts b/packages/trpc/routers/users.ts
index d3bc06d9..c11a0ffd 100644
--- a/packages/trpc/routers/users.ts
+++ b/packages/trpc/routers/users.ts
@@ -9,7 +9,9 @@ import {
zUserSettingsSchema,
zUserStatsResponseSchema,
zWhoAmIResponseSchema,
+ zWrappedStatsResponseSchema,
} from "@karakeep/shared/types/users";
+import { validateRedirectUrl } from "@karakeep/shared/utils/redirectUrl";
import {
adminProcedure,
@@ -30,7 +32,7 @@ export const usersAppRouter = router({
maxRequests: 3,
}),
)
- .input(zSignUpSchema)
+ .input(zSignUpSchema.and(z.object({ redirectUrl: z.string().optional() })))
.output(
z.object({
id: z.string(),
@@ -64,7 +66,11 @@ export const usersAppRouter = router({
});
}
}
- const user = await User.create(ctx, input);
+ const validatedRedirectUrl = validateRedirectUrl(input.redirectUrl);
+ const user = await User.create(ctx, {
+ ...input,
+ redirectUrl: validatedRedirectUrl,
+ });
return {
id: user.id,
name: user.name,
@@ -136,6 +142,24 @@ export const usersAppRouter = router({
const user = await User.fromCtx(ctx);
return await user.getStats();
}),
+ wrapped: authedProcedure
+ .output(zWrappedStatsResponseSchema)
+ .query(async ({ ctx }) => {
+ throw new TRPCError({
+ code: "BAD_REQUEST",
+ message: "This endpoint is currently disabled",
+ });
+ const user = await User.fromCtx(ctx);
+ return await user.getWrappedStats(2025);
+ }),
+ hasWrapped: authedProcedure.output(z.boolean()).query(async ({ ctx }) => {
+ throw new TRPCError({
+ code: "BAD_REQUEST",
+ message: "This endpoint is currently disabled",
+ });
+ const user = await User.fromCtx(ctx);
+ return await user.hasWrapped();
+ }),
settings: authedProcedure
.output(zUserSettingsSchema)
.query(async ({ ctx }) => {
@@ -148,6 +172,16 @@ export const usersAppRouter = router({
const user = await User.fromCtx(ctx);
await user.updateSettings(input);
}),
+ updateAvatar: authedProcedure
+ .input(
+ z.object({
+ assetId: z.string().nullable(),
+ }),
+ )
+ .mutation(async ({ input, ctx }) => {
+ const user = await User.fromCtx(ctx);
+ await user.updateAvatar(input.assetId);
+ }),
verifyEmail: publicProcedure
.use(
createRateLimitMiddleware({
@@ -177,10 +211,16 @@ export const usersAppRouter = router({
.input(
z.object({
email: z.string().email(),
+ redirectUrl: z.string().optional(),
}),
)
.mutation(async ({ input, ctx }) => {
- await User.resendVerificationEmail(ctx, input.email);
+ const validatedRedirectUrl = validateRedirectUrl(input.redirectUrl);
+ await User.resendVerificationEmail(
+ ctx,
+ input.email,
+ validatedRedirectUrl,
+ );
return { success: true };
}),
forgotPassword: publicProcedure
diff --git a/packages/trpc/routers/webhooks.test.ts b/packages/trpc/routers/webhooks.test.ts
index 5a136a31..de27b11e 100644
--- a/packages/trpc/routers/webhooks.test.ts
+++ b/packages/trpc/routers/webhooks.test.ts
@@ -125,4 +125,26 @@ describe("Webhook Routes", () => {
false,
);
});
+
+ test<CustomTestContext>("webhook limit enforcement", async ({
+ apiCallers,
+ }) => {
+ const api = apiCallers[0].webhooks;
+
+ // Create 100 webhooks (the maximum)
+ for (let i = 0; i < 100; i++) {
+ await api.create({
+ url: `https://example${i}.com/webhook`,
+ events: ["created"],
+ });
+ }
+
+ // The 101st webhook should fail
+ await expect(() =>
+ api.create({
+ url: "https://example101.com/webhook",
+ events: ["created"],
+ }),
+ ).rejects.toThrow(/Maximum number of webhooks \(100\) reached/);
+ });
});