Kenni
Kenni
Convex Community
Created by too_easy on 11/25/2024 in #support-community
ConvexAuthNextjsServerProvider not exported
"@convex-dev/auth": "^0.0.74",
12 replies
Convex Community
Created by too_easy on 11/25/2024 in #support-community
ConvexAuthNextjsServerProvider not exported
import { ConvexAuthNextjsServerProvider } from "@convex-dev/auth/nextjs/server"; <ConvexAuthNextjsServerProvider>
<ConvexClientProvider>
{children} </ConvexClientProvider> </ConvexAuthNextjsServerProvider>
12 replies
Convex Community
Created by Kenni on 11/20/2024 in #support-community
Handling "Too many reads" Error During Mass Deletion in Convex
If anyone ever has the same problem, here is my simplified version:
// Main deletion function that initiates the process
// Public entry point: validates the caller, then kicks off the async
// batch-deletion pipeline for a resource. Returns immediately; the actual
// deletion work happens in scheduled background mutations.
export const deleteResource = mutation({
args: { resourceId: v.id("resources") },
handler: async (ctx, { resourceId }) => {
// Reject unauthenticated callers up front.
const userId = await ctx.auth.getUserId();
if (!userId) throw new Error("Authentication required");

// The resource must exist and the caller must hold delete rights.
const resource = await ctx.db.get(resourceId);
if (!resource) throw new Error("Resource not found");
const allowed = await hasPermission(ctx, userId, resourceId, "delete");
if (!allowed) {
throw new Error("Not authorized");
}

// Hand off to the scheduler; deletion proceeds phase by phase,
// starting with the "items" phase and a fresh (null) cursor.
await ctx.scheduler.runAfter(0, internal.batchDelete, {
resourceId,
userId,
cursor: null,
phase: "items"
});

return { success: true };
}
});

// Internal batch deletion handler
// Internal batch-deletion handler.
// Deletes everything belonging to a resource in three sequential phases
// ("items" -> "metadata" -> "cleanup"), processing at most BATCH_SIZE
// documents per mutation so a single transaction stays under Convex's
// read/write limits. Each invocation either reschedules itself with the
// next cursor (same phase) or advances to the next phase.
// FIX(review): exported — Convex only registers exported functions, so the
// `internal.batchDelete` references used by the scheduler cannot resolve a
// non-exported `const`. TODO confirm the generated-API path matches this
// module's filename.
export const batchDelete = internalMutation({
args: {
resourceId: v.id("resources"),
userId: v.string(),
cursor: v.optional(v.string()),
phase: v.union(v.literal("items"), v.literal("metadata"), v.literal("cleanup"))
},
handler: async (ctx, { resourceId, userId, cursor, phase }) => {
// Documents processed per invocation; keep well under the read limit.
const BATCH_SIZE = 100;

try {
switch (phase) {
case "items": {
const items = await getBatch(ctx, "items", resourceId, BATCH_SIZE, cursor);
await deleteItems(ctx, items.page);

if (items.isDone) {
await nextPhase(ctx, resourceId, userId, "metadata");
} else {
await continueBatch(ctx, resourceId, userId, items.cursor, "items");
}
break;
}

case "metadata": {
const metadata = await getBatch(ctx, "metadata", resourceId, BATCH_SIZE, cursor);
await deleteMetadata(ctx, metadata.page);

if (metadata.isDone) {
await nextPhase(ctx, resourceId, userId, "cleanup");
} else {
await continueBatch(ctx, resourceId, userId, metadata.cursor, "metadata");
}
break;
}

case "cleanup": {
// All dependent documents are gone; remove the resource itself.
await ctx.db.delete(resourceId);
break;
}
}
} catch (error) {
// Record which phase failed, then rethrow so the failure is visible to
// the scheduler. Guard against non-Error throws, which have no `.message`.
const message = error instanceof Error ? error.message : String(error);
await logError(ctx, resourceId, `Failed during ${phase}: ${message}`);
throw error;
}
}
});

// Helper functions
// Fetch one page of rows from `table` belonging to `resourceId`, resuming
// from `cursor` (a nullish cursor starts pagination from the beginning).
// Relies on a "by_resource" index existing on the target table.
async function getBatch(ctx, table, resourceId, size, cursor) {
const scoped = ctx.db
.query(table)
.withIndex("by_resource", q => q.eq("resourceId", resourceId));
return scoped.paginate({ numItems: size, cursor: cursor ?? null });
}

// Schedule the batch-deletion handler to start the given phase from the
// beginning (cursor reset to null).
// FIX(review): the fourth parameter was previously also named `nextPhase`,
// shadowing the function itself inside its own body; renamed to `phase` for
// clarity. Call sites are positional, so this is backward-compatible.
async function nextPhase(ctx, resourceId, userId, phase) {
await ctx.scheduler.runAfter(0, internal.batchDelete, {
resourceId,
userId,
cursor: null,
phase
});
}

// Re-enqueue the batch-deletion handler to keep working through the
// current phase, resuming from where the previous page left off.
async function continueBatch(ctx, resourceId, userId, cursor, currentPhase) {
const args = { resourceId, userId, cursor, phase: currentPhase };
await ctx.scheduler.runAfter(0, internal.batchDelete, args);
}
// Main deletion function that initiates the process
// Public entry point: validates the caller, then kicks off the async
// batch-deletion pipeline for a resource. Returns immediately; the actual
// deletion work happens in scheduled background mutations.
export const deleteResource = mutation({
args: { resourceId: v.id("resources") },
handler: async (ctx, { resourceId }) => {
// Reject unauthenticated callers up front.
const userId = await ctx.auth.getUserId();
if (!userId) throw new Error("Authentication required");

// The resource must exist and the caller must hold delete rights.
const resource = await ctx.db.get(resourceId);
if (!resource) throw new Error("Resource not found");
const allowed = await hasPermission(ctx, userId, resourceId, "delete");
if (!allowed) {
throw new Error("Not authorized");
}

// Hand off to the scheduler; deletion proceeds phase by phase,
// starting with the "items" phase and a fresh (null) cursor.
await ctx.scheduler.runAfter(0, internal.batchDelete, {
resourceId,
userId,
cursor: null,
phase: "items"
});

return { success: true };
}
});

// Internal batch deletion handler
// Internal batch-deletion handler.
// Deletes everything belonging to a resource in three sequential phases
// ("items" -> "metadata" -> "cleanup"), processing at most BATCH_SIZE
// documents per mutation so a single transaction stays under Convex's
// read/write limits. Each invocation either reschedules itself with the
// next cursor (same phase) or advances to the next phase.
// FIX(review): exported — Convex only registers exported functions, so the
// `internal.batchDelete` references used by the scheduler cannot resolve a
// non-exported `const`. TODO confirm the generated-API path matches this
// module's filename.
export const batchDelete = internalMutation({
args: {
resourceId: v.id("resources"),
userId: v.string(),
cursor: v.optional(v.string()),
phase: v.union(v.literal("items"), v.literal("metadata"), v.literal("cleanup"))
},
handler: async (ctx, { resourceId, userId, cursor, phase }) => {
// Documents processed per invocation; keep well under the read limit.
const BATCH_SIZE = 100;

try {
switch (phase) {
case "items": {
const items = await getBatch(ctx, "items", resourceId, BATCH_SIZE, cursor);
await deleteItems(ctx, items.page);

if (items.isDone) {
await nextPhase(ctx, resourceId, userId, "metadata");
} else {
await continueBatch(ctx, resourceId, userId, items.cursor, "items");
}
break;
}

case "metadata": {
const metadata = await getBatch(ctx, "metadata", resourceId, BATCH_SIZE, cursor);
await deleteMetadata(ctx, metadata.page);

if (metadata.isDone) {
await nextPhase(ctx, resourceId, userId, "cleanup");
} else {
await continueBatch(ctx, resourceId, userId, metadata.cursor, "metadata");
}
break;
}

case "cleanup": {
// All dependent documents are gone; remove the resource itself.
await ctx.db.delete(resourceId);
break;
}
}
} catch (error) {
// Record which phase failed, then rethrow so the failure is visible to
// the scheduler. Guard against non-Error throws, which have no `.message`.
const message = error instanceof Error ? error.message : String(error);
await logError(ctx, resourceId, `Failed during ${phase}: ${message}`);
throw error;
}
}
});

// Helper functions
// Fetch one page of rows from `table` belonging to `resourceId`, resuming
// from `cursor` (a nullish cursor starts pagination from the beginning).
// Relies on a "by_resource" index existing on the target table.
async function getBatch(ctx, table, resourceId, size, cursor) {
const scoped = ctx.db
.query(table)
.withIndex("by_resource", q => q.eq("resourceId", resourceId));
return scoped.paginate({ numItems: size, cursor: cursor ?? null });
}

// Schedule the batch-deletion handler to start the given phase from the
// beginning (cursor reset to null).
// FIX(review): the fourth parameter was previously also named `nextPhase`,
// shadowing the function itself inside its own body; renamed to `phase` for
// clarity. Call sites are positional, so this is backward-compatible.
async function nextPhase(ctx, resourceId, userId, phase) {
await ctx.scheduler.runAfter(0, internal.batchDelete, {
resourceId,
userId,
cursor: null,
phase
});
}

// Re-enqueue the batch-deletion handler to keep working through the
// current phase, resuming from where the previous page left off.
async function continueBatch(ctx, resourceId, userId, cursor, currentPhase) {
const args = { resourceId, userId, cursor, phase: currentPhase };
await ctx.scheduler.runAfter(0, internal.batchDelete, args);
}
7 replies
Convex Community
Created by Kenni on 11/20/2024 in #support-community
Handling "Too many reads" Error During Mass Deletion in Convex
So something like: await ctx.scheduler.runAfter(0, "deleteCompanyBatch", {}); ?
7 replies