diff --git a/README.md b/README.md index b08be906..2c99c9ba 100644 --- a/README.md +++ b/README.md @@ -80,6 +80,7 @@ Visit [http://localhost:3000](http://localhost:3000) to get started. - **AI Gateway**: Generate Text, Generate Image +- **Apify**: Run Actor, Scrape Single URL - **Firecrawl**: Scrape URL, Search Web - **Linear**: Create Ticket, Find Issues - **Resend**: Send Email @@ -262,6 +263,30 @@ const searchResult = await firecrawlSearchStep({ }); ``` +### Apify (Web Scraping) + +```typescript +import { + scrapeSingleUrlStep, + apifyRunActorStep, +} from "@/lib/steps/apify"; + +// Scrape a URL +const scrapeResult = await scrapeSingleUrlStep({ + url: "https://example.com", + crawlerType: "playwright:adaptive", +}); + +// Run an Actor from Apify Store +const searchMapsResults = await apifyRunActorStep({ + actorId: "compass/crawler-google-places", + actorInput: { + searchStringsArray: [ "restaurants in San Francisco" ] + }, +}); +``` + + ## Tech Stack - **Framework**: Next.js 16 with React 19 diff --git a/app/api/integrations/[integrationId]/test/route.ts b/app/api/integrations/[integrationId]/test/route.ts index 0247e731..3085f617 100644 --- a/app/api/integrations/[integrationId]/test/route.ts +++ b/app/api/integrations/[integrationId]/test/route.ts @@ -2,6 +2,7 @@ import { LinearClient } from "@linear/sdk"; import FirecrawlApp from "@mendable/firecrawl-js"; import { WebClient } from "@slack/web-api"; import { createGateway } from "ai"; +import { ApifyClient } from "apify-client"; import { NextResponse } from "next/server"; import postgres from "postgres"; import { Resend } from "resend"; @@ -68,6 +69,9 @@ export async function POST( integration.config.firecrawlApiKey ); break; + case "apify": + result = await testApifyConnection(integration.config.apifyApiToken); + break; default: return NextResponse.json( { error: "Invalid integration type" }, @@ -281,3 +285,36 @@ async function testFirecrawlConnection( }; } } + +async function testApifyConnection( + apiKey?: string +): Promise { + try { + if (!apiKey) { + return { + status: "error", + message: "Apify API Token is not configured", + }; + } + + const client = new ApifyClient({ token: apiKey }); + const user = await client.user("me").get(); + + if (!user.username) { + return { + status: "error", + message: "Failed to verify API token", + }; + } + + return { + status: "success", + message: `Connected as ${user.username}`, + }; + } catch (error) { + return { + status: "error", + message: error instanceof Error ? 
error.message : "Connection failed", + }; + } +} diff --git a/components/ui/template-badge-json.tsx b/components/ui/template-badge-json.tsx new file mode 100644 index 00000000..223a88a2 --- /dev/null +++ b/components/ui/template-badge-json.tsx @@ -0,0 +1,151 @@ +"use client"; + +import { useEffect, useRef, useState } from "react"; +import { cn } from "@/lib/utils"; +import { TemplateBadgeTextarea } from "./template-badge-textarea"; + +export interface TemplateBadgeJsonProps { + value?: string; + onChange?: (value: string) => void; + placeholder?: string; + disabled?: boolean; + className?: string; + id?: string; + rows?: number; +} + +/** + * A textarea component that validates JSON input in real-time + * Wraps TemplateBadgeTextarea and adds JSON validation and formatting + */ +export function TemplateBadgeJson({ + value = "", + onChange, + placeholder, + disabled, + className, + id, + rows = 3, +}: TemplateBadgeJsonProps) { + const [jsonError, setJsonError] = useState(null); + const [isFocused, setIsFocused] = useState(false); + const formatTimeoutRef = useRef(null); + const lastFormattedValueRef = useRef(""); + + // Validate JSON on value change + useEffect(() => { + if (!value || typeof value !== "string") { + setJsonError(null); + return; + } + + // If empty or only whitespace, no error + if (!value.trim()) { + setJsonError(null); + return; + } + + // Ensure that parsable values (not object) throws + if (!/^\s*\{[\s\S]*\}\s*$/.test(value)) { + setJsonError("Value must be a JSON object"); + return; + } + + // Parse JSON directly - template variables will be treated as normal strings + try { + JSON.parse(value); + setJsonError(null); + } catch (error) { + setJsonError( + error instanceof Error ? error.message : "Invalid JSON format" + ); + } + }, [value]); + + // Format JSON when it becomes valid (debounced to avoid formatting while typing) + useEffect(() => { + // Clear any pending format timeout + if (formatTimeoutRef.current) { + clearTimeout(formatTimeoutRef.current); + } + + // Don't format if there's an error, field is focused, or value is empty + if (jsonError || isFocused || !value || typeof value !== "string") { + return; + } + + if (!value.trim()) { + return; + } + + // Debounce formatting - wait 500ms after user stops typing + formatTimeoutRef.current = setTimeout(() => { + try { + // Parse JSON directly - template variables are treated as normal strings + const parsed = JSON.parse(value); + const formatted = JSON.stringify(parsed, null, 2); + + // Only format if different from current value and we haven't already formatted this value + if (formatted !== value && formatted !== lastFormattedValueRef.current) { + lastFormattedValueRef.current = formatted; + onChange?.(formatted); + } + } catch { + // If parsing fails, don't format + } + }, 500); + + return () => { + if (formatTimeoutRef.current) { + clearTimeout(formatTimeoutRef.current); + } + }; + }, [value, isFocused, jsonError, onChange]); + + // Track focus state by listening to focus/blur events on the wrapper + const handleWrapperFocus = () => { + setIsFocused(true); + }; + + const handleWrapperBlur = () => { + setIsFocused(false); + // Format immediately on blur if JSON is valid + if (!jsonError && value && typeof value === "string" && value.trim()) { + try { + // Parse JSON directly - template variables are treated as normal strings + const parsed = JSON.parse(value); + const formatted = JSON.stringify(parsed, null, 2); + + if (formatted !== value) { + onChange?.(formatted); + } + } catch { + // If parsing fails, don't 
format
+      }
+    }
+  };
+
+  return (
+    <div onBlur={handleWrapperBlur} onFocus={handleWrapperFocus}>
+      <TemplateBadgeTextarea
+        className={className}
+        disabled={disabled}
+        id={id}
+        onChange={onChange}
+        placeholder={placeholder}
+        rows={rows}
+        value={value}
+      />
+      {jsonError && (
+        <p className="text-destructive">{jsonError}</p>
+      )}
+    </div>
+ ); +} diff --git a/components/workflow/config/action-config.tsx b/components/workflow/config/action-config.tsx index 3931cf05..152d01f0 100644 --- a/components/workflow/config/action-config.tsx +++ b/components/workflow/config/action-config.tsx @@ -18,6 +18,7 @@ import { getActionsByCategory, getAllIntegrations, } from "@/plugins"; +import { RunActorConfigFields } from "@/plugins/apify/steps/run-actor/config"; import { ActionConfigRenderer } from "./action-config-renderer"; import { SchemaBuilder, type SchemaField } from "./schema-builder"; @@ -390,14 +391,22 @@ export function ActionConfig({ )} {/* Plugin actions - declarative config fields */} - {pluginAction && !SYSTEM_ACTION_IDS.includes(actionType) && ( - - )} + {pluginAction && + !SYSTEM_ACTION_IDS.includes(actionType) && + (actionType === "apify/run-actor" ? ( + + ) : ( + + ))} ); } diff --git a/lib/step-registry.ts b/lib/step-registry.ts index 6a71649b..48876bc9 100644 --- a/lib/step-registry.ts +++ b/lib/step-registry.ts @@ -7,7 +7,7 @@ * This registry enables dynamic step imports that are statically analyzable * by the bundler. Each action type maps to its step importer function. * - * Generated entries: 10 + * Generated entries: 12 */ import "server-only"; @@ -41,6 +41,14 @@ export const PLUGIN_STEP_IMPORTERS: Record = { importer: () => import("@/plugins/ai-gateway/steps/generate-image"), stepFunction: "generateImageStep", }, + "apify/run-actor": { + importer: () => import("@/plugins/apify/steps/run-actor/step"), + stepFunction: "apifyRunActorStep", + }, + "apify/scrape-single-url": { + importer: () => import("@/plugins/apify/steps/scrape-single-url/step"), + stepFunction: "scrapeSingleUrlStep", + }, "firecrawl/scrape": { importer: () => import("@/plugins/firecrawl/steps/scrape"), stepFunction: "firecrawlScrapeStep", @@ -114,6 +122,8 @@ export const PLUGIN_STEP_IMPORTERS: Record = { export const ACTION_LABELS: Record = { "ai-gateway/generate-text": "Generate Text", "ai-gateway/generate-image": "Generate Image", + "apify/run-actor": "Run Actor", + "apify/scrape-single-url": "Scrape Single URL", "firecrawl/scrape": "Scrape URL", "firecrawl/search": "Search Web", "linear/create-ticket": "Create Ticket", diff --git a/lib/steps/index.ts b/lib/steps/index.ts index 3ffb4d2d..32959688 100644 --- a/lib/steps/index.ts +++ b/lib/steps/index.ts @@ -6,6 +6,8 @@ import type { generateImageStep } from "../../plugins/ai-gateway/steps/generate-image"; import type { generateTextStep } from "../../plugins/ai-gateway/steps/generate-text"; +import type { apifyRunActorStep } from "../../plugins/apify/steps/run-actor/step"; +import type { scrapeSingleUrlStep } from "../../plugins/apify/steps/scrape-single-url/step"; import type { firecrawlScrapeStep } from "../../plugins/firecrawl/steps/scrape"; import type { firecrawlSearchStep } from "../../plugins/firecrawl/steps/search"; import type { createTicketStep } from "../../plugins/linear/steps/create-ticket"; @@ -64,6 +66,14 @@ export const stepRegistry: Record = { (await import("../../plugins/firecrawl/steps/search")).firecrawlSearchStep( input as Parameters[0] ), + "Run Actor": async (input) => + ( + await import("../../plugins/apify/steps/run-actor/step") + ).apifyRunActorStep(input as Parameters[0]), + "Scrape Single URL": async (input) => + ( + await import("../../plugins/apify/steps/scrape-single-url/step") + ).scrapeSingleUrlStep(input as Parameters[0]), }; // Helper to check if a step exists diff --git a/lib/types/integration.ts b/lib/types/integration.ts index cd146253..551ce9b2 100644 --- 
a/lib/types/integration.ts +++ b/lib/types/integration.ts @@ -9,12 +9,13 @@ * 2. Add a system integration to SYSTEM_INTEGRATION_TYPES in discover-plugins.ts * 3. Run: pnpm discover-plugins * - * Generated types: ai-gateway, database, firecrawl, linear, resend, slack, v0 + * Generated types: ai-gateway, apify, database, firecrawl, linear, resend, slack, v0 */ // Integration type union - plugins + system integrations export type IntegrationType = | "ai-gateway" + | "apify" | "database" | "firecrawl" | "linear" diff --git a/package.json b/package.json index 65473cf2..94ebdeff 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ "@vercel/speed-insights": "^1.2.0", "@xyflow/react": "^12.9.2", "ai": "^5.0.102", + "apify-client": "^2.20.0", "better-auth": "^1.3.34", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", diff --git a/plugins/apify/codegen/run-actor.ts b/plugins/apify/codegen/run-actor.ts new file mode 100644 index 00000000..9306ef3f --- /dev/null +++ b/plugins/apify/codegen/run-actor.ts @@ -0,0 +1,60 @@ +/** + * Code generation template for Run Actor action + * This template is used when exporting workflows to standalone Next.js projects + * It uses environment variables instead of integrationId + */ +export const runActorCodegenTemplate = `import { ApifyClient } from "apify-client"; + +export async function apifyRunActorStep(input: { + actorId: string; + actorInput?: string | Record | null; +}) { + "use step"; + + const apiKey = process.env.APIFY_API_TOKEN; + + if (!apiKey) { + throw new Error("Apify API Token is not configured. Set APIFY_API_TOKEN environment variable."); + } + + let parsedActorInput: Record = {}; + if (input.actorInput) { + // If it's already an object, use it directly + if (typeof input.actorInput === "object" && !Array.isArray(input.actorInput)) { + parsedActorInput = input.actorInput; + } else if (typeof input.actorInput === "string") { + // If it's a string, parse it + try { + parsedActorInput = JSON.parse(input.actorInput); + } catch (err) { + throw new Error(\`Cannot parse Actor input: \${err instanceof Error ? err.message : String(err)}\`); + } + } + } + + try { + const client = new ApifyClient({ token: apiKey }); + const actorClient = client.actor(input.actorId); + + // Run synchronously and wait for completion + const runData = await actorClient.call(parsedActorInput); + + // Get dataset items + let datasetItems: unknown[] = []; + if (runData.defaultDatasetId) { + const dataset = await client + .dataset(runData.defaultDatasetId) + .listItems(); + datasetItems = dataset.items; + } + + return { + runId: runData.id || "unknown", + status: runData.status || "SUCCEEDED", + datasetId: runData.defaultDatasetId, + data: datasetItems, + }; + } catch (error) { + throw new Error(\`Failed to run Actor: \${error instanceof Error ? 
error.message : String(error)}\`); + } +}`; diff --git a/plugins/apify/codegen/scrape-single-url.ts b/plugins/apify/codegen/scrape-single-url.ts new file mode 100644 index 00000000..cc4d5d82 --- /dev/null +++ b/plugins/apify/codegen/scrape-single-url.ts @@ -0,0 +1,68 @@ +/** + * Code generation template for Scrape Single URL action + * This template is used when exporting workflows to standalone Next.js projects + * It uses environment variables instead of integrationId + */ +export const scrapeSingleUrlCodegenTemplate = `import { ApifyClient } from "apify-client"; + +export async function scrapeSingleUrlStep(input: { + url: string; + crawlerType?: string; +}) { + "use step"; + + const apiKey = process.env.APIFY_API_TOKEN; + + if (!apiKey) { + throw new Error("Apify API Token is not configured. Set APIFY_API_TOKEN environment variable."); + } + + if (!input.url) { + throw new Error("URL is required."); + } + + try { + const client = new ApifyClient({ token: apiKey }); + const actorClient = client.actor("apify/website-content-crawler"); + const crawlerType = input.crawlerType || "playwright:adaptive"; + + // Prepare actor input + const actorInput = { + startUrls: [{ url: input.url }], + crawlerType, + maxCrawlDepth: 0, + maxCrawlPages: 1, + maxResults: 1, + proxyConfiguration: { + useApifyProxy: true, + }, + removeCookieWarnings: true, + saveMarkdown: true, + }; + + // Run synchronously and wait for completion + const runData = await actorClient.call(actorInput); + + // Get dataset items + let markdown: string | undefined; + if (runData.defaultDatasetId) { + const datasetItems = await client + .dataset(runData.defaultDatasetId) + .listItems(); + + // Extract markdown from the first item + if (datasetItems.items && datasetItems.items.length > 0) { + const firstItem = datasetItems.items[0] as Record; + markdown = firstItem.markdown as string; + } + } + + return { + runId: runData.id || "unknown", + status: runData.status || "SUCCEEDED", + markdown, + }; + } catch (error) { + throw new Error(\`Failed to scrape URL: \${error instanceof Error ? 
error.message : String(error)}\`); + } +}`; diff --git a/plugins/apify/icon.tsx b/plugins/apify/icon.tsx new file mode 100644 index 00000000..6329437e --- /dev/null +++ b/plugins/apify/icon.tsx @@ -0,0 +1,17 @@ +import Image from "next/image"; + +/** + * Apify Icon Component + * Used as the icon for Run Actor action + */ +export function ApifyIcon({ className }: { className?: string }) { + return ( + Apify logo + ); +} diff --git a/plugins/apify/index.tsx b/plugins/apify/index.tsx new file mode 100644 index 00000000..faab5338 --- /dev/null +++ b/plugins/apify/index.tsx @@ -0,0 +1,111 @@ +import type { IntegrationPlugin } from "../registry"; +import { registerIntegration } from "../registry"; +import { runActorCodegenTemplate } from "./codegen/run-actor"; +import { scrapeSingleUrlCodegenTemplate } from "./codegen/scrape-single-url"; +import { ApifyIcon } from "./icon"; + +const apifyPlugin: IntegrationPlugin = { + type: "apify", + label: "Apify", + description: "Run web scraping and automation Actors", + + icon: ApifyIcon, + + formFields: [ + { + id: "apifyApiToken", + label: "Apify API Token", + type: "password", + placeholder: "apify_api_...", + configKey: "apifyApiToken", + envVar: "APIFY_API_TOKEN", + helpText: "Get your API token from ", + helpLink: { + text: "Apify Console", + url: "https://console.apify.com/account/integrations", + }, + }, + ], + + testConfig: { + getTestFunction: async () => { + const { testApify } = await import("./test"); + return testApify; + }, + }, + + actions: [ + { + slug: "run-actor", + label: "Run Actor", + description: "Run an Apify Actor and get results", + category: "Apify", + stepFunction: "apifyRunActorStep", + stepImportPath: "run-actor/step", + configFields: [ + { + key: "actorId", + label: "Actor (ID or name)", + type: "template-input", + placeholder: "apify/website-content-crawler or {{NodeName.actorId}}", + example: "apify/website-content-crawler", + required: true, + }, + { + key: "actorInput", + label: "Actor Input (JSON)", + type: "template-textarea", + placeholder: '{"startUrls": [{"url": "https://example.com"}]}', + rows: 6, + example: '{"startUrls": [{"url": "https://example.com"}]}', + required: true, + }, + ], + codegenTemplate: runActorCodegenTemplate, + }, + { + slug: "scrape-single-url", + label: "Scrape Single URL", + description: "Scrape a single URL and get markdown output", + category: "Apify", + stepFunction: "scrapeSingleUrlStep", + stepImportPath: "scrape-single-url/step", + configFields: [ + { + key: "url", + label: "URL", + type: "template-input", + placeholder: "https://example.com or {{NodeName.url}}", + example: "https://example.com", + required: true, + }, + { + key: "crawlerType", + label: "Crawler Type", + type: "select", + defaultValue: "playwright:adaptive", + options: [ + { + value: "playwright:adaptive", + label: "Adaptive switching between browser and raw HTTP", + }, + { + value: "playwright:firefox", + label: "Headless browser (Firefox+Playwright)", + }, + { + value: "cheerio", + label: "Raw HTTP client (Cheerio)", + }, + ], + }, + ], + codegenTemplate: scrapeSingleUrlCodegenTemplate, + }, + ], +}; + +// Auto-register on import +registerIntegration(apifyPlugin); + +export default apifyPlugin; diff --git a/plugins/apify/steps/run-actor/config.tsx b/plugins/apify/steps/run-actor/config.tsx new file mode 100644 index 00000000..e085da9e --- /dev/null +++ b/plugins/apify/steps/run-actor/config.tsx @@ -0,0 +1,58 @@ +import { Label } from "@/components/ui/label"; +import { TemplateBadgeInput } from 
"@/components/ui/template-badge-input"; +import { TemplateBadgeJson } from "@/components/ui/template-badge-json"; + +/** + * Run Actor Config Fields Component + * UI for configuring the run actor action + */ +export function RunActorConfigFields({ + config, + onUpdateConfig, + disabled, +}: { + config: Record; + onUpdateConfig: (key: string, value: unknown) => void; + disabled?: boolean; +}) { + return ( +
+
+ + onUpdateConfig("actorId", value)} + placeholder="apify/web-scraper or {{NodeName.actorId}}" + value={(config?.actorId as string) || ""} + /> +

+ Enter an Actor ID or name (e.g., apify/website-content-crawler). Browse all available Actors in the + Apify Store + . +

+
+ +
+ + onUpdateConfig("actorInput", value)} + value={(config?.actorInput as string) || ""} + /> +

+ JSON input for the Actor. Check the Actor's documentation for required + fields. +

+
+
+ ); +} diff --git a/plugins/apify/steps/run-actor/step.ts b/plugins/apify/steps/run-actor/step.ts new file mode 100644 index 00000000..44a586f6 --- /dev/null +++ b/plugins/apify/steps/run-actor/step.ts @@ -0,0 +1,93 @@ +import "server-only"; + +import { ApifyClient } from "apify-client"; +import { fetchCredentials } from "@/lib/credential-fetcher"; +import { getErrorMessage } from "@/lib/utils"; +import { type StepInput, withStepLogging } from "@/lib/steps/step-handler"; + +type ApifyRunActorResult = + | { + success: true; + runId: string; + status: string; + datasetId?: string; + data?: unknown[]; + } + | { success: false; error: string }; + +/** + * Run Actor Step + * Runs an Apify Actor and optionally waits for results + */ +export async function apifyRunActorStep( + input: { + integrationId?: string; + actorId: string; + actorInput?: string | Record | null; + } & StepInput +): Promise { + "use step"; + + return withStepLogging(input, async () => { + const credentials = input.integrationId + ? await fetchCredentials(input.integrationId) + : {}; + + const apiKey = credentials.APIFY_API_TOKEN; + + if (!apiKey) { + return { + success: false, + error: "Apify API Token is not configured.", + }; + } + + let parsedActorInput: Record = {}; + if (input?.actorInput) { + // If it's already an object, use it directly + if (typeof input.actorInput === "object" && !Array.isArray(input.actorInput)) { + parsedActorInput = input.actorInput; + } else if (typeof input.actorInput === "string") { + // If it's a string, parse it + try { + parsedActorInput = JSON.parse(input.actorInput); + } catch (err) { + return { + success: false, + error: `Cannot parse Actor input: ${getErrorMessage(err)}`, + }; + } + } + } + + try { + const client = new ApifyClient({ token: apiKey }); + const actorClient = client.actor(input.actorId); + + // Run synchronously and wait for completion + const runData = await actorClient.call(parsedActorInput); + + // Get dataset items + let datasetItems: unknown[] = []; + if (runData.defaultDatasetId) { + const dataset = await client + .dataset(runData.defaultDatasetId) + .listItems(); + datasetItems = dataset.items; + } + + return { + success: true, + runId: runData.id || "unknown", + status: runData.status || "SUCCEEDED", + datasetId: runData.defaultDatasetId, + data: datasetItems, + }; + } catch (error) { + return { + success: false, + error: `Failed to run Actor: ${getErrorMessage(error)}`, + }; + } + }); +} diff --git a/plugins/apify/steps/scrape-single-url/step.ts b/plugins/apify/steps/scrape-single-url/step.ts new file mode 100644 index 00000000..09c9f966 --- /dev/null +++ b/plugins/apify/steps/scrape-single-url/step.ts @@ -0,0 +1,102 @@ +import "server-only"; + +import { ApifyClient } from "apify-client"; +import { fetchCredentials } from "@/lib/credential-fetcher"; +import { getErrorMessage } from "@/lib/utils"; +import { type StepInput, withStepLogging } from "@/lib/steps/step-handler"; + +type ScrapeSingleUrlResult = + | { + success: true; + runId: string; + status: string; + markdown?: string; + } + | { success: false; error: string }; + +/** + * Scrape Single URL Step + * Scrapes a single URL using apify/website-content-crawler and returns markdown + */ +export async function scrapeSingleUrlStep( + input: { + integrationId?: string; + url: string; + crawlerType?: string; + } & StepInput +): Promise { + "use step"; + + return withStepLogging(input, async () => { + const credentials = input.integrationId + ? 
await fetchCredentials(input.integrationId) + : {}; + + const apiKey = credentials.APIFY_API_TOKEN; + + if (!apiKey) { + return { + success: false, + error: "Apify API Token is not configured.", + }; + } + + if (!input.url) { + return { + success: false, + error: "URL is required.", + }; + } + + try { + const client = new ApifyClient({ token: apiKey }); + const actorClient = client.actor("apify/website-content-crawler"); + const crawlerType = input.crawlerType || "playwright:adaptive"; + + // Prepare actor input + const actorInput = { + startUrls: [{ url: input.url }], + crawlerType, + maxCrawlDepth: 0, + maxCrawlPages: 1, + maxResults: 1, + proxyConfiguration: { + useApifyProxy: true, + }, + removeCookieWarnings: true, + saveMarkdown: true, + }; + + // Run synchronously and wait for completion (waits indefinitely if waitSecs not specified) + const runData = await actorClient.call(actorInput); + + // Get dataset items + let markdown: string | undefined; + if (runData.defaultDatasetId) { + const datasetItems = await client + .dataset(runData.defaultDatasetId) + .listItems(); + + // Extract markdown from the first item + if (datasetItems.items && datasetItems.items.length > 0) { + const firstItem = datasetItems.items[0] as Record; + markdown = (firstItem.markdown as string); + } + } + + const result: ScrapeSingleUrlResult = { + success: true, + runId: runData.id || "unknown", + status: runData.status || "SUCCEEDED", + markdown, + }; + + return result; + } catch (error) { + return { + success: false, + error: `Failed to scrape URL: ${getErrorMessage(error)}`, + }; + } + }); +} diff --git a/plugins/apify/test.ts b/plugins/apify/test.ts new file mode 100644 index 00000000..729bf786 --- /dev/null +++ b/plugins/apify/test.ts @@ -0,0 +1,14 @@ +import { ApifyClient } from "apify-client"; + +export async function testApify(credentials: Record) { + try { + const client = new ApifyClient({ token: credentials.APIFY_API_TOKEN }); + await client.user("me").get(); + return { success: true }; + } catch (error) { + return { + success: false, + error: error instanceof Error ? error.message : String(error), + }; + } +} diff --git a/plugins/index.ts b/plugins/index.ts index c1eaf29e..4bb824ef 100644 --- a/plugins/index.ts +++ b/plugins/index.ts @@ -13,10 +13,11 @@ * 1. Delete the plugin directory * 2. 
Run: pnpm discover-plugins (or it runs automatically on build) * - * Discovered plugins: ai-gateway, firecrawl, linear, resend, slack, v0 + * Discovered plugins: ai-gateway, apify, firecrawl, linear, resend, slack, v0 */ import "./ai-gateway"; +import "./apify"; import "./firecrawl"; import "./linear"; import "./resend"; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 01d512da..714149b8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -44,6 +44,9 @@ importers: ai: specifier: ^5.0.102 version: 5.0.102(zod@4.1.12) + apify-client: + specifier: ^2.20.0 + version: 2.20.0 better-auth: specifier: ^1.3.34 version: 1.3.34(next@16.0.1(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(react-dom@19.2.0(react@19.2.0))(react@19.2.0) @@ -182,6 +185,15 @@ packages: resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==} engines: {node: '>=10'} + '@apify/consts@2.48.0': + resolution: {integrity: sha512-a0HeYDxAbbkRxc9z2N6beMFAmAJSgBw8WuKUwV+KmCuPyGUVLp54fYzjQ63p9Gv5IVFC88/HMXpAzI29ARgO5w==} + + '@apify/log@2.5.28': + resolution: {integrity: sha512-jU8qIvU+Crek8glBjFl3INjJQWWDR9n2z9Dr0WvUI8KJi0LG9fMdTvV+Aprf9z1b37CbHXgiZkA1iPlNYxKOEQ==} + + '@apify/utilities@2.23.4': + resolution: {integrity: sha512-1tLXOJBJR1SUSp/iEj6kcvV+9B5dn1mvIWDtRYwevJXXURyJdPwzJApi0F0DZz/Vk2HeCC381gnSqASzXN8MLA==} + '@aws-crypto/sha256-browser@5.2.0': resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} @@ -401,6 +413,10 @@ packages: '@clack/prompts@0.11.0': resolution: {integrity: sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==} + '@crawlee/types@3.15.3': + resolution: {integrity: sha512-RvgVPXrsQw4GQIUXrC1z1aNOedUPJnZ/U/8n+jZ0fu1Iw9moJVMuiuIxSI8q1P6BA84aWZdalyfDWBZ3FMjsiw==} + engines: {node: '>=16.0.0'} + '@drizzle-team/brocli@0.10.2': resolution: {integrity: sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w==} @@ -2013,6 +2029,10 @@ packages: resolution: {integrity: sha512-HcWLW28yTMGXpwE9VLx9J+N2KEUaELadLrkPEEI9tpI5la70xNEVEsu/C+m3u7uoq4FulLqZQhgBCzR9IZhFpA==} engines: {node: '>=20.0.0'} + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + '@sindresorhus/merge-streams@4.0.0': resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==} engines: {node: '>=18'} @@ -2596,6 +2616,10 @@ packages: ansi-align@3.0.1: resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} + ansi-colors@4.1.3: + resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} + engines: {node: '>=6'} + ansi-escapes@4.3.2: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} engines: {node: '>=8'} @@ -2624,6 +2648,9 @@ packages: resolution: {integrity: sha512-0qWUglt9JEqLFr3w1I1pbrChn1grhaiAR2ocX1PP/flRmxgtwTzPFFFnfIlD6aMOLQZgSuCRlidD70lvx8yhzg==} engines: {node: '>=14'} + apify-client@2.20.0: + resolution: {integrity: sha512-oEMTImVVRZ5n8JkFV6dgbBFL3Xqz+GTwjUCjn/hwSNkow31Q8VNGk4qYDfRjkoqNQJ3ZirhtCwTnhkSXn1Tf+g==} + argparse@2.0.1: resolution: {integrity: 
sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} @@ -2642,6 +2669,9 @@ packages: resolution: {integrity: sha512-UOCGPYbl0tv8+006qks/dTgV9ajs97X2p0FAbyS2iyCRrmLSRolDaHdp+v/CLgnzHc3fVB+CwYiUmei7ndFcgA==} engines: {node: '>=12.0.0'} + async-retry@1.3.3: + resolution: {integrity: sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==} + async@3.2.6: resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} @@ -2823,6 +2853,10 @@ packages: resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} engines: {node: ^14.18.0 || >=16.10.0} + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} @@ -2941,6 +2975,10 @@ packages: dompurify@3.1.7: resolution: {integrity: sha512-VaTstWtsneJY8xzy7DekmYWEOZcmzIe3Qb3zPd4STve1OBTa+e+WmS1ITQec1fZYXI3HCsOZZiSMpG6oxoWMWQ==} + dot-prop@6.0.1: + resolution: {integrity: sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==} + engines: {node: '>=10'} + dotenv@17.2.3: resolution: {integrity: sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==} engines: {node: '>=12'} @@ -3406,6 +3444,10 @@ packages: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} + is-obj@2.0.0: + resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==} + engines: {node: '>=8'} + is-plain-obj@4.1.0: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} @@ -3608,6 +3650,10 @@ packages: resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + lodash.isequal@4.5.0: + resolution: {integrity: sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==} + deprecated: This package is deprecated. Use require('node:util').isDeepStrictEqual instead. 
+ log-symbols@6.0.0: resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} engines: {node: '>=18'} @@ -3833,6 +3879,10 @@ packages: resolution: {integrity: sha512-wrAwOeXp1RRMFfQY8Sy7VaGVmPocaLwSFOYCGKSyo8qmJ+/yaafCl5BCA1IQZWqFSRBrKDYFeR9d/VyQzfH/jg==} engines: {node: '>= 6.0'} + ow@0.28.2: + resolution: {integrity: sha512-dD4UpyBh/9m4X2NVjA+73/ZPBRF+uF4zIMFvvQsabMiEK8x41L3rQ8EENOi35kyyoaJwNxEeJcP6Fj1H4U409Q==} + engines: {node: '>=12'} + oxc-resolver@11.13.2: resolution: {integrity: sha512-1SXVyYQ9bqMX3uZo8Px81EG7jhZkO9PvvR5X9roY5TLYVm4ZA7pbPDNlYaDBBeF9U+YO3OeMNoHde52hrcCu8w==} @@ -4409,6 +4459,10 @@ packages: resolution: {integrity: sha512-4MYBu2UuYq6wwNtqlOTUobeUYjXH+RzpSFRQKWOlRw18T47mjGq5Tp4odGS0GK7OGnUwxKG2Cm6JkLx6RLWmBA==} engines: {node: '>=22', pnpm: '>=9'} + vali-date@1.0.0: + resolution: {integrity: sha512-sgECfZthyaCKW10N0fm27cg8HYTFK5qMWgypqkXMQ4Wbl/zZKx7xZICgcoxIIE+WFAP/MBL2EFwC/YvLxw3Zeg==} + engines: {node: '>=0.10.0'} + vaul@1.1.2: resolution: {integrity: sha512-ZFkClGpWyI2WUQjdLJ/BaGuV6AVQiJ3uELGk3OYtP+B6yCO7Cmn9vPFXVJkRaGkOJu3m8bQMgtyzNHixULceQA==} peerDependencies: @@ -4558,6 +4612,18 @@ snapshots: '@alloc/quick-lru@5.2.0': {} + '@apify/consts@2.48.0': {} + + '@apify/log@2.5.28': + dependencies: + '@apify/consts': 2.48.0 + ansi-colors: 4.1.3 + + '@apify/utilities@2.23.4': + dependencies: + '@apify/consts': 2.48.0 + '@apify/log': 2.5.28 + '@aws-crypto/sha256-browser@5.2.0': dependencies: '@aws-crypto/sha256-js': 5.2.0 @@ -5088,6 +5154,10 @@ snapshots: picocolors: 1.1.1 sisteransi: 1.0.5 + '@crawlee/types@3.15.3': + dependencies: + tslib: 2.8.1 + '@drizzle-team/brocli@0.10.2': {} '@emnapi/core@1.7.1': @@ -6605,6 +6675,8 @@ snapshots: '@peculiar/asn1-x509': 2.5.0 '@peculiar/x509': 1.14.0 + '@sindresorhus/is@4.6.0': {} + '@sindresorhus/merge-streams@4.0.0': {} '@slack/logger@4.0.0': @@ -7355,6 +7427,8 @@ snapshots: dependencies: string-width: 4.2.3 + ansi-colors@4.1.3: {} + ansi-escapes@4.3.2: dependencies: type-fest: 0.21.3 @@ -7375,6 +7449,22 @@ snapshots: ansis@3.17.0: {} + apify-client@2.20.0: + dependencies: + '@apify/consts': 2.48.0 + '@apify/log': 2.5.28 + '@apify/utilities': 2.23.4 + '@crawlee/types': 3.15.3 + ansi-colors: 4.1.3 + async-retry: 1.3.3 + axios: 1.13.1 + content-type: 1.0.5 + ow: 0.28.2 + tslib: 2.8.1 + type-fest: 4.41.0 + transitivePeerDependencies: + - debug + argparse@2.0.1: {} aria-hidden@1.2.6: @@ -7391,6 +7481,10 @@ snapshots: pvutils: 1.1.5 tslib: 2.8.1 + async-retry@1.3.3: + dependencies: + retry: 0.13.1 + async@3.2.6: {} asynckit@0.4.0: {} @@ -7566,6 +7660,8 @@ snapshots: consola@3.4.2: {} + content-type@1.0.5: {} + convert-source-map@2.0.0: optional: true @@ -7666,6 +7762,10 @@ snapshots: dompurify@3.1.7: {} + dot-prop@6.0.1: + dependencies: + is-obj: 2.0.0 + dotenv@17.2.3: {} drizzle-kit@0.31.6: @@ -8063,6 +8163,8 @@ snapshots: is-number@7.0.0: {} + is-obj@2.0.0: {} + is-plain-obj@4.1.0: {} is-stream@2.0.1: {} @@ -8226,6 +8328,8 @@ snapshots: dependencies: p-locate: 6.0.0 + lodash.isequal@4.5.0: {} + log-symbols@6.0.0: dependencies: chalk: 5.6.2 @@ -8428,6 +8532,14 @@ snapshots: os-paths@4.4.0: {} + ow@0.28.2: + dependencies: + '@sindresorhus/is': 4.6.0 + callsites: 3.1.0 + dot-prop: 6.0.1 + lodash.isequal: 4.5.0 + vali-date: 1.0.0 + oxc-resolver@11.13.2: optionalDependencies: '@oxc-resolver/binding-android-arm-eabi': 11.13.2 @@ -9015,6 +9127,8 @@ snapshots: v0-sdk@0.15.1: {} + vali-date@1.0.0: {} + 
vaul@1.1.2(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) diff --git a/public/integrations/apify.svg b/public/integrations/apify.svg new file mode 100644 index 00000000..c8894c84 --- /dev/null +++ b/public/integrations/apify.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + +
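
Note on the Actor-run pattern used throughout this change (`run-actor/step.ts`, `scrape-single-url/step.ts`, and both codegen templates): the flow is `actor(...).call(...)` to start the run and wait for completion, then `dataset(...).listItems()` to collect the output. Below is a minimal standalone sketch of that flow with `apify-client`, assuming `APIFY_API_TOKEN` is set in the environment and reusing the `compass/crawler-google-places` example from the README hunk; it is an illustration of the pattern, not code from this PR.

```typescript
import { ApifyClient } from "apify-client";

async function runActorAndCollect() {
  // Same env var the codegen templates read.
  const client = new ApifyClient({ token: process.env.APIFY_API_TOKEN });

  // .call() starts the Actor run and waits for it to reach a terminal state.
  const run = await client
    .actor("compass/crawler-google-places")
    .call({ searchStringsArray: ["restaurants in San Francisco"] });

  // Output items land in the run's default dataset.
  const { items } = await client.dataset(run.defaultDatasetId).listItems();

  return { runId: run.id, status: run.status, data: items };
}

runActorAndCollect().then(({ status, data }) => {
  console.log(`${status}: ${data.length} items`);
});
```

Because `.call()` blocks until the run finishes, the steps can read the default dataset immediately afterwards; for long-running Actors, passing `waitSecs` (as the comment in `scrape-single-url/step.ts` notes) or starting the run asynchronously and polling would be the alternative.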