fix(firecrawl): updated output for firecrawl extract (#2333)

* fix(firecrawl): fixed optional params for firecrawl

* fix(build): fixed firecrawl tools

* ack PR comments
Waleed authored 2025-12-11 23:06:34 -08:00, committed by GitHub
parent cb9b88127f
commit 3334dfeefa
7 changed files with 9 additions and 35 deletions
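
The change repeated across the tool files below is the handling of optional numeric parameters: the verbose `params.x != null && params.x !== ''` guards are collapsed into plain truthy checks before the value is coerced with `Number()`. A minimal sketch of that pattern, with illustrative names rather than code copied from the repository:

```ts
// Sketch only: `CrawlParamsSketch` and `buildBody` are illustrative names,
// not identifiers from the Firecrawl tool files.
interface CrawlParamsSketch {
  delay?: number | string
  maxConcurrency?: number | string
}

function buildBody(params: CrawlParamsSketch): Record<string, unknown> {
  const body: Record<string, unknown> = {}
  // Before: if (params.delay != null && params.delay !== '') body.delay = Number(params.delay)
  // After: one truthy check skips undefined, null, and '' in a single step.
  if (params.delay) body.delay = Number(params.delay)
  if (params.maxConcurrency) body.maxConcurrency = Number(params.maxConcurrency)
  return body
}

// buildBody({ delay: '2', maxConcurrency: '' }) -> { delay: 2 }
```

Note that a truthy check also treats `0` as "not provided", which is presumably acceptable for these particular parameters.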


@@ -148,7 +148,6 @@ Extract structured data from entire webpages using natural language prompts and
| --------- | ---- | ----------- |
| `success` | boolean | Whether the extraction operation was successful |
| `data` | object | Extracted structured data according to the schema or prompt |
- | `sources` | array | Data sources \(only if showSources is enabled\) |


@@ -65,9 +65,8 @@ export const crawlTool: ToolConfig<FirecrawlCrawlParams, FirecrawlCrawlResponse>
if (typeof params.allowSubdomains === 'boolean') body.allowSubdomains = params.allowSubdomains
if (typeof params.ignoreQueryParameters === 'boolean')
body.ignoreQueryParameters = params.ignoreQueryParameters
- if (params.delay != null && params.delay !== '') body.delay = Number(params.delay)
- if (params.maxConcurrency != null && params.maxConcurrency !== '')
-   body.maxConcurrency = Number(params.maxConcurrency)
+ if (params.delay) body.delay = Number(params.delay)
+ if (params.maxConcurrency) body.maxConcurrency = Number(params.maxConcurrency)
if (params.excludePaths) body.excludePaths = params.excludePaths
if (params.includePaths) body.includePaths = params.includePaths
if (params.webhook) body.webhook = params.webhook


@@ -162,7 +162,6 @@ export const extractTool: ToolConfig<ExtractParams, ExtractResponse> = {
jobId,
success: true,
data: extractData.data || {},
- warning: extractData.warning,
}
return result
}
@@ -210,20 +209,5 @@ export const extractTool: ToolConfig<ExtractParams, ExtractResponse> = {
type: 'object',
description: 'Extracted structured data according to the schema or prompt',
},
- sources: {
-   type: 'array',
-   description: 'Data sources (only if showSources is enabled)',
-   items: {
-     type: 'object',
-     properties: {
-       url: { type: 'string', description: 'Source URL' },
-       title: { type: 'string', description: 'Source title' },
-     },
-   },
- },
- warning: {
-   type: 'string',
-   description: 'Warning messages from the extraction operation',
- },
},
}
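
With `sources` and `warning` removed, the documented outputs for the extract tool match the table in the docs change above: only `success` and `data` remain. An indicative sketch of the trimmed `outputs` block (reconstructed from the surrounding context, not copied from the file):

```ts
// Indicative sketch of the extract tool's outputs schema after this change;
// descriptions follow the docs table above, everything else is assumed.
const outputs = {
  success: {
    type: 'boolean',
    description: 'Whether the extraction operation was successful',
  },
  data: {
    type: 'object',
    description: 'Extracted structured data according to the schema or prompt',
  },
}
```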


@@ -83,8 +83,8 @@ export const mapTool: ToolConfig<MapParams, MapResponse> = {
body.includeSubdomains = params.includeSubdomains
if (typeof params.ignoreQueryParameters === 'boolean')
body.ignoreQueryParameters = params.ignoreQueryParameters
- if (params.limit != null && params.limit !== '') body.limit = Number(params.limit)
- if (params.timeout != null && params.timeout !== '') body.timeout = Number(params.timeout)
+ if (params.limit) body.limit = Number(params.limit)
+ if (params.timeout) body.timeout = Number(params.timeout)
if (params.location) body.location = params.location
return body


@@ -45,13 +45,13 @@ export const scrapeTool: ToolConfig<ScrapeParams, ScrapeResponse> = {
if (typeof params.onlyMainContent === 'boolean') body.onlyMainContent = params.onlyMainContent
if (params.includeTags) body.includeTags = params.includeTags
if (params.excludeTags) body.excludeTags = params.excludeTags
- if (params.maxAge != null && params.maxAge !== '') body.maxAge = Number(params.maxAge)
+ if (params.maxAge) body.maxAge = Number(params.maxAge)
if (params.headers) body.headers = params.headers
- if (params.waitFor != null && params.waitFor !== '') body.waitFor = Number(params.waitFor)
+ if (params.waitFor) body.waitFor = Number(params.waitFor)
if (typeof params.mobile === 'boolean') body.mobile = params.mobile
if (typeof params.skipTlsVerification === 'boolean')
body.skipTlsVerification = params.skipTlsVerification
- if (params.timeout != null && params.timeout !== '') body.timeout = Number(params.timeout)
+ if (params.timeout) body.timeout = Number(params.timeout)
if (params.parsers) body.parsers = params.parsers
if (params.actions) body.actions = params.actions
if (params.location) body.location = params.location


@@ -35,13 +35,13 @@ export const searchTool: ToolConfig<SearchParams, SearchResponse> = {
}
// Add optional parameters if provided (truthy check filters empty strings, null, undefined)
- if (params.limit != null && params.limit !== '') body.limit = Number(params.limit)
+ if (params.limit) body.limit = Number(params.limit)
if (params.sources) body.sources = params.sources
if (params.categories) body.categories = params.categories
if (params.tbs) body.tbs = params.tbs
if (params.location) body.location = params.location
if (params.country) body.country = params.country
- if (params.timeout != null && params.timeout !== '') body.timeout = Number(params.timeout)
+ if (params.timeout) body.timeout = Number(params.timeout)
if (typeof params.ignoreInvalidURLs === 'boolean')
body.ignoreInvalidURLs = params.ignoreInvalidURLs
if (params.scrapeOptions) body.scrapeOptions = params.scrapeOptions
@@ -57,7 +57,6 @@ export const searchTool: ToolConfig<SearchParams, SearchResponse> = {
success: true,
output: {
data: data.data,
- warning: data.warning,
},
}
},
@@ -81,6 +80,5 @@ export const searchTool: ToolConfig<SearchParams, SearchResponse> = {
},
},
},
- warning: { type: 'string', description: 'Warning messages from the search operation' },
},
}


@@ -163,7 +163,6 @@ export interface SearchResponse extends ToolResponse {
error?: string
}
}>
- warning?: string
}
}
@@ -198,11 +197,6 @@ export interface ExtractResponse extends ToolResponse {
jobId: string
success: boolean
data: Record<string, any>
- sources?: Array<{
-   url: string
-   title?: string
- }>
- warning?: string
}
}
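
With the `sources` and `warning` fields dropped, the extract output type reduces to the three fields left as context above. A sketch of the resulting shape (reconstructed, not verbatim from the types file):

```ts
// Resulting shape of the extract tool's output after this change, as implied
// by the remaining context lines; the interface name here is illustrative.
interface ExtractOutputSketch {
  jobId: string
  success: boolean
  data: Record<string, any>
}
```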