Commit 2c5a59b

feat: implemented search scraper functionality
1 parent 869441b commit 2c5a59b

5 files changed, +110 -0 lines changed
@@ -0,0 +1,12 @@
import { getSearchScraperRequest } from 'scrapegraph-js';
import 'dotenv/config';

const apiKey = process.env.SGAI_APIKEY;
const requestId = '64801288-6e3b-41f3-9d94-07cff3829e15';

try {
  const requestInfo = await getSearchScraperRequest(apiKey, requestId);
  console.log(requestInfo);
} catch (error) {
  console.error(error);
}
@@ -0,0 +1,19 @@
import { searchScraper } from 'scrapegraph-js';
import { z } from 'zod';
import 'dotenv/config';

const apiKey = process.env.SGAI_APIKEY;
const prompt = 'What is the latest version of Python and what are its main features?';

const schema = z.object({
  version: z.string().describe('The latest version'),
  release_date: z.string().describe('The release date of latest version'),
  major_features: z.array(z.string()),
});

try {
  const response = await searchScraper(apiKey, prompt, schema);
  console.log(response.result);
} catch (error) {
  console.error(error);
}
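The Zod schema in this example is not sent to the API as-is: as shown below in scrapegraph-js/src/searchScraper.js, searchScraper converts it to JSON Schema with zodToJsonSchema and sends the result as output_schema in the request payload. A minimal sketch of that conversion for the schema above (the exact JSON Schema emitted can vary between zod-to-json-schema versions):

import { z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';

// Same schema as in the example above.
const schema = z.object({
  version: z.string().describe('The latest version'),
  release_date: z.string().describe('The release date of latest version'),
  major_features: z.array(z.string()),
});

// Prints roughly: { "type": "object", "properties": { "version": { "type": "string", ... }, ... }, "required": [...] }
console.log(JSON.stringify(zodToJsonSchema(schema), null, 2));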
@@ -0,0 +1,12 @@
import { searchScraper } from 'scrapegraph-js';
import 'dotenv/config';

const apiKey = process.env.SGAI_APIKEY;
const prompt = 'What is the latest version of Python and what are its main features?';

try {
  const response = await searchScraper(apiKey, prompt);
  console.log(response);
} catch (error) {
  console.error(error);
}

scrapegraph-js/index.js (+1 line)

@@ -1,5 +1,6 @@
 export { smartScraper, getSmartScraperRequest } from './src/smartScraper.js';
 export { markdownify, getMarkdownifyRequest } from './src/markdownify.js';
 export { localScraper, getLocalScraperRequest } from './src/localScraper.js';
+export { searchScraper, getSearchScraperRequest } from './src/searchScraper.js';
 export { getCredits } from './src/credits.js';
 export { sendFeedback } from './src/feedback.js';

scrapegraph-js/src/searchScraper.js (+66 lines)

@@ -0,0 +1,66 @@
import axios from 'axios';
import handleError from './utils/handleError.js';
import { ZodType } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';

/**
 * Search and extract information from multiple web sources using AI.
 *
 * @param {string} apiKey - Your ScrapeGraph AI API key
 * @param {string} prompt - Natural language prompt describing what data to extract
 * @param {Object} [schema] - Optional schema object defining the output structure
 * @param {string} [userAgent] - Optional user agent string, e.g. "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
 * @returns {Promise<Object>} Extracted data in JSON format matching the provided schema
 * @throws - Will throw an error in case of an HTTP failure.
 */
export async function searchScraper(apiKey, prompt, schema = null, userAgent = null) {
  const endpoint = 'https://api.scrapegraphai.com/v1/searchscraper';
  const headers = {
    'accept': 'application/json',
    'SGAI-APIKEY': apiKey,
    'Content-Type': 'application/json',
  };

  if (userAgent) headers['User-Agent'] = userAgent;

  const payload = {
    user_prompt: prompt,
  };

  if (schema) {
    if (schema instanceof ZodType) {
      payload.output_schema = zodToJsonSchema(schema);
    } else {
      throw new Error('The schema must be an instance of a valid Zod schema');
    }
  }

  try {
    const response = await axios.post(endpoint, payload, { headers });
    return response.data;
  } catch (error) {
    handleError(error);
  }
}

/**
 * Retrieve the status or the result of a searchScraper request. It also allows you to see the results of previous requests.
 *
 * @param {string} apiKey - Your ScrapeGraph AI API key
 * @param {string} requestId - The request ID associated with the output of a searchScraper request.
 * @returns {Promise<Object>} Information related to the status or result of a scraping request.
 */
export async function getSearchScraperRequest(apiKey, requestId) {
  const endpoint = 'https://api.scrapegraphai.com/v1/searchscraper/' + requestId;
  const headers = {
    'accept': 'application/json',
    'SGAI-APIKEY': apiKey,
  };

  try {
    const response = await axios.get(endpoint, { headers });
    return response.data;
  } catch (error) {
    handleError(error);
  }
}
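Taken together, the two exports cover both the one-shot call and later retrieval by request ID. A minimal sketch combining them with the optional userAgent parameter; note that the request_id field on the searchScraper response is an assumption here (the example above hard-codes an existing ID rather than reading it from a response):

import { searchScraper, getSearchScraperRequest } from 'scrapegraph-js';
import 'dotenv/config';

const apiKey = process.env.SGAI_APIKEY;
const prompt = 'What is the latest version of Python and what are its main features?';
// Optional fourth argument: a custom User-Agent header for the request.
const userAgent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36';

try {
  const response = await searchScraper(apiKey, prompt, null, userAgent);
  console.log(response);

  // Assumption: the response includes a request_id that can be passed to
  // getSearchScraperRequest later to re-fetch the status or result.
  if (response.request_id) {
    const requestInfo = await getSearchScraperRequest(apiKey, response.request_id);
    console.log(requestInfo);
  }
} catch (error) {
  console.error(error);
}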
