Loading...
Loading...
Integrate Apify into an existing JavaScript/TypeScript or Python application using the apify-client package. Use when adding web scraping, automation, or data extraction capabilities to an existing app via the Apify API.
npx skill4agent add apify/agent-skills apify-sdk-integration

- `apify-client` — the API client for calling Actors from your app. Always install.
- `apify` — the SDK for building Actors (wrong package for this use case). Never install for integration work.
Authenticate with the `APIFY_TOKEN` environment variable. Use `search-actors` and `fetch-actor-details.md` to discover Actors and their inputs.

Install: `npm install apify-client`

import { ApifyClient } from 'apify-client';
const client = new ApifyClient({ token: process.env.APIFY_TOKEN });
const run = await client.actor('apify/web-scraper').call({
startUrls: [{ url: 'https://example.com' }],
maxPagesPerCrawl: 10,
});
const { items } = await client.dataset(run.defaultDatasetId).listItems();

`.call()` blocks until the run finishes. To start a run without waiting:

const run = await client.actor('apify/web-scraper').start({
startUrls: [{ url: 'https://example.com' }],
});
// Poll for completion
const finishedRun = await client.run(run.id).waitForFinish();
// Retrieve results
const { items } = await client.dataset(finishedRun.defaultDatasetId).listItems();

`.start()` returns immediately; `.waitForFinish()` polls until the run completes.

// Dataset items (structured data from pushData)
const { items } = await client.dataset(run.defaultDatasetId).listItems({
limit: 100,
offset: 0,
});
// Key-value store (files, screenshots, etc.)
const record = await client.keyValueStore(run.defaultKeyValueStoreId).getRecord('OUTPUT');

Error handling:

try {
const run = await client.actor('apify/web-scraper').call(input);
if (run.status !== 'SUCCEEDED') {
const log = await client.log(run.id).get();
throw new Error(`Actor failed with status ${run.status}: ${log}`);
}
const { items } = await client.dataset(run.defaultDatasetId).listItems();
} catch (error) {
if (error.message?.includes('not found')) {
// Actor ID is wrong or Actor was deleted
} else if (error.statusCode === 401) {
// Invalid or missing APIFY_TOKEN
}
throw error;
}

Install: `pip install apify-client`

from apify_client import ApifyClient
import os
client = ApifyClient(token=os.environ['APIFY_TOKEN'])
run = client.actor('apify/web-scraper').call(run_input={
'startUrls': [{'url': 'https://example.com'}],
'maxPagesPerCrawl': 10,
})
items = client.dataset(run['defaultDatasetId']).list_items().items

To start a run without waiting:

run = client.actor('apify/web-scraper').start(run_input={
'startUrls': [{'url': 'https://example.com'}],
})
# Poll for completion
finished_run = client.run(run['id']).wait_for_finish()
items = client.dataset(finished_run['defaultDatasetId']).list_items().items

Async client:

from apify_client import ApifyClientAsync
client = ApifyClientAsync(token=os.environ['APIFY_TOKEN'])
run = await client.actor('apify/web-scraper').call(run_input={
'startUrls': [{'url': 'https://example.com'}],
})
items = (await client.dataset(run['defaultDatasetId']).list_items()).items

Raw HTTP API — start an Actor run:

POST https://api.apify.com/v2/acts/{actorId}/runs
Authorization: Bearer <APIFY_TOKEN>
Content-Type: application/json
{ "startUrls": [{ "url": "https://example.com" }] }

Get run status:

GET https://api.apify.com/v2/acts/{actorId}/runs/{runId}
Authorization: Bearer <APIFY_TOKEN>

Get dataset items:

GET https://api.apify.com/v2/datasets/{datasetId}/items?format=json
Authorization: Bearer <APIFY_TOKEN>

Useful parameters: `timeoutSecs` and `waitSecs` on `.call()`; `limit` and `offset` when listing dataset items. See the `ApifyClient` reference, `fetch-actor-details.md`, `search-apify-docs`, and `fetch-apify-docs` for more.