@@ -101,7 +101,6 @@ npm install x-crawl
101101
102102```js
103103// 1. Import the module (ES/CJS)
104- import path from 'node:path'
105104import xCrawl from 'x-crawl'
106105
107106// 2. Create a crawler instance
@@ -125,13 +124,7 @@ myXCrawl.startPolling({ d: 1 }, () => {
125124 imgEls.forEach((item) => requestConfig.push(`https:${item.src}`))
126125
127126 // Call the crawlFile API to crawl the images
128- myXCrawl.crawlFile({
129- requestConfig,
130- fileConfig: { storeDir: path.resolve(__dirname, './upload') }
131- })
132-
133- // Close the browser
134- browser.close()
127+ myXCrawl.crawlFile({ requestConfig, fileConfig: { storeDir: './upload' } })
135128 })
136129})
137130```
@@ -259,7 +252,6 @@ myXCrawl.crawlData({ requestConfig }).then(res => {
259252Crawl file data via [crawlFile()](#crawlFile)
260253
261254```js
262- import path from 'node:path'
263255import xCrawl from 'x-crawl'
264256
265257const myXCrawl = xCrawl({
@@ -273,7 +265,7 @@ myXCrawl
273265 .crawlFile({
274266 requestConfig,
275267 fileConfig: {
276- storeDir: path.resolve(__dirname, './upload') // storage folder
268+ storeDir: './upload' // storage folder
277269 }
278270 })
279271 .then((fileInfos) => {
@@ -298,8 +290,6 @@ myXCrawl.startPolling({ h: 2, m: 30 }, (count, stopPolling) => {
298290 // crawlPage/crawlData/crawlFile
299291 myXCrawl.crawlPage('https://xxx.com').then(res => {
300292 const { jsdom, browser, page } = res
301-
302- browser.close()
303293 })
304294})
305295```
@@ -413,7 +403,7 @@ const requestConfig = [ 'https://xxx.com/xxxx', 'https://xxx.com/xxxx', 'https:/
413403myXCrawl
414404 .crawlFile({
415405 requestConfig,
416- fileConfig: { storeDir: path.resolve(__dirname, './upload') }
406+ fileConfig: { storeDir: './upload' }
417407 })
418408 .then((fileInfos) => {
419409 console.log('Promise: ', fileInfos)
@@ -423,7 +413,7 @@ myXCrawl
423413myXCrawl.crawlFile(
424414 {
425415 requestConfig,
426- fileConfig: { storeDir: path.resolve(__dirname, './upload') }
416+ fileConfig: { storeDir: './upload' }
427417 },
428418 (fileInfo) => {
429419 console.log('Callback: ', fileInfo)
@@ -435,7 +425,7 @@ myXCrawl
435425 .crawlFile(
436426 {
437427 requestConfig,
438- fileConfig: { storeDir: path.resolve(__dirname, './upload') }
428+ fileConfig: { storeDir: './upload' }
439429 },
440430 (fileInfo) => {
441431 console.log('Callback: ', fileInfo)
@@ -573,7 +563,6 @@ function crawlFile: (
573563#### Example
574564
575565```js
576- import path from 'node:path'
577566import xCrawl from 'x-crawl'
578567
579568const myXCrawl = xCrawl({
@@ -588,7 +577,7 @@ myXCrawl
588577 .crawlFile({
589578 requestConfig,
590579 fileConfig: {
591- storeDir: path.resolve(__dirname, './upload') // storage folder
580+ storeDir: './upload' // storage folder
592581 }
593582 })
594583 .then((fileInfos) => {