2 | 2 |
3 | 3 | English | <a href="#cn" style="text-decoration: none">简体中文</a> |
4 | 4 |
5 | | -Crawl is a Nodejs multifunctional crawler library. Provide configuration to batch fetch HTML, JSON, images, etc. |
| 5 | +XCrawl is a Node.js multifunctional crawler library. Provide a configuration to batch fetch HTML, JSON, images, and more.
6 | 6 |
7 | 7 | ## Install |
8 | 8 |
@@ -45,7 +45,7 @@ Create a crawler instance via new XCrawl. |
45 | 45 | class XCrawl { |
46 | 46 | private readonly baseConfig |
47 | 47 | constructor(baseConfig?: IXCrawlBaseConifg) |
48 | | - fetch<T = any>(config: IFetchConfig): Promise<T> |
| 48 | + fetch<T = any>(config: IFetchConfig): Promise<IFetch<T>> |
49 | 49 | fetchFile(config: IFetchFileConfig): Promise<IFetchFile> |
50 | 50 | fetchHTML(url: string): Promise<JSDOM> |
51 | 51 | } |
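With this change, `fetch<T>` resolves to `IFetch<T>` (added later in this diff): an array holding one `{ statusCode, headers, data }` entry per request, instead of the bare response data. A minimal consumption sketch, assuming the package's default export, a `baseUrl` instance option, and the `requestConifg` field used elsewhere in this README; the URLs are placeholders:

```ts
import XCrawl from 'x-crawl' // assumed default export

// baseUrl is an assumed instance option; the URLs below are placeholders
const myXCrawl = new XCrawl({ baseUrl: 'https://xxx.com' })

myXCrawl
  .fetch<{ name: string }>({ requestConifg: [{ url: '/api/a' }, { url: '/api/b' }] })
  .then((res) => {
    // res is IFetch<{ name: string }>: one entry per request
    for (const { statusCode, headers, data } of res) {
      console.log(statusCode, headers['content-type'], data.name)
    }
  })
```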
@@ -108,9 +108,9 @@ function fetchFile(config: IFetchFileConfig): Promise<IFetchFile> |
108 | 108 |
109 | 109 | ```js |
110 | 110 | const requestConifg = [ |
111 | | - { url: '/xxxx', method: 'GET' }, |
112 | | - { url: '/xxxx', method: 'GET' }, |
113 | | - { url: '/xxxx', method: 'GET' } |
| 111 | + { url: '/xxxx' }, |
| 112 | + { url: '/xxxx' }, |
| 113 | + { url: '/xxxx' } |
114 | 114 | ] |
115 | 115 |
116 | 116 | myXCrawl.fetchFile({ |
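The hunk above is cut off at the `fetchFile` call by the diff boundary; the change itself just drops `method: 'GET'` from each request config, suggesting GET is now the default. A hedged sketch of a complete call under that assumption, reusing `myXCrawl` from the sketch above; the `fileConfig.storeDir` option is an assumption for illustration, not something shown in this hunk:

```ts
// method omitted on each request: GET appears to be the default now
const requestConifg = [{ url: '/xxxx' }, { url: '/xxxx' }]

myXCrawl.fetchFile({
  requestConifg,
  // fileConfig/storeDir is assumed here, not confirmed by this diff
  fileConfig: { storeDir: './upload' }
}).then((fileInfos) => console.log(fileInfos)) // resolves to IFetchFile
```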
@@ -188,6 +188,16 @@ interface IFetchBaseConifg { |
188 | 188 | } |
189 | 189 | ``` |
190 | 190 |
| 191 | +- IFetch
| 192 | +
| 193 | +```ts |
| 194 | +type IFetch<T> = { |
| 195 | + statusCode: number | undefined |
| 196 | + headers: IncomingHttpHeaders // node:http |
| 197 | + data: T |
| 198 | +}[] |
| 199 | +``` |
| 200 | +
191 | 201 | - IFetchFile |
192 | 202 |
193 | 203 | ```ts |
@@ -238,7 +248,7 @@ If you have any **questions** or **needs**, please submit **Issues in** https:/
238 | 248 |
239 | 249 | <a href="#en" style="text-decoration: none">English</a> | 简体中文 |
240 | 250 |
241 | | -Crawl is a Nodejs multifunctional crawler library. Provide configuration to batch fetch HTML, JSON, images, etc.
| 251 | +XCrawl is a Node.js multifunctional crawler library. Provide a configuration to batch fetch HTML, JSON, images, and more.
242 | 252 |
243 | 253 | ## Install
244 | 254 |
@@ -281,7 +291,7 @@ docsXCrawl.fetchHTML('/zh/get-started').then((jsdom) => { |
281 | 291 | class XCrawl { |
282 | 292 | private readonly baseConfig |
283 | 293 | constructor(baseConfig?: IXCrawlBaseConifg) |
284 | | - fetch<T = any>(config: IFetchConfig): Promise<T> |
| 294 | + fetch<T = any>(config: IFetchConfig): Promise<IFetch<T>> |
285 | 295 | fetchFile(config: IFetchFileConfig): Promise<IFetchFile> |
286 | 296 | fetchHTML(url: string): Promise<JSDOM> |
287 | 297 | } |
@@ -344,9 +354,9 @@ function fetchFile(config: IFetchFileConfig): Promise<IFetchFile> |
344 | 354 |
345 | 355 | ```js |
346 | 356 | const requestConifg = [ |
347 | | - { url: '/xxxx', method: 'GET' }, |
348 | | - { url: '/xxxx', method: 'GET' }, |
349 | | - { url: '/xxxx', method: 'GET' } |
| 357 | + { url: '/xxxx' }, |
| 358 | + { url: '/xxxx' }, |
| 359 | + { url: '/xxxx' } |
350 | 360 | ] |
351 | 361 |
352 | 362 | myXCrawl.fetchFile({ |
@@ -424,6 +434,16 @@ interface IFetchBaseConifg { |
424 | 434 | } |
425 | 435 | ``` |
426 | 436 |
| 437 | +- IFetch |
| 438 | + |
| 439 | +```ts |
| 440 | +type IFetch<T> = { |
| 441 | + statusCode: number | undefined |
| 442 | + headers: IncomingHttpHeaders // node:http |
| 443 | + data: T |
| 444 | +}[] |
| 445 | +``` |
| 446 | + |
427 | 447 | - IFetchFile |
428 | 448 |
429 | 449 | ```ts |