
Commit 3936bd1

update readme & cargo doc
1 parent fe682e1 commit 3936bd1

7 files changed (+32, -10 lines)


Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [package]
 name = "reqwest-scraper"
-version = "0.2.0"
+version = "0.2.1"
 edition = "2021"
 description = "Web scraping integration with reqwest"
 license-file = "LICENSE"

README.md

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ Extends [reqwest](https://github.com/seanmonstar/reqwest) to support multiple we
 * add dependency
 ```toml
 reqwest = { version = "0.12", features = ["json"] }
-reqwest-scraper="0.2.0"
+reqwest-scraper="0.2.1"
 ```
 * use ScraperResponse
 ```rust
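
The `use ScraperResponse` code block that follows in the README is cut off by this hunk. As a rough sketch of how the bumped dependency might be exercised, assuming the ScraperResponse extension trait adds a `css_selector()`-style method to `reqwest::Response` and that the result exposes the Selectable/SelectItem API touched elsewhere in this commit (none of these entry points are visible in this hunk):

```rust
use reqwest_scraper::ScraperResponse; // assumed trait path; not shown in this hunk

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Fetch with plain reqwest, then switch to reqwest-scraper for extraction.
    let html = reqwest::get("https://example.com")
        .await?
        .css_selector() // assumed ScraperResponse method name
        .await?;

    // Iterate over every <a> element and print its text and href attribute.
    for link in html.select("a")?.iter() {
        println!("{} -> {:?}", link.text(), link.attr("href"));
    }
    Ok(())
}
```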

src/css_selector.rs

Lines changed: 9 additions & 0 deletions
@@ -1,3 +1,5 @@
+//! Select elements in HTML response using CSS selector
+//!
 use crate::error::Result;
 use itertools::Itertools;
 
@@ -46,6 +48,7 @@ impl<'a, T> Selectable<'a, T> {
 }
 
 impl<'a> Selectable<'a, scraper::Html> {
+    /// iterator
     pub fn iter(&self) -> HtmlSelectIterator {
         HtmlSelectIterator {
             select: self.node.select(&self.selector),
@@ -54,6 +57,7 @@ impl<'a> Selectable<'a, scraper::Html> {
 }
 
 impl<'a> Selectable<'a, ElementRef<'a>> {
+    /// iterator
     pub fn iter(&self) -> ElementSelectIterator {
         ElementSelectIterator {
             select: self.node.select(&self.selector),
@@ -81,8 +85,11 @@ impl<'a, 'b> Iterator for ElementSelectIterator<'a, 'b> {
     }
 }
 
+/// Case Sensitivity Match
 pub type CaseSensitivity = scraper::CaseSensitivity;
+/// Html element class attribute
 pub type Classes<'a> = scraper::node::Classes<'a>;
+/// Html element attributes
 pub type Attrs<'a> = scraper::node::Attrs<'a>;
 
 impl<'a> SelectItem<'a> {
@@ -116,6 +123,7 @@ impl<'a> SelectItem<'a> {
         self.element.attr(attr)
     }
 
+    /// Returns the text of this element.
     pub fn text(&self) -> String {
         self.element.text().join(" ")
     }
@@ -137,6 +145,7 @@ impl<'a> SelectItem<'a> {
            .map(|e| SelectItem { element: e })
     }
 
+    /// Use CSS selector to find elements based on the current element
     pub fn find(&self, selector: &str) -> Result<Selectable<'a, ElementRef>> {
         Selectable::wrap(selector, &self.element)
     }
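
The new doc comments above cover `SelectItem::text`, `attr` and `find`, plus the `iter` methods on `Selectable`. A small sketch of how those pieces compose once a `SelectItem` is in hand; the module path and the iterator item type are assumed, and how the initial `Selectable` is constructed is outside this diff:

```rust
use reqwest_scraper::css_selector::SelectItem; // path assumed from `pub mod css_selector;`
use reqwest_scraper::error::Result;

// `item` is assumed to come from Selectable::iter(); building that
// Selectable in the first place is not part of this commit's hunks.
fn describe(item: &SelectItem<'_>) -> Result<()> {
    // Child text nodes joined by spaces, matching the text().join(" ") above.
    println!("text: {}", item.text());

    // Single attribute lookup; None when the attribute is absent.
    if let Some(href) = item.attr("href") {
        println!("href: {href}");
    }

    // Nested CSS lookup relative to the current element; the iterator is
    // assumed to yield SelectItem values again.
    for child in item.find("span")?.iter() {
        println!("child: {}", child.text());
    }
    Ok(())
}
```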

src/error.rs

Lines changed: 12 additions & 8 deletions
@@ -1,41 +1,44 @@
+//! Scraping Error
+//!
 use thiserror::Error;
 
+/// Scraping Error
 #[derive(Error, Debug)]
 pub enum ScraperError {
+    /// JsonPath Error
     #[cfg(feature = "jsonpath")]
     #[error(transparent)]
     JsonPathError(#[from] jsonpath_lib::JsonPathError),
 
+    /// Json Deserialize Error
     #[cfg(feature = "jsonpath")]
     #[error(transparent)]
     JsonDeserializeError(#[from] serde_json::Error),
 
+    /// Css Selector Error
     #[cfg(feature = "css_selector")]
     #[error("css selector error: {0}")]
     CssSelectorError(String),
 
+    /// Html Document Parse Error
     #[cfg(feature = "xpath")]
     #[error(transparent)]
     HtmlParseError(#[from] libxml::parser::XmlParseError),
 
-    // #[cfg(feature = "xpath")]
-    // #[error(transparent)]
-    // XPathParseError(#[from] sxd_xpath::ParserError),
-
-    // #[cfg(feature = "xpath")]
-    // #[error(transparent)]
-    // XPathExecutionError(#[from] sxd_xpath::ExecutionError),
-
+    /// XPath Evaluate Error
     #[cfg(feature = "xpath")]
     #[error("{0}")]
     XPathError(String),
 
+    /// IO Error
     #[error(transparent)]
     IOError(#[from] reqwest::Error),
 
+    /// Http response failed
     #[error("http request error:{0}, body text:{1}")]
     HttpError(u16, String),
 
+    /// Illegal Args Error
     #[error("illegal argument:{0}")]
     IllegalArgsError(String),
 }
@@ -47,4 +50,5 @@ impl<'a> From<scraper::error::SelectorErrorKind<'a>> for ScraperError {
     }
 }
 
+/// Result
 pub type Result<T> = std::result::Result<T, ScraperError>;
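
Every variant above now carries a doc comment. For callers, a plain `match` is enough to branch on failures; a sketch using only variants shown in this diff, with the feature-gated ones left to the catch-all arm so it compiles regardless of enabled features:

```rust
use reqwest_scraper::error::ScraperError;

fn report(err: &ScraperError) {
    match err {
        // Non-success HTTP responses carry the status code and body text.
        ScraperError::HttpError(status, body) => eprintln!("HTTP {status}: {body}"),
        // Underlying reqwest failures are forwarded transparently.
        ScraperError::IOError(source) => eprintln!("transport error: {source}"),
        // Selector/XPath/JsonPath/parse variants depend on enabled features,
        // so they are handled generically via Display here.
        other => eprintln!("scrape failed: {other}"),
    }
}
```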

src/jsonpath.rs

Lines changed: 2 additions & 0 deletions
@@ -1,3 +1,5 @@
+//! Use JsonPath to select fields in json response
+//!
 use crate::error::{Result, ScraperError};
 use jsonpath_lib as jsonpath;
 
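
Only the module doc and its existing imports are visible here; the module delegates to `jsonpath_lib` (imported above as `jsonpath`). A minimal sketch of the underlying selection call, independent of whatever wrapper this module exposes (the wrapper itself is not shown in this diff):

```rust
use serde_json::json;

fn main() {
    let doc = json!({ "items": [ { "name": "a" }, { "name": "b" } ] });

    // jsonpath_lib::select returns every value matching the path expression.
    let names = jsonpath_lib::select(&doc, "$.items[*].name")
        .expect("valid JsonPath expression");

    assert_eq!(names.len(), 2);
}
```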

src/lib.rs

Lines changed: 4 additions & 0 deletions
@@ -1,3 +1,7 @@
+#![deny(missing_docs)]
+
+#![doc = include_str!("../README.md")]
+
 #[cfg(feature = "css_selector")]
 pub mod css_selector;
 pub mod error;
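
These two crate-level attributes tie the commit together: `#![deny(missing_docs)]` turns any undocumented public item into a compile error, which is presumably what motivated the new doc comments throughout this commit, and `#![doc = include_str!("../README.md")]` reuses the README as the crate-level rustdoc page, so the README snippet updated above is also what appears in the generated docs.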

src/xpath.rs

Lines changed: 3 additions & 0 deletions
@@ -1,3 +1,5 @@
+//! Evalute the value in HTML response using xpath expression
+//!
 use std::collections::HashSet;
 
 use itertools::Itertools;
@@ -19,6 +21,7 @@ pub struct XPathResult {
 }
 
 impl XHtml {
+    /// Using xpath to extract results from html
     pub fn select(&self, xpath: &str) -> Result<XPathResult> {
         let context = Context::new(&self.doc)
             .map_err(|_| ScraperError::XPathError(format!("xpath parse failed:{}", xpath)))?;
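
`XHtml::select` is the only entry point documented in this hunk. A sketch of calling it, assuming the `xpath` module is public like `css_selector` and `error`, and with an `XHtml` already parsed from a response body (how `XHtml` is produced, and how values are read back out of `XPathResult`, are both outside this diff):

```rust
use reqwest_scraper::error::Result;
use reqwest_scraper::xpath::XHtml; // module path assumed from lib.rs's pub mod pattern

fn collect_titles(page: &XHtml) -> Result<()> {
    // Evaluate an XPath expression against the already-parsed document;
    // failures are mapped to ScraperError::XPathError, as the map_err above shows.
    let _result = page.select("//h2[@class='title']")?;

    // XPathResult's accessors are not part of this hunk, so the sketch
    // stops at obtaining the result.
    Ok(())
}
```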
