
Commit 00bb21b

feat: add user agent
1 parent 78f2318 commit 00bb21b

File tree

8 files changed (+41, -10 lines)


scrapegraph-py/CHANGELOG.md

Lines changed: 7 additions & 0 deletions
@@ -1,3 +1,10 @@
+## [1.26.0](https://github.com/ScrapeGraphAI/scrapegraph-sdk/compare/v1.25.1...v1.26.0) (2025-09-11)
+
+
+### Features
+
+* refactoring of the example folder ([78f2318](https://github.com/ScrapeGraphAI/scrapegraph-sdk/commit/78f23184626061b22dfde6d6b5b3c3df93f2a73a))
+
 ## [1.25.1](https://github.com/ScrapeGraphAI/scrapegraph-sdk/compare/v1.25.0...v1.25.1) (2025-09-08)


scrapegraph-py/scrapegraph_py/models/agenticscraper.py

Lines changed: 9 additions & 1 deletion
@@ -48,7 +48,15 @@ class AgenticScraperRequest(BaseModel):
         default=False,
         description="Whether to use AI for data extraction from the scraped content"
     )
-
+    headers: Optional[dict[str, str]] = Field(
+        None,
+        example={
+            "User-Agent": "scrapegraph-py",
+            "Cookie": "cookie1=value1; cookie2=value2",
+        },
+        description="Optional headers to send with the request, including cookies "
+        "and user agent",
+    )
     mock: bool = Field(default=False, description="Whether to use mock mode for the request")
     @model_validator(mode="after")
     def validate_url(self) -> "AgenticScraperRequest":

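The new headers field is optional and defaults to None; when set, the headers are sent along with the agentic-scraper request. A minimal sketch of populating it when constructing the model directly, assuming url and steps as the field names for the parts of AgenticScraperRequest not shown in this hunk:

from scrapegraph_py.models.agenticscraper import AgenticScraperRequest

# url and steps are assumed field names (not visible in this hunk);
# headers is the field added by this commit.
request = AgenticScraperRequest(
    url="https://example.com/login",
    steps=["fill the email field with user@example.com", "click the Login button"],
    headers={
        "User-Agent": "scrapegraph-py",   # SDK identifier, per the new example value
        "Cookie": "session=abc123",       # forward an authenticated session
    },
)
print(request.model_dump(exclude_none=True))
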
scrapegraph-py/scrapegraph_py/models/crawl.py

Lines changed: 9 additions & 0 deletions
@@ -47,6 +47,15 @@ class CrawlRequest(BaseModel):
     sitemap: bool = Field(
         default=False, description="Whether to use sitemap for better page discovery"
     )
+    headers: Optional[dict[str, str]] = Field(
+        None,
+        example={
+            "User-Agent": "scrapegraph-py",
+            "Cookie": "cookie1=value1; cookie2=value2",
+        },
+        description="Optional headers to send with the request, including cookies "
+        "and user agent",
+    )

     @model_validator(mode="after")
     def validate_url(self) -> "CrawlRequest":

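CrawlRequest gains the same optional field, so a crawl can carry cookies and a custom user agent as well. A sketch under the assumption that the model also exposes url and prompt fields (only sitemap, headers, and the validator appear in this hunk):

from scrapegraph_py.models.crawl import CrawlRequest

# url and prompt are assumed field names; sitemap and headers are shown in the diff.
request = CrawlRequest(
    url="https://example.com",
    prompt="Extract product names and prices",
    sitemap=True,                                # use the sitemap for page discovery
    headers={"User-Agent": "scrapegraph-py"},    # optional; defaults to None
)
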
scrapegraph-py/scrapegraph_py/models/markdownify.py

Lines changed: 1 addition & 2 deletions
@@ -11,8 +11,7 @@ class MarkdownifyRequest(BaseModel):
     headers: Optional[dict[str, str]] = Field(
         None,
         example={
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-            "AppleWebKit/537.36",
+            "User-Agent": "scrapegraph-py",
             "Cookie": "cookie1=value1; cookie2=value2",
         },
         description="Optional headers to send with the request, including cookies "

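For MarkdownifyRequest the change is only to the documented example: the suggested User-Agent is now the SDK's own identifier instead of a browser string, and the same swap is applied to ScrapeRequest, SearchScraperRequest, and SmartScraperRequest below. Callers who need a browser identity can still set one per request; a sketch, assuming the model's website_url field name:

from scrapegraph_py.models.markdownify import MarkdownifyRequest

# website_url is an assumed field name; headers already existed on this model.
request = MarkdownifyRequest(
    website_url="https://example.com/blog",
    headers={
        # override the documented default when a site rejects non-browser agents
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
    },
)
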
scrapegraph-py/scrapegraph_py/models/scheduled_jobs.py

Lines changed: 12 additions & 1 deletion
@@ -15,7 +15,18 @@ class ScheduledJobCreate(BaseModel):
     job_name: str = Field(..., description="Name of the scheduled job")
     service_type: str = Field(..., description="Type of service (smartscraper, searchscraper, etc.)")
     cron_expression: str = Field(..., description="Cron expression for scheduling")
-    job_config: Dict[str, Any] = Field(..., description="Configuration for the job")
+    job_config: Dict[str, Any] = Field(
+        ...,
+        example={
+            "website_url": "https://example.com",
+            "user_prompt": "Extract company information",
+            "headers": {
+                "User-Agent": "scrapegraph-py",
+                "Cookie": "session=abc123"
+            }
+        },
+        description="Configuration for the job"
+    )
     is_active: bool = Field(default=True, description="Whether the job is active")


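ScheduledJobCreate now documents headers inside job_config as well, so scheduled runs can carry the same user agent and cookies. The field names below are taken directly from the diff; only the concrete values are illustrative:

from scrapegraph_py.models.scheduled_jobs import ScheduledJobCreate

job = ScheduledJobCreate(
    job_name="daily-company-scrape",
    service_type="smartscraper",
    cron_expression="0 6 * * *",          # every day at 06:00
    job_config={
        "website_url": "https://example.com",
        "user_prompt": "Extract company information",
        "headers": {
            "User-Agent": "scrapegraph-py",
            "Cookie": "session=abc123",
        },
    },
    is_active=True,
)
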
scrapegraph-py/scrapegraph_py/models/scrape.py

Lines changed: 1 addition & 2 deletions
@@ -15,8 +15,7 @@ class ScrapeRequest(BaseModel):
     headers: Optional[dict[str, str]] = Field(
         None,
         example={
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-            "AppleWebKit/537.36",
+            "User-Agent": "scrapegraph-py",
             "Cookie": "cookie1=value1; cookie2=value2",
         },
         description="Optional headers to send with the request, including cookies "

scrapegraph-py/scrapegraph_py/models/searchscraper.py

Lines changed: 1 addition & 2 deletions
@@ -19,8 +19,7 @@ class SearchScraperRequest(BaseModel):
     headers: Optional[dict[str, str]] = Field(
         None,
         example={
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-            "AppleWebKit/537.36",
+            "User-Agent": "scrapegraph-py",
             "Cookie": "cookie1=value1; cookie2=value2",
         },
         description="Optional headers to send with the request, including cookies "

scrapegraph-py/scrapegraph_py/models/smartscraper.py

Lines changed: 1 addition & 2 deletions
@@ -23,8 +23,7 @@ class SmartScraperRequest(BaseModel):
     headers: Optional[dict[str, str]] = Field(
         None,
         example={
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-            "AppleWebKit/537.36",
+            "User-Agent": "scrapegraph-py",
             "Cookie": "cookie1=value1; cookie2=value2",
         },
         description="Optional headers to send with the request, including cookies "

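Taken together, the commit standardizes the documented User-Agent on "scrapegraph-py" across every request model that accepts headers. A sketch of supplying it explicitly on a smart-scraper request; website_url and user_prompt are assumed names for fields outside this hunk:

from scrapegraph_py.models.smartscraper import SmartScraperRequest

# website_url and user_prompt are assumed field names; headers appears in the diff.
request = SmartScraperRequest(
    website_url="https://example.com",
    user_prompt="Extract the page title and the main heading",
    headers={
        "User-Agent": "scrapegraph-py",
        "Cookie": "cookie1=value1; cookie2=value2",
    },
)
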