Skip to content

Commit 4ca290d

Browse files
committed
Updated Stuff
1 parent 561194e commit 4ca290d

File tree

2 files changed

+9
-9
lines changed

2 files changed

+9
-9
lines changed

WebScraper/web - Kopie.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,16 @@
66
import xlsxwriter
77

88

9-
baseurl = "https://www.brk.de/rotes-kreuz/adressen/kreisverbaende/detail/0"
10-
workbook = xlsxwriter.Workbook('Example2.xlsx')
9+
baseurl = "SETURL"
10+
workbook = xlsxwriter.Workbook('Example.xlsx')
1111
worksheet = workbook.add_worksheet()
1212
row = 0
1313
column = 0
1414

1515
i = 200
1616

1717
urls = []
18-
18+
# NICE FUNCTION TO SEARCH MULTIPLE DOMAINS
1919
while i < 300:
2020
url = str(baseurl)+str(i)
2121

@@ -27,9 +27,9 @@
2727
response = requests.get(each)
2828
soup = BeautifulSoup(response.text, "html.parser")
2929

30-
name = soup.findAll('span', {"itemprop" : "name"})
31-
telephone = soup.findAll('span', {"itemprop" : "telephone"})
32-
email = soup.findAll('span', {"itemprop" : "email"})
30+
name = soup.findAll('span', {"itemprop" : "name"}) #SEARCH FOR AN HTML TAG TO SCRAPE
31+
telephone = soup.findAll('span', {"itemprop" : "telephone"}) #SEARCH FOR AN HTML TAG TO SCRAPE
32+
email = soup.findAll('span', {"itemprop" : "email"}) #SEARCH FOR AN HTML TAG TO SCRAPE
3333

3434
#all_spans = "NAME: ", name,"\n Telefon: ", telephone ,"\n Email: ", email
3535
all_spans = name, telephone, email

WebScraper/web.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,14 @@
66
import xlsxwriter
77

88

9-
baseurl = "https://www.dgwz.de/publikationen/ihk"
10-
workbook = xlsxwriter.Workbook('Example2.xlsx')
9+
baseurl = "SET URL"
10+
workbook = xlsxwriter.Workbook('Example.xlsx')
1111
worksheet = workbook.add_worksheet()
1212

1313
response = requests.get(baseurl)
1414
soup = BeautifulSoup(response.text, "html.parser")
1515

16-
h1 = soup.findAll('h1')
16+
h1 = soup.findAll('h1') #SEARCH FOR AN HTML TAG TO SCRAPE
1717
row = 0
1818
column = 0
1919
for each in h1:

0 commit comments

Comments
 (0)