Scraping with Python (https://www.worldometers.info/coronavirus/)
import requests
from bs4 import BeautifulSoup
import csv
import numpy as np
# Fetch the page HTML using requests
def analysis(url):
    r = requests.get(url)
    return r.text
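The function above returns whatever the server sends, even on an error page. A slightly more defensive variant (a sketch, not part of the original script; analysis_safe is a hypothetical name) times out slow requests and raises on bad status codes:

def analysis_safe(url):
    # Give up after 10 seconds and raise on 4xx/5xx responses
    r = requests.get(url, timeout=10)
    r.raise_for_status()
    return r.text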
if __name__ == "__main__":
    myData = analysis("https://www.worldometers.info/coronavirus/")
    soup = BeautifulSoup(myData, "html.parser")
    # Extract the table text with bs4 and split it into rows
myStr = ""
for tr in soup.find_all("tbody"):
myStr += tr.get_text()
myStr = myStr[665:]
itemList = (myStr.split("\n\n\n"))
itembox = []
for item in itemList:
itemSpilt = item.split("\n")[1:]
itembox.append(itemSpilt)
print(itembox)
    # Create a CSV file and write the rows using file handling
with open("datasetcovid.csv", "w", newline='', encoding='utf-8') as fp:
csvwriter = csv.writer(fp)
csvwriter.writerows(itembox)
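To sanity-check the export, the file can be read straight back with csv.reader; a minimal sketch that just prints the first few rows of the datasetcovid.csv written above:

# Read the CSV back and print the first three rows
with open("datasetcovid.csv", newline='', encoding='utf-8') as fp:
    reader = csv.reader(fp)
    for i, row in enumerate(reader):
        if i == 3:
            break
        print(row)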
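The fixed slice at index 665 and the "\n\n\n" split depend on the exact text layout of the page, which can change at any time. A sturdier alternative (a sketch that reuses the soup object from the script; the cell layout of the page is an assumption) is to walk the table cells directly:

# Collect one list of cell strings per table row instead of splitting raw text
rows = []
for tbody in soup.find_all("tbody"):
    for tr in tbody.find_all("tr"):
        cells = [td.get_text(strip=True) for td in tr.find_all("td")]
        if cells:  # skip rows that have no data cells
            rows.append(cells)
print(rows[:3])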

