-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathscrape.py
125 lines (94 loc) · 3.71 KB
/
scrape.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
import time
from selenium.webdriver.support.ui import Select
from bs4 import BeautifulSoup
from selenium.webdriver.chrome.options import Options
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.chrome.service import Service
import json
# Run Chrome in the new headless mode so scraping works without a display.
options = Options()
options.add_argument("--headless=new")
# Single shared WebDriver instance, reused (sequentially) by every scraper
# function below; webdriver_manager downloads a matching chromedriver.
driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()),options=options)
def CentralBank()-> dict:
    """Scrape the latest USD buy/sell rates from the Central Bank of Sri Lanka.

    Uses the shared module-level ``driver``. Selects the date-range search on
    the CBSL page, submits it, and reads the last row of the resulting table.

    Returns:
        dict with keys ``source``, ``sell_rate``, ``buy_rate``, ``ref``.
    """
    driver.get("https://www.cbsl.gov.lk/en/rates-and-indicators/exchange-rates/daily-buy-and-sell-exchange-rates")
    # The rates widget is rendered inside an iframe; switch into it first.
    driver.switch_to.frame("iFrameResizer2")
    elem = driver.find_element(By.ID, "rangeType_range")
    elem.click()
    select = Select(driver.find_element(By.ID, "rangeValue"))
    select.select_by_index(1)
    submit = driver.find_element(By.NAME, "submit_button")
    submit.click()
    tables = driver.find_elements(By.CLASS_NAME, "table-responsive")
    # FIX: the original looped over every container but overwrote `soup` each
    # iteration, so only the final container was ever parsed. Parse the last
    # container explicitly — same result, no wasted work.
    soup = BeautifulSoup(tables[-1].get_attribute('innerHTML'), "html.parser")
    rates = []
    for usd in soup.find_all("table"):
        # First three cells of each table: date, buy rate, sell rate
        # (presumably — TODO confirm against the live page layout).
        cells = usd.find_all("td")
        rates.append(cells[0:3])
    cbslreturn = {
        "source":"CBSL",
        "sell_rate":rates[-1][2].text,
        "buy_rate":rates[-1][1].text,
        "ref":"https://www.cbsl.gov.lk/en/rates-and-indicators/exchange-rates/daily-buy-and-sell-exchange-rates"
    }
    return cbslreturn
def SampathBank()->dict:
    """Scrape USD sell/buy rates from Sampath Bank's exchange-rates page.

    Reads the last row of the ``exch-rates`` table via the shared driver.

    Returns:
        dict with keys ``source``, ``sell_rate``, ``buy_rate``, ``ref``.
    """
    driver.get("https://www.sampath.lk/en/exchange-rates")
    rates_element = driver.find_element(By.CLASS_NAME, "exch-rates")
    parsed = BeautifulSoup(rates_element.get_attribute("innerHTML"), "html.parser")
    # The newest quote sits in the final table row; cells 1 and 3 hold the
    # sell and buy rates respectively.
    last_row_cells = parsed.find_all("tr")[-1].find_all("td")
    return {
        "source": "Sampath",
        "sell_rate": last_row_cells[1].text,
        "buy_rate": last_row_cells[3].text,
        "ref": "https://www.sampath.lk/en/exchange-rates",
    }
def PeoplesBank()->dict:
    """Scrape USD buy/sell rates from People's Bank's exchange-rates page.

    Reads row index 2 of the first ``table``-classed element via the shared
    driver; cell 0 is the buy rate and cell 1 the sell rate.

    Returns:
        dict with keys ``source``, ``sell_rate``, ``buy_rate``, ``ref``.
    """
    driver.get("https://www.peoplesbank.lk/exchange-rates/")
    peoples = driver.find_element(By.CLASS_NAME,"table")
    people = BeautifulSoup(peoples.get_attribute("innerHTML"),"html.parser")
    peoplerows = people.find_all("tr")
    # FIX: hoist the cell lookup — the original called find_all("td") on the
    # same row twice, once per rate.
    cells = peoplerows[2].find_all("td")
    peoplesreturn={
        "source":"Peoples",
        "sell_rate":cells[1].text,
        "buy_rate":cells[0].text,
        "ref":"https://www.peoplesbank.lk/exchange-rates/"
    }
    return peoplesreturn
def NSBank()->dict:
    """Scrape USD buy/sell rates from NSB's exchange-rates page.

    Reads row index 2 of the first ``table``-classed element via the shared
    driver; the last cell is the sell rate, the one before it the buy rate.

    Returns:
        dict with keys ``source``, ``sell_rate``, ``buy_rate``, ``ref``.
    """
    driver.get("https://www.nsb.lk/rates-tarriffs/exchange-rates/")
    table_element = driver.find_element(By.CLASS_NAME, "table")
    markup = BeautifulSoup(table_element.get_attribute("innerHTML"), "html.parser")
    cells = markup.find_all("tr")[2].find_all("td")
    return {
        "source": "NSB",
        "sell_rate": cells[-1].text,
        "buy_rate": cells[-2].text,
        "ref": "https://www.nsb.lk/rates-tarriffs/exchange-rates/",
    }
def GoogleFinance()->dict:
    """Scrape the USD/LKR quote from Google Finance.

    Google publishes a single mid-market figure, so the same value is used
    for both ``sell_rate`` and ``buy_rate``.

    Returns:
        dict with keys ``source``, ``sell_rate``, ``buy_rate``, ``ref``.

    NOTE(review): the class name "kf1m0" is an obfuscated, auto-generated
    Google class and is liable to change without notice.
    """
    driver.get("https://www.google.com/finance/quote/USD-LKR")
    rate = driver.find_element(By.CLASS_NAME,"kf1m0")
    finance = BeautifulSoup(rate.get_attribute("innerHTML"),"html.parser")
    # FIX: removed the original `finance.find("div")` call — its return value
    # was discarded, so it was a dead statement with no effect.
    googlereturn = {
        "source":"Google",
        "sell_rate":finance.text,
        "buy_rate":finance.text,
        "ref":"https://www.google.com/finance/quote/USD-LKR"
    }
    return googlereturn
def save():
    """Scrape all five sources and write the results to ``export.json``.

    The scrapers share one module-level driver, so they run sequentially.
    Output is a JSON array of the five per-source dicts.
    """
    results = [
        GoogleFinance(),
        SampathBank(),
        CentralBank(),
        NSBank(),
        PeoplesBank(),
    ]
    # FIX: open with an explicit encoding (the default is platform-dependent)
    # and stream via json.dump instead of building the string in memory;
    # ensure_ascii=False keeps any non-ASCII text readable in the file.
    with open("export.json", "w", encoding="utf-8") as file:
        json.dump(results, file, ensure_ascii=False)