-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.py
157 lines (131 loc) · 5.64 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
import json
import requests
from bs4 import BeautifulSoup
import time
DISCORD_WEBHOOK_URL = ''
def load_exchange_rates(file_path):
    """Read an exchange-rate JSON file and return its 'rates' mapping.

    The file is expected to contain a top-level object with a 'rates' key
    (currency code -> rate quoted against a shared base currency).
    """
    with open(file_path, 'r') as fh:
        return json.load(fh)['rates']
def convert_price(price_gbp, rate_gbp, rate_target):
    """Convert a GBP amount into the target currency.

    Both rates are quoted against the same base currency, so the GBP price
    is rescaled by the ratio of the two rates.
    """
    converted = price_gbp * rate_target / rate_gbp
    return converted
def load_existing_products(file_path):
    """Load the previously-seen product map from a JSON file.

    Returns an empty dict when the file does not exist yet (first run) or
    when it contains invalid/empty JSON — the original version crashed with
    json.JSONDecodeError on a truncated or empty products file, which would
    kill the whole monitor on restart after an interrupted save.
    """
    try:
        with open(file_path, 'r') as file:
            return json.load(file)
    except (FileNotFoundError, json.JSONDecodeError):
        return {}
def extract_products(soup, rates):
    """Parse product cards out of a parsed collection page.

    Args:
        soup: BeautifulSoup document of a store collection page.
        rates: mapping of currency code -> rate string ('GBP', 'EUR', 'CHF'
            are required), all quoted against the same base currency.

    Returns:
        A list of dicts with keys: name, detail_link, price_gbp, price_eur,
        price_chf, image_url, sold_out.

    Bug fixes vs. the original: a card without a price span crashed with
    ValueError on float('No Price'), and a card missing its information
    wrapper crashed with AttributeError; such malformed cards are skipped.
    """
    products = []
    rate_gbp = float(rates['GBP'])
    rate_eur = float(rates['EUR'])
    rate_chf = float(rates['CHF'])
    for product_block in soup.find_all(class_='card-wrapper'):
        product_info = product_block.find(class_='card-information__wrapper')
        if product_info is None:
            continue  # malformed card — nothing to extract
        name_tag = product_info.find('h3', class_='card-information__text')
        name = name_tag.get_text(strip=True) if name_tag else 'No Name'
        link_tag = name_tag.find('a', href=True) if name_tag else None
        link = link_tag['href'] if link_tag else 'No Link'
        full_link = 'https://archive89.com' + link
        price_tag = product_info.find('span', class_='money')
        if price_tag is None:
            continue  # no price on the card; original crashed here
        # Strip thousands separators and currency markers before parsing.
        price_text = price_tag.get_text(strip=True).replace(',', '')
        try:
            price_gbp = float(price_text.replace('£', '').replace(' GBP', ''))
        except ValueError:
            continue  # unparseable price text — skip rather than crash
        price_eur = convert_price(price_gbp, rate_gbp, rate_eur)
        price_chf = convert_price(price_gbp, rate_gbp, rate_chf)
        image_tag = product_block.find('img', src=True)
        # Drop the query string (size/cache params) from the image URL.
        image_url = 'https:' + image_tag['src'].split("?")[0] if image_tag else 'No Image'
        sold_out_tag = product_block.find('div', class_='card__badge')
        sold_out = 'Sold out' in sold_out_tag.get_text() if sold_out_tag else False
        products.append({
            'name': name,
            'detail_link': full_link,
            'price_gbp': f"£{price_gbp:.2f}",
            'price_eur': f"€{price_eur:.2f}",
            'price_chf': f"CHF {price_chf:.2f}",
            'image_url': image_url,
            'sold_out': sold_out
        })
    return products
def check_page(url, rates):
    """Fetch one collection page and extract its products.

    Returns a (status, products) pair:
      (True, products)  — page fetched and parsed;
      (False, [])       — page reports "No products found";
      (None, [])        — request failed (error already printed).
    """
    try:
        response = requests.get(url)
        response.raise_for_status()
    except requests.RequestException as exc:
        print(f"Error while checking the page: {exc}")
        return None, []
    soup = BeautifulSoup(response.text, 'html.parser')
    if "No products found" in soup.text:
        return False, []
    return True, extract_products(soup, rates)
def send_discord_webhook(product):
    """Post a Discord embed announcing *product* to DISCORD_WEBHOOK_URL.

    Args:
        product: dict produced by extract_products (name, detail_link,
            price_gbp/eur/chf, image_url, sold_out).

    Returns:
        True when Discord accepted the message; False on 401/403/429
        (auth/rate-limit — caller may retry) or any request error.
    """
    # Green embed for in-stock, red for sold out.
    color = 0x91f795 if not product['sold_out'] else 0xff5151
    webhook_payload = {
        "content": None,
        "embeds": [
            {
                "title": product['name'],
                "description": f"[**Buy now!**]({product['detail_link']})\nMessage sent at <t:{int(time.time())}:F>",
                "color": color,
                "fields": [
                    {"name": "Price in GBP", "value": f"```{product['price_gbp']}```", "inline": True},
                    {"name": "Price in EUR", "value": f"```{product['price_eur']}```", "inline": True},
                    {"name": "Price in CHF", "value": f"```{product['price_chf']}```", "inline": True}
                ],
                "author": {
                    "name": "Archive 89 Monitor",
                    "url": "https://github.com/d-suter/archive-89-monitor"
                },
                "footer": {"text": "Archive 89 Monitor"},
                # Bug fix: the trailing "Z" claims UTC, but strftime() with no
                # time argument formats *local* time — embeds were skewed by
                # the host's UTC offset. time.gmtime() makes it genuinely UTC.
                "timestamp": time.strftime("%Y-%m-%dT%H:%M:%S.000Z", time.gmtime()),
                "thumbnail": {"url": product['image_url']}
            }
        ],
        "attachments": []
    }
    try:
        response = requests.post(DISCORD_WEBHOOK_URL, json=webhook_payload)
        # Auth failures and rate limiting are soft failures the caller can
        # retry after a cooldown; other HTTP errors raise below.
        if response.status_code in [401, 403, 429]:
            return False
        response.raise_for_status()
        return True
    except requests.RequestException as e:
        print(f"Webhook send failed: {e}")
        return False
def save_products(products, file_path):
    """Persist the product map to *file_path* as pretty-printed JSON."""
    with open(file_path, 'w') as out:
        json.dump(products, out, indent=4)
def monitor_website(base_url, rates, existing_products, file_path):
    """Walk the paginated collection once, posting a webhook per unseen product.

    Args:
        base_url: Collection URL without the ``?page=`` query string.
        rates: Exchange-rate mapping forwarded to the page parser.
        existing_products: Mutable dict keyed by product detail link;
            updated in place when new products are found.
        file_path: JSON path the updated product map is saved back to.
    """
    new_products = False
    # Hard safety cap of 99 pages; the loop normally exits earlier when a
    # page reports no products or a request fails.
    for current_page in range(1, 100):
        url = f"{base_url}?page={current_page}"
        print(f"Checking {url}")
        exists, products = check_page(url, rates)
        if exists is None:
            # Request/HTTP error inside check_page — abort this pass.
            print("Error occurred. Stopping the monitoring.")
            break
        elif not exists:
            print(f"No products found on page {current_page}. Moving to next cycle.")
            break
        else:
            for product in products:
                # The detail link doubles as the stable product identifier.
                product_id = product['detail_link']
                if product_id not in existing_products:
                    print(f"New product found: {product['name']}")
                    success = send_discord_webhook(product)
                    if not success:
                        # Single blind retry after a cooldown; the retry's
                        # result is intentionally ignored and the product is
                        # recorded as seen either way.
                        print("Webhook failed, waiting for 1 minute before retrying...")
                        time.sleep(60)
                        send_discord_webhook(product)
                    existing_products[product_id] = product
                    new_products = True
    if new_products:
        save_products(existing_products, file_path)
    # NOTE(review): despite the "Moving to next cycle" message, there is no
    # outer loop here or at the call site — the function sleeps 20 minutes
    # once and then returns. Confirm whether a surrounding while-loop was
    # intended.
    time.sleep(1200)
def main():
    """Load saved state and run one monitoring pass over the collection."""
    exchange_rates = load_exchange_rates("exchange-rate.json")
    existing_products = load_existing_products("products.json")
    base_url = "https://archive89.com/collections/all"
    monitor_website(base_url, exchange_rates, existing_products, "products.json")


# Guard the entry point so importing this module (e.g. for testing) does not
# kick off network requests; behavior when run as a script is unchanged.
if __name__ == "__main__":
    main()