Scrape all locations

agatha 2023-12-02 15:33:07 -05:00
parent 6c33e418e2
commit f3990bc4d2
2 changed files with 30 additions and 9 deletions

main.py (38 changes)

@@ -1,6 +1,26 @@
 """buyvm stock checker"""
+import requests
 from bs4 import BeautifulSoup
+
+BASE_URL = 'https://my.frantech.ca/'
+URLS = [
+    'https://my.frantech.ca/cart.php?gid=37',  # Las Vegas
+    'https://my.frantech.ca/cart.php?gid=38',  # New York
+    'https://my.frantech.ca/cart.php?gid=48',  # Miami
+    'https://my.frantech.ca/cart.php?gid=39',  # Luxembourg
+]
+
+
+def get_url(url):
+    try:
+        response = requests.get(url)
+        response.raise_for_status()
+    except requests.RequestException as e:
+        print(f'error fetching {url}: {str(e)}')
+        return None
+    return response.text
+
+
 def get_packages(html):
     soup = BeautifulSoup(html, 'html.parser')
@@ -19,7 +39,7 @@ def get_packages(html):
         order_button = package_element.find('a', class_='btn-primary')
         if order_button:
             order_url = order_button['href']
-            package['url'] = order_url
+            package['url'] = BASE_URL + order_url
         else:
             package['url'] = ''
@@ -29,15 +49,15 @@
 def main():
-    with open('tests/data/stock.html', 'r', encoding='utf-8') as file:
-        html = file.read()
+    for url in URLS:
+        html = get_url(url)
         packages = get_packages(html)
         for package in packages:
             print('Package Name:', package['name'])
             print('Package Quantity:', package['qty'])
             print('Order URL:', package['url'])
             print('---------------------------')
 
 
 if __name__ == '__main__':
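
For reference, a minimal sketch of how main() reads after this change, assuming the print loop stays as shown above. The `if html is None: continue` guard is an illustrative addition, not part of this commit; it skips a location when get_url() returns None after a failed request.

def main():
    for url in URLS:
        html = get_url(url)
        if html is None:
            # illustrative guard (not in the commit): skip a location whose fetch failed
            continue
        packages = get_packages(html)
        for package in packages:
            print('Package Name:', package['name'])
            print('Package Quantity:', package['qty'])
            print('Order URL:', package['url'])
            print('---------------------------')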


@@ -1 +1,2 @@
 beautifulsoup4
+requests
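
If this second file is the project's pip requirements list (its name is not shown in this view), both dependencies can be installed with pip install beautifulsoup4 requests.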