V2 Thursday morning

import csv
import json
import os
import time

import requests
from bs4 import BeautifulSoup

def scrape_guam_numbers(query='camacho', pages=1, save=True):
    """Scrape name/phone listings from guamphonebook.com, optionally saving to CSV/JSON."""
    base_url = 'https://www.guamphonebook.com/whitepages'
    headers = {'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X)'}
    results = []

    for page in range(1, pages + 1):
        params = {'name': query, 'page': page}
        r = requests.get(base_url, params=params, headers=headers, timeout=10)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, 'html.parser')

        # Each listing card carries a name element and a phone element
        for entry in soup.select('.directory-listing'):
            name = entry.select_one('.listing-name')
            phone = entry.select_one('.listing-phone')
            if name and phone:
                results.append({
                    'name': name.text.strip(),
                    'phone': phone.text.strip()
                })

        time.sleep(1)  # short pause between pages to keep requests polite

    if save:
        folder = os.path.expanduser('~/Documents/GuamScraper')
        os.makedirs(folder, exist_ok=True)

        # CSV copy (spreadsheet-friendly)
        with open(os.path.join(folder, f'{query}.csv'), 'w', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=['name', 'phone'])
            writer.writeheader()
            writer.writerows(results)

        # JSON copy (script-friendly)
        with open(os.path.join(folder, f'{query}.json'), 'w', encoding='utf-8') as f:
            json.dump(results, f, indent=2)

    return results


# 🔎 Example run
if __name__ == '__main__':
    data = scrape_guam_numbers('delacruz', pages=3)
    for d in data:
        print(f"{d['name']}: {d['phone']}")
