import requests, csv, json
from bs4 import BeautifulSoup
def scrape_guam_numbers(query='camacho', pages=2, output_csv=True, output_json=True):
    """Scrape name/phone listings from the guamphonebook.com whitepages search.

    Fetches up to *pages* result pages for the surname *query*, extracts each
    ``.directory-listing`` entry's name and phone text, and optionally writes
    the collected records to ``{query}_results.csv`` / ``{query}_results.json``
    in the current working directory.

    Args:
        query: Surname to search for.
        pages: Number of result pages to fetch (1-based, inclusive).
        output_csv: When True, write results to ``{query}_results.csv``.
        output_json: When True, write results to ``{query}_results.json``.

    Returns:
        list[dict]: Records of the form ``{'name': str, 'phone': str}``.

    Raises:
        requests.HTTPError: If the site returns a non-2xx status.
        requests.Timeout: If a page takes longer than 30 seconds.
    """
    base_url = 'https://www.guamphonebook.com/whitepages'
    headers = {'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X)'}
    results = []

    for page in range(1, pages + 1):
        params = {'name': query, 'page': page}
        # requests has no default timeout; without one a stalled server
        # would hang this loop indefinitely.
        resp = requests.get(base_url, params=params, headers=headers, timeout=30)
        # Fail loudly on HTTP errors instead of silently parsing an error page.
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, 'html.parser')

        for entry in soup.select('.directory-listing'):
            name = entry.select_one('.listing-name')
            phone = entry.select_one('.listing-phone')
            # Skip partial entries where either field is missing.
            if name and phone:
                results.append({
                    'name': name.text.strip(),
                    'phone': phone.text.strip(),
                })

    # Save to CSV
    if output_csv:
        with open(f'{query}_results.csv', 'w', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=['name', 'phone'])
            writer.writeheader()
            writer.writerows(results)

    # Save to JSON
    if output_json:
        with open(f'{query}_results.json', 'w', encoding='utf-8') as f:
            json.dump(results, f, indent=2)

    return results
# 🔎 Example: fetch three pages of results and print each name/phone pair.
if __name__ == '__main__':
    listings = scrape_guam_numbers('delacruz', pages=3)
    for item in listings:
        print(f"{item['name']}: {item['phone']}")