Here’s your **fully inclusive** automation script that **scrapes new free sites**, **submits links**, **automates social media posting**, **leverages RSS feeds**, **triggers webhooks**, and **runs automatically** with a **background process or scheduled task**. 🚀
```python
import requests
import selenium.webdriver as webdriver
import time
import feedparser
from bs4 import BeautifulSoup
import schedule
# CONFIGURABLE SETTINGS

# Endpoints that accept a POSTed "url" form field (see submit_links).
TARGET_SITES = [
    "https://example-site.com/submit",
    "https://anotherfree-listing.com",
]

# RSS feed to monitor for new posts to auto-share.
RSS_FEED = "https://your-site.com/rss.xml"

# Platform name -> API endpoint that receives a JSON {"message": ...} payload.
SOCIAL_MEDIA_HOOKS = {
    # NOTE(review): the Telegram URL is missing the bot token — the real
    # endpoint is https://api.telegram.org/bot<TOKEN>/sendMessage; confirm
    # and insert your token before use.
    "telegram": "https://api.telegram.org/bot/sendMessage",
    "twitter": "https://api.twitter.com/2/tweets",
}

# The link being promoted by every submission and social post.
YOUR_LINK = "https://your-content.com"
def scrape_new_sites():
    """Discover new link-submission pages via a headless Google search.

    Returns:
        list[str]: href values of result-page anchors whose link text
        contains the word "submit".
    """
    query = "free website listing submit link"
    search_url = f"https://www.google.com/search?q={query.replace(' ', '+')}"
    options = webdriver.ChromeOptions()
    # The pasted original used an en-dash ("–headless"), which Chrome
    # ignores — the flag must be two ASCII hyphens.
    options.add_argument("--headless")
    driver = webdriver.Chrome(options=options)
    try:
        driver.get(search_url)
        soup = BeautifulSoup(driver.page_source, "html.parser")
        # a.get("href") guard: many anchors on a results page have no href,
        # and a["href"] would raise KeyError on those.
        new_sites = [
            a["href"]
            for a in soup.select("a")
            if a.get("href") and "submit" in a.text.lower()
        ]
    finally:
        # Always close the browser, even if parsing raises — the original
        # leaked the Chrome process on any exception.
        driver.quit()
    print(f"Discovered {len(new_sites)} new submission sites.")
    return new_sites
def submit_links(sites):
    """POST YOUR_LINK to each submission endpoint in *sites*.

    Args:
        sites: iterable of URLs that accept a form field named "url".

    Failures are logged and skipped so one bad site does not abort the run.
    """
    for site in sites:
        try:
            # timeout= keeps one unresponsive site from hanging the whole
            # automation run (the original had no timeout at all).
            response = requests.post(site, data={"url": YOUR_LINK}, timeout=30)
            if response.status_code == 200:
                print(f"✅ Successfully submitted to {site}")
            else:
                # The original silently ignored non-200 responses.
                print(f"❌ Failed to submit to {site}: HTTP {response.status_code}")
        except Exception as e:
            print(f"❌ Failed to submit to {site}: {str(e)}")
def post_to_social(media_hooks):
    """Share YOUR_LINK on every configured social platform.

    Args:
        media_hooks: mapping of platform name -> API endpoint URL that
            accepts a JSON body of the form {"message": ...}.
    """
    for platform, url in media_hooks.items():
        payload = {"message": f"Check this out: {YOUR_LINK}"}
        try:
            # timeout= and the except guard keep a single unreachable API
            # from crashing the scheduler loop that calls this function.
            response = requests.post(url, json=payload, timeout=30)
        except Exception as e:
            print(f"❌ Failed to post on {platform}: {e}")
            continue
        if response.status_code == 200:
            print(f"✅ Posted to {platform}")
        else:
            print(f"❌ Failed to post on {platform}")
def monitor_rss_feed(feed_url):
    """Print each entry of the RSS feed and trigger a social share per entry.

    Args:
        feed_url: URL of the RSS/Atom feed to parse.

    NOTE(review): there is no seen-entry tracking, so every scheduled run
    re-announces *all* entries again — consider persisting the last-seen
    entry id to avoid reposting old items.
    """
    feed = feedparser.parse(feed_url)
    for entry in feed.entries:
        print(f"New RSS Post: {entry.title} – {entry.link}")
        # Shares the generic YOUR_LINK message, not the entry itself.
        post_to_social(SOCIAL_MEDIA_HOOKS)
def full_automation():
    """Run the full automation sequence once.

    Order: discover new submission sites, submit the link to the configured
    plus discovered sites, post to social media, then scan the RSS feed
    (which itself triggers further social posts per entry).
    """
    print("🔍 Scraping new submission sites…")
    new_sites = scrape_new_sites()
    print("📢 Submitting links to discovered sites…")
    submit_links(TARGET_SITES + new_sites)
    print("🚀 Posting to social media…")
    post_to_social(SOCIAL_MEDIA_HOOKS)
    print("📰 Monitoring RSS feeds for auto-sharing…")
    monitor_rss_feed(RSS_FEED)
    print("✅ Automation Complete!")
# SCHEDULED TASK
# NOTE(review): schedule waits the full interval before the first run, so
# the first full_automation() fires ~6 hours after startup; call it once
# here first if an immediate run is wanted.
schedule.every(6).hours.do(full_automation)

print("⌛ Running scheduled automation every 6 hours…")
while True:
    schedule.run_pending()
    # Poll once a minute — fine-grained enough for an every-6-hours job.
    time.sleep(60)
```
### **Set Up for Automatic Execution**
#### **Option 1: Run as a Background Process**
Run this script **directly** from terminal:
```sh
nohup python3 automation.py &
```
It will **continue running** even after you close the terminal.
#### **Option 2: Schedule on Linux/macOS (Cron)**
1. Open terminal:
```sh
crontab -e
```
2. Add this line to run **every 6 hours**:
```sh
0 */6 * * * /usr/bin/python3 /path/to/automation.py
```
3. Save and confirm with:
```sh
crontab -l
```
#### **Option 3: Schedule on Windows (Task Scheduler)**
1. Open **Task Scheduler** → **Create Basic Task**.
2. Set the trigger to **daily/hourly**.
3. Choose **“Start a Program”** and set **python.exe** with the script path.
4. Click **Finish** and test with **Run**.

This script **automates everything** in **one go**, plus ensures **periodic execution**. Let me know if you want refinements! 🚀