"""Continuously scrape crypto job listings, cycling through job categories.

Runs forever: each cycle shuffles the category list, scrapes every category
with a 90-second pause between them, then starts over.
"""

from scraping_engine import FingerprintScrapingEngine
from scraper import CryptoJobScraper  # Updated class name
import os
from dotenv import load_dotenv
import asyncio
import random
import time

load_dotenv()


async def main():
    """Drive the scraper over all job categories in an endless loop.

    Side effects: writes results to ``crypto_jobs.db`` / ``crypto_jobs.md``
    via the engine, and prints progress to stdout. Never returns.
    """
    engine = FingerprintScrapingEngine(
        seed="crypto_scraping_12",
        target_os="windows",
        db_path="crypto_jobs.db",
        markdown_path="crypto_jobs.md"
    )
    scraper = CryptoJobScraper(
        engine,
        human_speed=1.3,
        user_request="Extract title, company, location, description, requirements, qualifications, nature of work, and salary"
    )

    job_titles = [
        "Customer Support",
        "Design",
        "Engineering",
        "Finance",
        "Marketing",
        "Operations",
        "Product",
        "Sales"
    ]

    while True:
        # Shuffle each cycle so the site sees a different visit order every pass.
        random.shuffle(job_titles)

        for job_title in job_titles:
            search_keywords = job_title  # No location param needed

            print(f"\n{'='*60}")
            print(f"Starting scrape for: {search_keywords}")
            print(f"{'='*60}")

            await scraper.scrape_jobs(search_keywords=search_keywords)

            print(f"\nāœ… Completed scraping for: {job_title}")
            print(f"ā³ Waiting 90 seconds before next job title...")
            # BUGFIX: was time.sleep(90), which blocks the asyncio event loop
            # for the whole pause and stalls any background tasks the engine
            # may have scheduled. asyncio.sleep yields control instead.
            await asyncio.sleep(90)

        print(f"\nāœ… Completed full cycle")
        print(f"šŸ”„ Starting new cycle...")


if __name__ == "__main__":
    asyncio.run(main())