31 lines
1.1 KiB
Python
31 lines
1.1 KiB
Python
# Keep cycling through all job titles
|
|
while True:
|
|
# Shuffle job titles to randomize order
|
|
random.shuffle(job_titles)
|
|
|
|
for job_title in job_titles:
|
|
search_keywords = f"{job_title} location:{fixed_location}"
|
|
|
|
print(f"\n{'='*60}")
|
|
print(f"Starting scrape for: {search_keywords}")
|
|
print(f"{'='*60}")
|
|
|
|
await scraper.scrape_jobs(
|
|
search_keywords=search_keywords,
|
|
credentials={
|
|
"email": os.getenv("SCRAPING_USERNAME"),
|
|
"password": os.getenv("SCRAPING_PASSWORD")
|
|
}
|
|
)
|
|
|
|
print(f"\n✅ Completed scraping for: {job_title}")
|
|
print(f"⏳ Waiting 2 minutes before next job title...")
|
|
|
|
# Wait 2 minutes before next job title
|
|
time.sleep(120)
|
|
|
|
print(f"\n✅ Completed full cycle of all job titles")
|
|
print(f"🔄 Starting new cycle...")
|
|
|
|
# Script entry point: start the async main() coroutine on a fresh event loop.
# BUG FIX: the original read `if _name_ == "_main_":` — single underscores.
# `_name_` is not defined, so the guard raised NameError and the script never
# ran; the correct dunder spellings are `__name__` and `"__main__"`.
if __name__ == "__main__":
    asyncio.run(main())