update code to take input from env vars
add the ability to use many API keys; replaced some prints with logging; added stats
هذا الالتزام موجود في:
2
.gitignore
مباع
2
.gitignore
مباع
@@ -1,3 +1,5 @@
|
||||
jobs.csv
|
||||
.venv
|
||||
__pycache__
|
||||
.env
|
||||
.vscode
|
6
ai.py
6
ai.py
@@ -7,10 +7,8 @@ from google import genai
|
||||
from google.genai import types
|
||||
|
||||
|
||||
def generate(description, instruction):
|
||||
client = genai.Client(
|
||||
api_key=""
|
||||
)
|
||||
def generate(description, instruction, api_key):
|
||||
client = genai.Client(api_key=api_key)
|
||||
|
||||
model = "gemini-2.5-flash"
|
||||
contents = [
|
||||
|
2
jobs.py
2
jobs.py
@@ -14,7 +14,7 @@ def getJobs(jobTitle, results_wanted, hours_old):
|
||||
# "bdjobs",
|
||||
],
|
||||
search_term=jobTitle,
|
||||
location="Egypt",
|
||||
location="Cairo",
|
||||
results_wanted=results_wanted,
|
||||
google_search_term=f"{jobTitle} jobs near Cairo since {hours_old} hours",
|
||||
hours_old=hours_old,
|
||||
|
112
main.py
112
main.py
@@ -1,55 +1,101 @@
|
||||
from jobs import getJobs
|
||||
from ai import generate
|
||||
from google.genai.errors import ServerError, ClientError
|
||||
from alert import send_email
|
||||
import json
|
||||
import time
|
||||
import os
|
||||
import logging
|
||||
from random import shuffle
|
||||
|
||||
SENDER = ""
|
||||
PASSWORD = ""
|
||||
RECEIVER = ""
|
||||
logging.basicConfig(
|
||||
level=logging.WARNING, format="%(asctime)s - %(levelname)s - %(message)s"
|
||||
)
|
||||
|
||||
SENDER = os.getenv("smtp_email")
|
||||
PASSWORD = os.getenv("smtp_password")
|
||||
RECEIVER = os.getenv("receiver_email")
|
||||
api_keys = os.getenv("api_keys").split(",")
|
||||
|
||||
good_fit_jobs = []
|
||||
|
||||
# stats
|
||||
total_fail = 0
|
||||
total_fail_overload = 0
|
||||
total_overload = 0
|
||||
total_empty_response = 0
|
||||
total_fail_empty_response = 0
|
||||
|
||||
shuffle(api_keys)
|
||||
|
||||
with open("instruction.txt", "r") as f:
|
||||
CV = f.read()
|
||||
|
||||
|
||||
def get_jobs(job_title, cv, results_wanted, hours_old):
|
||||
global total_fail, total_fail_overload, total_overload, total_empty_response, total_fail_empty_response
|
||||
key_number = 0
|
||||
|
||||
jobs = getJobs(job_title, results_wanted, hours_old)
|
||||
for i, job in jobs.iterrows():
|
||||
# print(job["description"])
|
||||
# print("_______________")
|
||||
print("index is :", i)
|
||||
print("index is :", i) # for debugging
|
||||
|
||||
if (i + 1) % 10 == 0 and i != 0:
|
||||
print("Sleeping to avoid API rate limits")
|
||||
logging.warning("sleeping to avoid API rate limits")
|
||||
time.sleep(60)
|
||||
try_count = 3
|
||||
|
||||
while try_count > 0:
|
||||
try:
|
||||
cleaned_description = "\n".join(
|
||||
[line for line in job["description"].splitlines() if line.strip()]
|
||||
)
|
||||
ai_response = generate(cleaned_description, cv)
|
||||
ai_response_dict = json.loads(ai_response)
|
||||
break
|
||||
except json.JSONDecodeError as e:
|
||||
|
||||
try:
|
||||
cleaned_description = "\n".join(
|
||||
[line for line in job["description"].splitlines() if line.strip()]
|
||||
)
|
||||
ai_response = generate(cleaned_description, cv, api_keys[key_number])
|
||||
ai_response_dict = json.loads(ai_response)
|
||||
break
|
||||
|
||||
except json.JSONDecodeError as e:
|
||||
try_count -= 1
|
||||
total_empty_response += 1
|
||||
if try_count == 0:
|
||||
total_fail += 1
|
||||
total_fail_empty_response += 1
|
||||
|
||||
logging.warning("Sleeping after JSONDecodeError")
|
||||
time.sleep(6)
|
||||
|
||||
except ServerError as e:
|
||||
|
||||
if e.details["error"]["code"] == 503:
|
||||
try_count -= 1
|
||||
print("_______________")
|
||||
print(cleaned_description)
|
||||
print("_______________")
|
||||
print(ai_response)
|
||||
print(e)
|
||||
print("Sleeping after fail to avoid API rate limits")
|
||||
time.sleep(6)
|
||||
total_overload += 1
|
||||
if try_count == 0:
|
||||
total_fail += 1
|
||||
total_fail_overload += 1
|
||||
logging.warning("sleeping to after The model is overloaded.")
|
||||
print(e.details)
|
||||
time.sleep(10)
|
||||
else:
|
||||
logging.critical(e.details)
|
||||
return 1
|
||||
|
||||
except ClientError as e:
|
||||
if e.details["error"]["code"] == 429:
|
||||
logging.warning("api limit hit")
|
||||
key_number += 1
|
||||
if key_number > len(api_keys):
|
||||
logging.critical("All api keys hit the limit")
|
||||
return 1
|
||||
else:
|
||||
logging.critical(e.details)
|
||||
return 1
|
||||
|
||||
else:
|
||||
print("All attempts failed.")
|
||||
logging.critical("All attempts failed")
|
||||
continue
|
||||
# print(ai_response_dict)
|
||||
if ai_response_dict["percentage"] > 0:
|
||||
# print("adding job to good_fit_jobs")
|
||||
|
||||
if ai_response_dict["percentage"] > 50:
|
||||
good_fit_jobs.append(
|
||||
{
|
||||
"title": job["title"],
|
||||
@@ -61,8 +107,18 @@ def get_jobs(job_title, cv, results_wanted, hours_old):
|
||||
)
|
||||
|
||||
|
||||
def print_stats():
|
||||
stats = f"""total fail: {total_fail}
|
||||
total empty responses: {total_empty_response} fail: {total_fail_empty_response}
|
||||
Total overloads: {total_overload} fail: {total_fail_overload}"""
|
||||
print(stats)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
get_jobs("devops", CV, results_wanted=2, hours_old=4)
|
||||
# get_jobs("backend", CV, results_wanted=30, hours_old=2)
|
||||
get_jobs("devops", CV, results_wanted=30, hours_old=2)
|
||||
get_jobs("backend", CV, results_wanted=30, hours_old=2)
|
||||
if len(good_fit_jobs) > 0:
|
||||
send_email(SENDER, RECEIVER, PASSWORD, good_fit_jobs)
|
||||
else:
|
||||
print("no good fit jobs")
|
||||
print_stats()
|
||||
|
المرجع في مشكلة جديدة
حظر مستخدم