Compare commits
10 الالتزامات
2448de13ea
...
425411c200
المؤلف | SHA1 | التاريخ | |
---|---|---|---|
425411c200 | |||
02dc99de38 | |||
0016c58321 | |||
2acaafad13 | |||
54bd50c991 | |||
f2db15a367 | |||
249659c8a5 | |||
3e58f09861 | |||
6b7cf11ef4 | |||
d932ea498e |
@@ -2,7 +2,9 @@ name: Docker Image CI
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [ "main" ]
|
paths:
|
||||||
|
- '**'
|
||||||
|
- '!.github/workflows/**'
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [ "main" ]
|
branches: [ "main" ]
|
||||||
|
|
||||||
|
@@ -8,13 +8,14 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
run-docker:
|
run-docker:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run container
|
- name: pull image
|
||||||
|
run: docker pull ahmedhesham301/jobfit-ai:latest
|
||||||
|
- name: Create .env file
|
||||||
run: |
|
run: |
|
||||||
docker run \
|
echo "smtp_email=${{ secrets.SMTP_EMAIL }}" >> .env
|
||||||
-e smtp_email=${{ secrets.SMTP_EMAIL }} \
|
echo "smtp_password=${{ secrets.SMTP_PASSWORD }}" >> .env
|
||||||
-e smtp_password=${{ secrets.SMTP_PASSWORD }} \
|
echo "receiver_email=${{ secrets.RECEIVER_EMAIL }}" >> .env
|
||||||
-e receiver_email=${{ secrets.RECEIVER_EMAIL }} \
|
echo "api_keys=${{ secrets.API_KEYS }}" >> .env
|
||||||
-e api_keys=${{ secrets.API_KEYS }} \
|
- name: Run container
|
||||||
ahmedhesham301/jobfit-ai:latest
|
run: docker run --env-file .env ahmedhesham301/jobfit-ai:latest
|
||||||
|
@@ -12,6 +12,7 @@ total_fail_empty_response = 0
|
|||||||
|
|
||||||
|
|
||||||
def filter_jobs(jobs, cv, api_keys, good_fit_jobs):
|
def filter_jobs(jobs, cv, api_keys, good_fit_jobs):
|
||||||
|
global total_fail,total_overload,total_fail_overload,total_empty_response,total_fail_empty_response
|
||||||
key_number = 0
|
key_number = 0
|
||||||
|
|
||||||
for i, job in jobs.iterrows():
|
for i, job in jobs.iterrows():
|
||||||
@@ -72,7 +73,7 @@ def filter_jobs(jobs, cv, api_keys, good_fit_jobs):
|
|||||||
logging.critical("All attempts failed")
|
logging.critical("All attempts failed")
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if ai_response_dict["percentage"] > 50:
|
if ai_response_dict["percentage"] > 70:
|
||||||
good_fit_jobs.append(
|
good_fit_jobs.append(
|
||||||
{
|
{
|
||||||
"title": job["title"],
|
"title": job["title"],
|
||||||
@@ -90,4 +91,4 @@ def print_stats():
|
|||||||
stats = f"""total fail: {total_fail}
|
stats = f"""total fail: {total_fail}
|
||||||
total empty responses: {total_empty_response} fail: {total_fail_empty_response}
|
total empty responses: {total_empty_response} fail: {total_fail_empty_response}
|
||||||
Total overloads: {total_overload} fail: {total_fail_overload}"""
|
Total overloads: {total_overload} fail: {total_fail_overload}"""
|
||||||
print(stats)
|
logging.warning(stats)
|
||||||
|
@@ -1,42 +1,60 @@
|
|||||||
i will give you job descriptions and you tell me if i'm a good fit or not in a percentage, why am I a good fit and what am I missing
|
i will give you job descriptions and you tell me if i'm a good fit or not in a percentage, why am I a good fit and what am I missing
|
||||||
this is my cv:
|
this is my cv:
|
||||||
Ahmed Hesham
|
Ahmed Hesham
|
||||||
Ahmed.hesham.farag@gmail.com
|
DevOps Engineer
|
||||||
Cairo, Egypt
|
ahmed.hesham.farag@gmail.com
|
||||||
LinkedIn:https://www.linkedin.com/in/ahmed-hesham0/
|
linkedin:https://www.linkedin.com/in/ahmed-hesham0/
|
||||||
GitHub:https://github.com/ahmedhesham301
|
Cairo,Egypt
|
||||||
PROFILE
|
website:ahmeddev.net
|
||||||
DevOps Engineer with hands-on experience in automating CI/CD pipelines, containerizing
|
github:https://github.com/ahmedhesham301
|
||||||
applications, and deploying scalable infrastructure on AWS using Docker, Terraform, and
|
SUMMARY:
|
||||||
Ansible. Skilled in backend development with Python and Go, building RESTful APIs and
|
Aspiring DevOps engineer with a strong drive to learn and grow. Currently focused on building a solid
|
||||||
automating system workflows.
|
foundation through consistent learning and hands-on practice. Always curious and ready to face challenges as
|
||||||
EDUCATION
|
part of the learning journey. Looking for an internship opportunity where I can contribute, learn from
|
||||||
Sadat Academy | Cairo, Egypt
|
experienced professionals, and take meaningful steps toward becoming a skilled engineer.
|
||||||
Computer science
|
EDUCATION:
|
||||||
Predicted Grade: Very good
|
Bachelor of Computer Science
|
||||||
Oct 2022 – June 2026
|
Sadat Academy
|
||||||
TECHNICAL SKILLS
|
Sep 2022 – Jun 2026
|
||||||
-DevOps Tools: Jenkins, Docker, Ansible, Terraform.
|
EXPERIENCE:
|
||||||
-Programming & Scripting: Python, GO, Bash, YAML, JSON.
|
DevOps Intern | Ghaymah | Sep 2025 – Present
|
||||||
-Monitoring & Observability: Prometheus, Grafana.
|
DevOps Intern | Orange Digital Centre, Cairo, Egypt | Jan 2025 – Feb 2025
|
||||||
-Cloud Platforms & OS: Linux, AWS, Ubuntu.
|
SKILLS:
|
||||||
-Database: SQL, PostgreSQL.
|
Containerization & Orchestration: Docker
|
||||||
-Version control: Git, GitHub.
|
Cloud Provider: AWS
|
||||||
Projects
|
CI/CD: Jenkins & Github Actions
|
||||||
•Built CI/CD pipeline using Jenkins to test and deploy Flask app to EC2 on AWS. The
|
Monitoring & Observability: Prometheus & Grafana
|
||||||
pipeline included building a Docker image, pushing it to Docker Hub, provisioning
|
Configuration Management & IaC: Ansible & Terraform
|
||||||
infrastructure with Terraform, and configuring the server using Ansible.
|
Version Control: Git & GitHub
|
||||||
GitHub Repo: weather-app
|
Programming & Scripting: Python, Bash, Golang
|
||||||
•A Docker image for a Minecraft server that allows you to specify the desired version as
|
Database: Postgresql & SQL
|
||||||
an argument. The image automatically downloads the specified version from Mojang's
|
|
||||||
website and launches the server with a single command. GitHub repo
|
PROJECTS:
|
||||||
•An Amazon price tracker that monitors the price of a specified item. It scrapes the current
|
-weather-app
|
||||||
price and sends you an email notification whenever the price drops below your desired
|
Automated application deployment using Docker containers for consistent environments.
|
||||||
price.
|
Implemented CI/CD pipelines with Jenkins to streamline build, test, and deployment processes.
|
||||||
EXPERIENCE
|
Provisioned and managed cloud infrastructure using Terraform for scalable and secure resources.
|
||||||
Orange Digital Centre
|
Configured server environments and orchestration with Ansible for efficient configuration management.
|
||||||
Jan 2025 – Feb 2025
|
Applied best practices in infrastructure as code, automation, and cloud resource optimization.
|
||||||
DevOps internship
|
|
||||||
Gained hands-on experience in CI/CD pipelines using Jenkins, Ansible, and Terraform.
|
-containerizing Minecraft server
|
||||||
ADDITIONAL SKILLS
|
Designed and implemented automated scripts for building, deploying, and running a Minecraft server
|
||||||
Languages: Arabic (Native), English (Fluent). Microsoft Office: Proficient in Word and PowerPoint
|
using Bash and Docker.
|
||||||
|
Developed Dockerfile and shell scripts to streamline server setup, configuration, and lifecycle management.
|
||||||
|
Containerized the server for consistent, reproducible deployments across environments.
|
||||||
|
Automated build and deployment processes to minimize manual intervention and improve reliability.
|
||||||
|
Documented setup and usage instructions for ease of onboarding and maintenance.
|
||||||
|
|
||||||
|
-go url shortener
|
||||||
|
Developed a full-stack URL shortening service using Go (backend) and JavaScript/HTML (frontend).
|
||||||
|
Implemented RESTful APIs for creating, retrieving, and redirecting shortened URLs.
|
||||||
|
Integrated PostgreSQL for persistent storage and efficient URL management.
|
||||||
|
Added monitoring and metrics collection with Prometheus.
|
||||||
|
Containerized the application using Docker and managed multi-service deployment with Docker Compose.
|
||||||
|
Applied middleware for request monitoring.
|
||||||
|
Wrote modular, maintainable code following best practices.
|
||||||
|
|
||||||
|
-JobFit AI
|
||||||
|
A Python-based application that leverages Google Gemini AI to analyze job descriptions and candidate
|
||||||
|
profiles, search for relevant jobs, and generate a fit percentage. The system highlights strengths, identifies
|
||||||
|
skill gaps, and automatically sends emails with high-fit job matches to users.
|
21
jobs.py
21
jobs.py
@@ -1,28 +1,37 @@
|
|||||||
from jobspy import scrape_jobs
|
from jobspy import scrape_jobs
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
def getJobs(jobTitle, results_wanted, hours_old):
|
|
||||||
|
def getJobs(
|
||||||
|
jobTitle,
|
||||||
|
results_wanted,
|
||||||
|
hours_old,
|
||||||
|
country,
|
||||||
|
location,
|
||||||
|
is_remote,
|
||||||
|
):
|
||||||
jobs = scrape_jobs(
|
jobs = scrape_jobs(
|
||||||
site_name=[
|
site_name=[
|
||||||
"indeed",
|
"indeed",
|
||||||
"linkedin",
|
"linkedin",
|
||||||
# "zip_recruiter",
|
# "zip_recruiter",
|
||||||
"google",
|
# "google",
|
||||||
# "glassdoor",
|
# "glassdoor",
|
||||||
# "bayt",
|
# "bayt",
|
||||||
# "naukri",
|
# "naukri",
|
||||||
# "bdjobs",
|
# "bdjobs",
|
||||||
],
|
],
|
||||||
search_term=jobTitle,
|
search_term=jobTitle,
|
||||||
location="Cairo",
|
location=location,
|
||||||
results_wanted=results_wanted,
|
results_wanted=results_wanted,
|
||||||
google_search_term=f"{jobTitle} jobs near Cairo since {hours_old} hours",
|
# google_search_term=f"{jobTitle} jobs near Cairo since {hours_old} hours",
|
||||||
hours_old=hours_old,
|
hours_old=hours_old,
|
||||||
country_indeed="Egypt",
|
country_indeed=country,
|
||||||
|
is_remote=is_remote,
|
||||||
linkedin_fetch_description=True, # gets more info such as description, direct job url (slower)
|
linkedin_fetch_description=True, # gets more info such as description, direct job url (slower)
|
||||||
# proxies=["208.195.175.46:65095", "208.195.175.45:65095", "localhost"],
|
# proxies=["208.195.175.46:65095", "208.195.175.45:65095", "localhost"],
|
||||||
)
|
)
|
||||||
logging.warning(f"Found {len(jobs)} {jobTitle} jobs")
|
logging.warning(f"Found {len(jobs)} {jobTitle} jobs in {country},{location}")
|
||||||
# print(jobs)
|
# print(jobs)
|
||||||
return jobs
|
return jobs
|
||||||
# jobs.to_csv(
|
# jobs.to_csv(
|
||||||
|
19
main.py
19
main.py
@@ -23,20 +23,23 @@ with open("instruction.txt", "r") as f:
|
|||||||
CV = f.read()
|
CV = f.read()
|
||||||
|
|
||||||
|
|
||||||
def get_jobs(job_title, results_wanted, hours_old):
|
def get_jobs(job_title, results_wanted, hours_old, country, location, is_remote):
|
||||||
global all_jobs
|
global all_jobs
|
||||||
jobs = getJobs(job_title, results_wanted, hours_old)
|
jobs = getJobs(job_title, results_wanted, hours_old, country, location, is_remote=False)
|
||||||
all_jobs = pd.concat([all_jobs, jobs], ignore_index=True)
|
all_jobs = pd.concat([all_jobs, jobs], ignore_index=True)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
get_jobs("devops", results_wanted=30, hours_old=2)
|
get_jobs("devops", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||||
get_jobs("backend", results_wanted=30, hours_old=2)
|
get_jobs("backend", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||||
get_jobs("software engineer", results_wanted=30, hours_old=2)
|
get_jobs("software engineer",results_wanted=30,hours_old=2,country="egypt",location="cairo",)
|
||||||
get_jobs("cloud", results_wanted=30, hours_old=2)
|
get_jobs("cloud", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||||
get_jobs("sre", results_wanted=30, hours_old=2)
|
get_jobs("site reliability engineer",results_wanted=30,hours_old=2,country="egypt",location="cairo")
|
||||||
get_jobs("intern", results_wanted=30, hours_old=2)
|
get_jobs("sre", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||||
|
get_jobs("intern", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||||
|
# get_jobs("devops",results_wanted=200,hours_old=2,country="worldwide",location="",is_remote=True)
|
||||||
all_jobs.drop_duplicates(inplace=True, ignore_index=True)
|
all_jobs.drop_duplicates(inplace=True, ignore_index=True)
|
||||||
|
logging.warning(f"Total jobs no duplicates: {len(all_jobs)}")
|
||||||
filter_jobs(all_jobs, CV, api_keys, good_fit_jobs)
|
filter_jobs(all_jobs, CV, api_keys, good_fit_jobs)
|
||||||
if len(good_fit_jobs) > 0:
|
if len(good_fit_jobs) > 0:
|
||||||
send_email(SENDER, RECEIVER, PASSWORD, good_fit_jobs)
|
send_email(SENDER, RECEIVER, PASSWORD, good_fit_jobs)
|
||||||
|
المرجع في مشكلة جديدة
حظر مستخدم