Compare commits
10 الالتزامات
2448de13ea
...
425411c200
المؤلف | SHA1 | التاريخ | |
---|---|---|---|
425411c200 | |||
02dc99de38 | |||
0016c58321 | |||
2acaafad13 | |||
54bd50c991 | |||
f2db15a367 | |||
249659c8a5 | |||
3e58f09861 | |||
6b7cf11ef4 | |||
d932ea498e |
@@ -2,7 +2,9 @@ name: Docker Image CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- '**'
|
||||
- '!.github/workflows/**'
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
|
||||
|
@@ -8,13 +8,14 @@ on:
|
||||
jobs:
|
||||
run-docker:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Run container
|
||||
- name: pull image
|
||||
run: docker pull ahmedhesham301/jobfit-ai:latest
|
||||
- name: Create .env file
|
||||
run: |
|
||||
docker run \
|
||||
-e smtp_email=${{ secrets.SMTP_EMAIL }} \
|
||||
-e smtp_password=${{ secrets.SMTP_PASSWORD }} \
|
||||
-e receiver_email=${{ secrets.RECEIVER_EMAIL }} \
|
||||
-e api_keys=${{ secrets.API_KEYS }} \
|
||||
ahmedhesham301/jobfit-ai:latest
|
||||
echo "smtp_email=${{ secrets.SMTP_EMAIL }}" >> .env
|
||||
echo "smtp_password=${{ secrets.SMTP_PASSWORD }}" >> .env
|
||||
echo "receiver_email=${{ secrets.RECEIVER_EMAIL }}" >> .env
|
||||
echo "api_keys=${{ secrets.API_KEYS }}" >> .env
|
||||
- name: Run container
|
||||
run: docker run --env-file .env ahmedhesham301/jobfit-ai:latest
|
||||
|
@@ -12,6 +12,7 @@ total_fail_empty_response = 0
|
||||
|
||||
|
||||
def filter_jobs(jobs, cv, api_keys, good_fit_jobs):
|
||||
global total_fail,total_overload,total_fail_overload,total_empty_response,total_fail_empty_response
|
||||
key_number = 0
|
||||
|
||||
for i, job in jobs.iterrows():
|
||||
@@ -72,7 +73,7 @@ def filter_jobs(jobs, cv, api_keys, good_fit_jobs):
|
||||
logging.critical("All attempts failed")
|
||||
continue
|
||||
|
||||
if ai_response_dict["percentage"] > 50:
|
||||
if ai_response_dict["percentage"] > 70:
|
||||
good_fit_jobs.append(
|
||||
{
|
||||
"title": job["title"],
|
||||
@@ -90,4 +91,4 @@ def print_stats():
|
||||
stats = f"""total fail: {total_fail}
|
||||
total empty responses: {total_empty_response} fail: {total_fail_empty_response}
|
||||
Total overloads: {total_overload} fail: {total_fail_overload}"""
|
||||
print(stats)
|
||||
logging.warning(stats)
|
||||
|
@@ -1,42 +1,60 @@
|
||||
I will give you job descriptions and you tell me if I'm a good fit or not as a percentage, why I am a good fit, and what I am missing
|
||||
this is my cv:
|
||||
Ahmed Hesham
|
||||
Ahmed.hesham.farag@gmail.com
|
||||
Cairo, Egypt
|
||||
LinkedIn:https://www.linkedin.com/in/ahmed-hesham0/
|
||||
GitHub:https://github.com/ahmedhesham301
|
||||
PROFILE
|
||||
DevOps Engineer with hands-on experience in automating CI/CD pipelines, containerizing
|
||||
applications, and deploying scalable infrastructure on AWS using Docker, Terraform, and
|
||||
Ansible. Skilled in backend development with Python and Go, building RESTful APIs and
|
||||
automating system workflows.
|
||||
EDUCATION
|
||||
Sadat Academy | Cairo, Egypt
|
||||
Computer science
|
||||
Predicted Grade: Very good
|
||||
Oct 2022 – June 2026
|
||||
TECHNICAL SKILLS
|
||||
-DevOps Tools: Jenkins, Docker, Ansible, Terraform.
|
||||
-Programming & Scripting: Python, GO, Bash, YAML, JSON.
|
||||
-Monitoring & Observability: Prometheus, Grafana.
|
||||
-Cloud Platforms & OS: Linux, AWS, Ubuntu.
|
||||
-Database: SQL, PostgreSQL.
|
||||
-Version control: Git, GitHub.
|
||||
Projects
|
||||
•Built CI/CD pipeline using Jenkins to test and deploy Flask app to EC2 on AWS. The
|
||||
pipeline included building a Docker image, pushing it to Docker Hub, provisioning
|
||||
infrastructure with Terraform, and configuring the server using Ansible.
|
||||
GitHub Repo: weather-app
|
||||
•A Docker image for a Minecraft server that allows you to specify the desired version as
|
||||
an argument. The image automatically downloads the specified version from Mojang's
|
||||
website and launches the server with a single command. GitHub repo
|
||||
•An Amazon price tracker that monitors the price of a specified item. It scrapes the current
|
||||
price and sends you an email notification whenever the price drops below your desired
|
||||
price.
|
||||
EXPERIENCE
|
||||
Orange Digital Centre
|
||||
Jan 2025 – Feb 2025
|
||||
DevOps internship
|
||||
Gained hands-on experience in CI/CD pipelines using Jenkins, Ansible, and Terraform.
|
||||
ADDITIONAL SKILLS
|
||||
Languages: Arabic (Native), English (Fluent). Microsoft Office: Proficient in Word and PowerPoint
|
||||
DevOps Engineer
|
||||
ahmed.hesham.farag@gmail.com
|
||||
linkedin:https://www.linkedin.com/in/ahmed-hesham0/
|
||||
Cairo,Egypt
|
||||
website:ahmeddev.net
|
||||
github:https://github.com/ahmedhesham301
|
||||
SUMMARY:
|
||||
Aspiring DevOps engineer with a strong drive to learn and grow. Currently focused on building a solid
|
||||
foundation through consistent learning and hands-on practice. Always curious and ready to face challenges as
|
||||
part of the learning journey. Looking for an internship opportunity where I can contribute, learn from
|
||||
experienced professionals, and take meaningful steps toward becoming a skilled engineer.
|
||||
EDUCATION:
|
||||
Bachelor of Computer Science
|
||||
Sadat Academy
|
||||
Sep 2022 – Jun 2026
|
||||
EXPERIENCE:
|
||||
DevOps Intern | Ghaymah | Sep 2025 – Present
|
||||
DevOps Intern | Orange Digital Centre, Cairo, Egypt | Jan 2025 – Feb 2025
|
||||
SKILLS:
|
||||
Containerization & Orchestration: Docker
|
||||
Cloud Provider: AWS
|
||||
CI/CD: Jenkins & Github Actions
|
||||
Monitoring & Observability: Prometheus & Grafana
|
||||
Configuration Management & IaC: Ansible & Terraform
|
||||
Version Control: Git & GitHub
|
||||
Programming & Scripting: Python, Bash, Golang
|
||||
Database: Postgresql & SQL
|
||||
|
||||
PROJECTS:
|
||||
-weather-app
|
||||
Automated application deployment using Docker containers for consistent environments.
|
||||
Implemented CI/CD pipelines with Jenkins to streamline build, test, and deployment processes.
|
||||
Provisioned and managed cloud infrastructure using Terraform for scalable and secure resources.
|
||||
Configured server environments and orchestration with Ansible for efficient configuration management.
|
||||
Applied best practices in infrastructure as code, automation, and cloud resource optimization.
|
||||
|
||||
-containerizing Minecraft server
|
||||
Designed and implemented automated scripts for building, deploying, and running a Minecraft server
|
||||
using Bash and Docker.
|
||||
Developed Dockerfile and shell scripts to streamline server setup, configuration, and lifecycle management.
|
||||
Containerized the server for consistent, reproducible deployments across environments.
|
||||
Automated build and deployment processes to minimize manual intervention and improve reliability.
|
||||
Documented setup and usage instructions for ease of onboarding and maintenance.
|
||||
|
||||
-go url shortener
|
||||
Developed a full-stack URL shortening service using Go (backend) and JavaScript/HTML (frontend).
|
||||
Implemented RESTful APIs for creating, retrieving, and redirecting shortened URLs.
|
||||
Integrated PostgreSQL for persistent storage and efficient URL management.
|
||||
Added monitoring and metrics collection with Prometheus.
|
||||
Containerized the application using Docker and managed multi-service deployment with Docker Compose.
|
||||
Applied middleware for request monitoring.
|
||||
Wrote modular, maintainable code following best practices.
|
||||
|
||||
-JobFit AI
|
||||
A Python-based application that leverages Google Gemini AI to analyze job descriptions and candidate
|
||||
profiles, search for relevant jobs, and generate a fit percentage. The system highlights strengths, identifies
|
||||
skill gaps, and automatically sends emails with high-fit job matches to users.
|
21
jobs.py
21
jobs.py
@@ -1,28 +1,37 @@
|
||||
from jobspy import scrape_jobs
|
||||
import logging
|
||||
|
||||
def getJobs(jobTitle, results_wanted, hours_old):
|
||||
|
||||
def getJobs(
|
||||
jobTitle,
|
||||
results_wanted,
|
||||
hours_old,
|
||||
country,
|
||||
location,
|
||||
is_remote,
|
||||
):
|
||||
jobs = scrape_jobs(
|
||||
site_name=[
|
||||
"indeed",
|
||||
"linkedin",
|
||||
# "zip_recruiter",
|
||||
"google",
|
||||
# "google",
|
||||
# "glassdoor",
|
||||
# "bayt",
|
||||
# "naukri",
|
||||
# "bdjobs",
|
||||
],
|
||||
search_term=jobTitle,
|
||||
location="Cairo",
|
||||
location=location,
|
||||
results_wanted=results_wanted,
|
||||
google_search_term=f"{jobTitle} jobs near Cairo since {hours_old} hours",
|
||||
# google_search_term=f"{jobTitle} jobs near Cairo since {hours_old} hours",
|
||||
hours_old=hours_old,
|
||||
country_indeed="Egypt",
|
||||
country_indeed=country,
|
||||
is_remote=is_remote,
|
||||
linkedin_fetch_description=True, # gets more info such as description, direct job url (slower)
|
||||
# proxies=["208.195.175.46:65095", "208.195.175.45:65095", "localhost"],
|
||||
)
|
||||
logging.warning(f"Found {len(jobs)} {jobTitle} jobs")
|
||||
logging.warning(f"Found {len(jobs)} {jobTitle} jobs in {country},{location}")
|
||||
# print(jobs)
|
||||
return jobs
|
||||
# jobs.to_csv(
|
||||
|
19
main.py
19
main.py
@@ -23,20 +23,23 @@ with open("instruction.txt", "r") as f:
|
||||
CV = f.read()
|
||||
|
||||
|
||||
def get_jobs(job_title, results_wanted, hours_old):
|
||||
def get_jobs(job_title, results_wanted, hours_old, country, location, is_remote):
|
||||
global all_jobs
|
||||
jobs = getJobs(job_title, results_wanted, hours_old)
|
||||
jobs = getJobs(job_title, results_wanted, hours_old, country, location, is_remote=False)
|
||||
all_jobs = pd.concat([all_jobs, jobs], ignore_index=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
get_jobs("devops", results_wanted=30, hours_old=2)
|
||||
get_jobs("backend", results_wanted=30, hours_old=2)
|
||||
get_jobs("software engineer", results_wanted=30, hours_old=2)
|
||||
get_jobs("cloud", results_wanted=30, hours_old=2)
|
||||
get_jobs("sre", results_wanted=30, hours_old=2)
|
||||
get_jobs("intern", results_wanted=30, hours_old=2)
|
||||
get_jobs("devops", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||
get_jobs("backend", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||
get_jobs("software engineer",results_wanted=30,hours_old=2,country="egypt",location="cairo",)
|
||||
get_jobs("cloud", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||
get_jobs("site reliability engineer",results_wanted=30,hours_old=2,country="egypt",location="cairo")
|
||||
get_jobs("sre", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||
get_jobs("intern", results_wanted=30, hours_old=2, country="egypt", location="cairo")
|
||||
# get_jobs("devops",results_wanted=200,hours_old=2,country="worldwide",location="",is_remote=True)
|
||||
all_jobs.drop_duplicates(inplace=True, ignore_index=True)
|
||||
logging.warning(f"Total jobs no duplicates: {len(all_jobs)}")
|
||||
filter_jobs(all_jobs, CV, api_keys, good_fit_jobs)
|
||||
if len(good_fit_jobs) > 0:
|
||||
send_email(SENDER, RECEIVER, PASSWORD, good_fit_jobs)
|
||||
|
المرجع في مشكلة جديدة
حظر مستخدم