Compare commits
74 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| f86396107d | |||
| c581fa5e1c | |||
| 4310623ece | |||
| 7e81f6ed88 | |||
| d5be5d1c40 | |||
| 20491c8113 | |||
| c3ac8cd83f | |||
| abc9e94104 | |||
| 167b1becbc | |||
|
|
daedc67e7a | ||
|
|
154aadb6b7 | ||
|
|
3c98e1c165 | ||
|
|
ac08eb489d | ||
|
|
0b9fd670b9 | ||
|
|
16938f42bf | ||
|
|
cf6c9e7bd7 | ||
|
|
d020819ec0 | ||
|
|
42f0bc7508 | ||
|
|
3562c806c7 | ||
|
|
5334733573 | ||
|
|
5f6645f22a | ||
| 980947d459 | |||
| 28a9e16bab | |||
| d71dc31de2 | |||
| 2d3f4602de | |||
| 2ddb069d1d | |||
| 4aa6acec9e | |||
| 2fb7a76fd9 | |||
| 923ef9aed5 | |||
| e659b82905 | |||
| b6f62844bd | |||
| 578fa1837b | |||
| 538f2cef71 | |||
| d36f4bcc5c | |||
| 7fd437e561 | |||
| 41f4a31602 | |||
| 3d0bfbca2c | |||
| d159cadacc | |||
| c65cce9de7 | |||
| 013d5692bf | |||
| 9f781d784d | |||
| f2674c379c | |||
| 653e4f0ea0 | |||
| 3270788902 | |||
| 70f8ce1a6b | |||
| 1d04352ed7 | |||
| 2fcbcaa302 | |||
| a78053474b | |||
| ae9349ca7e | |||
| 0afe014947 | |||
|
|
1da5dc0a30 | ||
| ba7d0c9121 | |||
|
|
b70411e1f1 | ||
| 01369a743d | |||
| 7c6703354e | |||
| 080977cdb7 | |||
| df7510da2e | |||
| e1515442f4 | |||
|
|
6d5b8f2314 | ||
|
|
5841e7b9d4 | ||
|
|
b2877877dd | ||
|
|
0f3aa43b89 | ||
|
|
e0b3102007 | ||
|
|
a5d87b8e25 | ||
|
|
22639a489a | ||
| 8d81d16682 | |||
| fa5ba578bb | |||
| 53ca792988 | |||
| e160357256 | |||
| c912663c3d | |||
| 66d2aa0a66 | |||
| 1d33e52100 | |||
| b35100c92f | |||
| 81acc68aaa |
39
.env
Normal file
39
.env
Normal file
@ -0,0 +1,39 @@
|
||||
# Database Configuration (AWS RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=recipes_password
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=my-recipes-rds.chw4omcqsuqv7.eu-central-1.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=dvirlabs@gmail.com
|
||||
SMTP_PASSWORD=agaanrhbbazbdytv
|
||||
SMTP_FROM=dvirlabs@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com
|
||||
GOOGLE_CLIENT_SECRET=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
|
||||
GOOGLE_REDIRECT_URI=http://localhost:8000/auth/google/callback
|
||||
FRONTEND_URL=http://localhost
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=db244cf5-eb11-4738-a2ea-5b0716c9ec0a
|
||||
AZURE_CLIENT_SECRET=Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=http://localhost:8000/auth/azure/callback
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
R2_ENDPOINT=https://d4704b8c40b2f95b2c7bf7ee4ecc52f8.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY=1997b1e48a337c0dbe1f7552a08631b5
|
||||
R2_SECRET_KEY=369694e39fedfedb254158c147171f5760de84fa2346d5d5d5a961f1f517dbc6
|
||||
R2_BUCKET_NAME=recipes-backups
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
# Frontend API URL (for frontend container to connect to backend via docker-compose)
|
||||
# Use the service name 'backend' from docker-compose.yaml
|
||||
VITE_API_URL=http://backend:8000
|
||||
39
.env.aws
Normal file
39
.env.aws
Normal file
@ -0,0 +1,39 @@
|
||||
# Database Configuration (AWS RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=recipes_password
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=my-recipes-rds.chw4omcqsuqv7.eu-central-1.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=dvirlabs@gmail.com
|
||||
SMTP_PASSWORD=agaanrhbbazbdytv
|
||||
SMTP_FROM=dvirlabs@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com
|
||||
GOOGLE_CLIENT_SECRET=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
|
||||
GOOGLE_REDIRECT_URI=http://localhost:8000/auth/google/callback
|
||||
FRONTEND_URL=http://localhost
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=db244cf5-eb11-4738-a2ea-5b0716c9ec0a
|
||||
AZURE_CLIENT_SECRET=Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=http://localhost:8000/auth/azure/callback
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
R2_ENDPOINT=https://d4704b8c40b2f95b2c7bf7ee4ecc52f8.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY=1997b1e48a337c0dbe1f7552a08631b5
|
||||
R2_SECRET_KEY=369694e39fedfedb254158c147171f5760de84fa2346d5d5d5a961f1f517dbc6
|
||||
R2_BUCKET_NAME=recipes-backups
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
# Frontend API URL (for frontend container to connect to backend via docker-compose)
|
||||
# Use the service name 'backend' from docker-compose.yaml
|
||||
VITE_API_URL=http://backend:8000
|
||||
40
.env.example
Normal file
40
.env.example
Normal file
@ -0,0 +1,40 @@
|
||||
# Database Configuration (AWS RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=your_secure_password_here
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=your-rds-endpoint.us-east-1.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=your-email@gmail.com
|
||||
SMTP_PASSWORD=your-app-password
|
||||
SMTP_FROM=your-email@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=your-google-client-id
|
||||
GOOGLE_CLIENT_SECRET=your-google-client-secret
|
||||
GOOGLE_REDIRECT_URI=https://your-domain.com/auth/google/callback
|
||||
FRONTEND_URL=https://your-domain.com
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=your-azure-client-id
|
||||
AZURE_CLIENT_SECRET=your-azure-client-secret
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=https://your-domain.com/auth/azure/callback
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
# Get these from your Cloudflare dashboard -> R2 -> Manage R2 API Tokens
|
||||
R2_ENDPOINT=https://<account-id>.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY=your-r2-access-key-id
|
||||
R2_SECRET_KEY=your-r2-secret-access-key
|
||||
R2_BUCKET_NAME=recipes-backups
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
# Frontend API URL (for frontend container to connect to backend via docker-compose)
|
||||
# Use the service name 'backend' from docker-compose.yaml
|
||||
VITE_API_URL=http://backend:8000
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@ -1 +1,3 @@
|
||||
node_modules/
|
||||
node_modules/
|
||||
my-recipes/
|
||||
my-recipes-chart/
|
||||
@ -3,7 +3,7 @@ steps:
|
||||
name: Build & Push Frontend
|
||||
image: woodpeckerci/plugin-kaniko
|
||||
when:
|
||||
branch: [ master, develop ]
|
||||
branch: [ master, develop, aws ]
|
||||
event: [ push, pull_request, tag ]
|
||||
path:
|
||||
include: [ frontend/** ]
|
||||
@ -24,7 +24,7 @@ steps:
|
||||
name: Build & Push Backend
|
||||
image: woodpeckerci/plugin-kaniko
|
||||
when:
|
||||
branch: [ master, develop ]
|
||||
branch: [ master, develop, aws ]
|
||||
event: [ push, pull_request, tag ]
|
||||
path:
|
||||
include: [ backend/** ]
|
||||
@ -63,7 +63,7 @@ steps:
|
||||
- |
|
||||
TAG="${CI_COMMIT_BRANCH}-${CI_COMMIT_SHA:0:7}"
|
||||
echo "💡 Setting frontend tag to: $TAG"
|
||||
yq -i ".frontend.tag = \"$TAG\"" manifests/${CI_REPO_NAME}/values.yaml
|
||||
yq -i ".frontend.image.tag = \"$TAG\"" manifests/${CI_REPO_NAME}/values.yaml
|
||||
git add manifests/${CI_REPO_NAME}/values.yaml
|
||||
git commit -m "frontend: update tag to $TAG" || echo "No changes"
|
||||
git push origin HEAD
|
||||
@ -93,7 +93,7 @@ steps:
|
||||
- |
|
||||
TAG="${CI_COMMIT_BRANCH}-${CI_COMMIT_SHA:0:7}"
|
||||
echo "💡 Setting backend tag to: $TAG"
|
||||
yq -i ".backend.tag = \"$TAG\"" manifests/${CI_REPO_NAME}/values.yaml
|
||||
yq -i ".backend.image.tag = \"$TAG\"" manifests/${CI_REPO_NAME}/values.yaml
|
||||
git add manifests/${CI_REPO_NAME}/values.yaml
|
||||
git commit -m "backend: update tag to $TAG" || echo "No changes"
|
||||
git push origin HEAD
|
||||
|
||||
237
AWS_DEPLOYMENT.md
Normal file
237
AWS_DEPLOYMENT.md
Normal file
@ -0,0 +1,237 @@
|
||||
# AWS EC2 Deployment Guide
|
||||
|
||||
This guide explains how to deploy the my-recipes application on AWS EC2 with RDS database.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. AWS Account with EC2 and RDS access
|
||||
2. Docker and Docker Compose installed on EC2 instance
|
||||
3. RDS PostgreSQL database instance created
|
||||
4. Domain name (optional, but recommended)
|
||||
5. SSL certificate (optional, but recommended for production)
|
||||
|
||||
## Step 1: Set Up AWS RDS
|
||||
|
||||
1. **Create RDS PostgreSQL Instance:**
|
||||
- Go to AWS RDS Console
|
||||
- Click "Create database"
|
||||
- Choose PostgreSQL engine
|
||||
- Select version 15 or higher
|
||||
- Choose appropriate instance size (db.t3.micro for testing, db.t3.small+ for production)
|
||||
- Set Master username (e.g., `recipes_user`)
|
||||
- Set Master password (strong password)
|
||||
- Database name: `recipes_db`
|
||||
- Enable public accessibility if needed (not recommended for production)
|
||||
- Configure VPC security group to allow connections from EC2
|
||||
|
||||
2. **Security Group Configuration:**
|
||||
- Create/modify security group for RDS
|
||||
- Add inbound rule: PostgreSQL (port 5432) from EC2 security group
|
||||
- Note the endpoint: `your-db-instance.xxxx.region.rds.amazonaws.com`
|
||||
|
||||
3. **Initialize Database Schema:**
|
||||
```bash
|
||||
# Connect to RDS and run schema.sql
|
||||
psql -h your-rds-endpoint.region.rds.amazonaws.com \
|
||||
-U recipes_user \
|
||||
-d recipes_db \
|
||||
-f backend/schema.sql
|
||||
```
|
||||
|
||||
## Step 2: Set Up AWS EC2
|
||||
|
||||
1. **Launch EC2 Instance:**
|
||||
- Choose Ubuntu 22.04 LTS or Amazon Linux 2023
|
||||
- Instance type: t3.micro (minimum), t3.small+ (recommended)
|
||||
- Configure security group:
|
||||
- SSH (port 22) - your IP only
|
||||
- HTTP (port 80) - 0.0.0.0/0
|
||||
- HTTPS (port 443) - 0.0.0.0/0
|
||||
- Custom TCP (port 8000) - optional, for direct API access
|
||||
- Add storage: 20GB minimum
|
||||
- Assign Elastic IP (recommended)
|
||||
|
||||
2. **Connect to EC2:**
|
||||
```bash
|
||||
ssh -i your-key.pem ec2-user@your-ec2-public-ip
|
||||
# or for Ubuntu:
|
||||
ssh -i your-key.pem ubuntu@your-ec2-public-ip
|
||||
```
|
||||
|
||||
3. **Install Docker and Docker Compose:**
|
||||
```bash
|
||||
# Update system
|
||||
sudo yum update -y # For Amazon Linux
|
||||
# sudo apt update && sudo apt upgrade -y # For Ubuntu
|
||||
|
||||
# Install Docker
|
||||
sudo yum install docker -y # Amazon Linux
|
||||
# sudo apt install docker.io -y # Ubuntu
|
||||
|
||||
sudo systemctl start docker
|
||||
sudo systemctl enable docker
|
||||
sudo usermod -a -G docker ec2-user # or ubuntu
|
||||
|
||||
# Install Docker Compose
|
||||
sudo curl -L "https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
|
||||
sudo chmod +x /usr/local/bin/docker-compose
|
||||
|
||||
# Logout and login again for group changes to take effect
|
||||
exit
|
||||
```
|
||||
|
||||
## Step 3: Deploy Application
|
||||
|
||||
1. **Clone Repository:**
|
||||
```bash
|
||||
cd ~
|
||||
git clone https://your-repo-url/my-recipes.git
|
||||
cd my-recipes
|
||||
```
|
||||
|
||||
2. **Configure Environment Variables:**
|
||||
```bash
|
||||
# Copy the example env file from root to backend directory
|
||||
cp .env.example backend/.env
|
||||
nano backend/.env # or vim backend/.env
|
||||
```
|
||||
|
||||
Update the following variables:
|
||||
```env
|
||||
# Database Configuration (from RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=your_rds_password
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=your-rds-endpoint.region.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Frontend URL (your domain or EC2 public IP)
|
||||
FRONTEND_URL=http://your-ec2-ip
|
||||
# or FRONTEND_URL=https://your-domain.com
|
||||
|
||||
# Update OAuth redirect URIs
|
||||
GOOGLE_REDIRECT_URI=http://your-ec2-ip/auth/google/callback
|
||||
AZURE_REDIRECT_URI=http://your-ec2-ip/auth/azure/callback
|
||||
|
||||
# Update other configurations as needed
|
||||
```
|
||||
|
||||
3. **Build and Start Services:**
|
||||
```bash
|
||||
cd /home/ec2-user/my-recipes # or /home/ubuntu/my-recipes
|
||||
docker-compose up -d --build
|
||||
```
|
||||
|
||||
4. **Verify Services:**
|
||||
```bash
|
||||
docker-compose ps
|
||||
docker-compose logs -f
|
||||
```
|
||||
|
||||
## Step 4: Configure Domain and SSL (Optional but Recommended)
|
||||
|
||||
1. **Set Up Domain:**
|
||||
- Point your domain's A record to EC2 Elastic IP
|
||||
- Update FRONTEND_URL in .env file
|
||||
|
||||
2. **Install Nginx and Certbot:**
|
||||
```bash
|
||||
sudo yum install nginx certbot python3-certbot-nginx -y # Amazon Linux
|
||||
# sudo apt install nginx certbot python3-certbot-nginx -y # Ubuntu
|
||||
```
|
||||
|
||||
3. **Configure Nginx:**
|
||||
Create `/etc/nginx/conf.d/recipes.conf`:
|
||||
```nginx
|
||||
server {
|
||||
listen 80;
|
||||
server_name your-domain.com;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:80;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
}
|
||||
|
||||
location /api {
|
||||
proxy_pass http://localhost:8000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. **Get SSL Certificate:**
|
||||
```bash
|
||||
sudo certbot --nginx -d your-domain.com
|
||||
sudo systemctl restart nginx
|
||||
```
|
||||
|
||||
## Step 5: Monitoring and Maintenance
|
||||
|
||||
1. **View Logs:**
|
||||
```bash
|
||||
docker-compose logs -f backend
|
||||
docker-compose logs -f frontend
|
||||
```
|
||||
|
||||
2. **Restart Services:**
|
||||
```bash
|
||||
docker-compose restart
|
||||
```
|
||||
|
||||
3. **Update Application:**
|
||||
```bash
|
||||
git pull
|
||||
docker-compose down
|
||||
docker-compose up -d --build
|
||||
```
|
||||
|
||||
4. **Backup Database:**
|
||||
Backups are automatic based on BACKUP_INTERVAL setting. Manual backup:
|
||||
```bash
|
||||
docker-compose exec backend python backup_db.py
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
1. **Can't connect to RDS:**
|
||||
- Check security group rules
|
||||
- Verify VPC and subnet configuration
|
||||
- Test connection: `telnet rds-endpoint 5432`
|
||||
|
||||
2. **Services won't start:**
|
||||
```bash
|
||||
docker-compose logs
|
||||
```
|
||||
|
||||
3. **Database connection errors:**
|
||||
- Verify DATABASE_URL format
|
||||
- Check DB credentials
|
||||
- Ensure RDS is accessible from EC2
|
||||
|
||||
4. **Port conflicts:**
|
||||
```bash
|
||||
sudo netstat -tulpn | grep :80
|
||||
sudo netstat -tulpn | grep :8000
|
||||
```
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
1. Use strong passwords for DB and admin accounts
|
||||
2. Enable SSL/TLS for all connections
|
||||
3. Keep EC2 security groups restrictive
|
||||
4. Don't expose RDS publicly
|
||||
5. Use IAM roles for AWS service access
|
||||
6. Regularly update Docker images and system packages
|
||||
7. Enable CloudWatch monitoring
|
||||
8. Set up automated backups
|
||||
9. Use secrets management (AWS Secrets Manager or Parameter Store)
|
||||
|
||||
## Cost Optimization
|
||||
|
||||
1. Use reserved instances for long-term deployments
|
||||
2. Enable auto-shutdown for non-production environments
|
||||
3. Monitor and optimize RDS instance size
|
||||
4. Use S3 or R2 for backups (already configured)
|
||||
5. Consider using Application Load Balancer for multiple instances
|
||||
40
aws/.env
Normal file
40
aws/.env
Normal file
@ -0,0 +1,40 @@
|
||||
# Database Configuration (AWS RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=recipes_password
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=my-recipes-rds.chw4omcguqv7.eu-central-1.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=dvirlabs@gmail.com
|
||||
SMTP_PASSWORD=agaanrhbbazbdytv
|
||||
SMTP_FROM=dvirlabs@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com
|
||||
GOOGLE_CLIENT_SECRET=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
|
||||
GOOGLE_REDIRECT_URI=http://localhost:8000/auth/google/callback
|
||||
FRONTEND_URL=http://localhost
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=db244cf5-eb11-4738-a2ea-5b0716c9ec0a
|
||||
AZURE_CLIENT_SECRET=Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=http://localhost:8000/auth/azure/callback
|
||||
|
||||
# AWS S3 Backup Configuration
|
||||
S3_ENDPOINT=https://s3.eu-central-1.amazonaws.com
|
||||
S3_ACCESS_KEY=1997b1e48a337c0dbe1f7552a08631b5
|
||||
S3_SECRET_KEY=369694e39fedfedb254158c147171f5760de84fa2346d5d5d5a961f1f517dbc6
|
||||
S3_BUCKET_NAME=recipes-backups
|
||||
S3_REGION=eu-central-1
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
# Frontend API URL - Replace with your EC2 public IP
|
||||
# This is used by the browser to connect to the backend
|
||||
API_BASE=http://54.93.173.105:8000
|
||||
225
aws/EKS_DEPLOYMENT.md
Normal file
225
aws/EKS_DEPLOYMENT.md
Normal file
@ -0,0 +1,225 @@
|
||||
# AWS EKS Deployment Guide
|
||||
|
||||
This directory contains the Helm chart and configuration for deploying My Recipes application to Amazon EKS (Elastic Kubernetes Service).
|
||||
|
||||
## Structure
|
||||
|
||||
```
|
||||
aws/
|
||||
├── my-recipes-chart/ # Base Helm chart with default values
|
||||
│ ├── Chart.yaml
|
||||
│ ├── values.yaml # Base configuration (don't modify directly)
|
||||
│ └── templates/ # Kubernetes resource templates
|
||||
└── values.yaml # Project-specific values (override base values)
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. **AWS CLI** - Configured with appropriate credentials
|
||||
2. **kubectl** - Kubernetes command-line tool
|
||||
3. **Helm 3** - Package manager for Kubernetes
|
||||
4. **eksctl** (optional) - For creating EKS clusters
|
||||
|
||||
## Setup Steps
|
||||
|
||||
### 1. Create EKS Cluster (if not already exists)
|
||||
|
||||
```bash
|
||||
eksctl create cluster \
|
||||
--name my-recipes-cluster \
|
||||
--region eu-central-1 \
|
||||
--nodegroup-name standard-workers \
|
||||
--node-type t3.medium \
|
||||
--nodes 2 \
|
||||
--nodes-min 1 \
|
||||
--nodes-max 3
|
||||
```
|
||||
|
||||
### 2. Configure kubectl
|
||||
|
||||
```bash
|
||||
aws eks update-kubeconfig --region eu-central-1 --name my-recipes-cluster
|
||||
```
|
||||
|
||||
### 3. Create Namespace
|
||||
|
||||
```bash
|
||||
kubectl create namespace my-apps
|
||||
```
|
||||
|
||||
### 4. Install Ingress Controller (if not already installed)
|
||||
|
||||
For AWS ALB Ingress Controller:
|
||||
```bash
|
||||
# Install AWS Load Balancer Controller
|
||||
helm repo add eks https://aws.github.io/eks-charts
|
||||
helm install aws-load-balancer-controller eks/aws-load-balancer-controller \
|
||||
-n kube-system \
|
||||
--set clusterName=my-recipes-cluster
|
||||
```
|
||||
|
||||
Or for NGINX Ingress Controller:
|
||||
```bash
|
||||
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
|
||||
helm install nginx-ingress ingress-nginx/ingress-nginx \
|
||||
-n ingress-nginx --create-namespace
|
||||
```
|
||||
|
||||
### 5. Install cert-manager (for SSL certificates)
|
||||
|
||||
```bash
|
||||
helm repo add jetstack https://charts.jetstack.io
|
||||
helm install cert-manager jetstack/cert-manager \
|
||||
--namespace cert-manager \
|
||||
--create-namespace \
|
||||
--set installCRDs=true
|
||||
```
|
||||
|
||||
### 6. Configure values.yaml
|
||||
|
||||
Edit `values.yaml` in this directory and update:
|
||||
|
||||
- **Container images**: Update ECR repository URLs
|
||||
- **Domain names**: Replace `<YOUR_DOMAIN>` with your actual domain
|
||||
- **S3 credentials**: Add your AWS access key and secret key
|
||||
- **Database**: Configure RDS connection details
|
||||
- **OAuth**: Update redirect URIs with your domain
|
||||
|
||||
### 7. Create S3 Bucket for Backups
|
||||
|
||||
```bash
|
||||
aws s3 mb s3://my-recipes-backups --region eu-central-1
|
||||
```
|
||||
|
||||
### 8. Push Docker Images to ECR
|
||||
|
||||
```bash
|
||||
# Create ECR repositories
|
||||
aws ecr create-repository --repository-name my-recipes-backend --region eu-central-1
|
||||
aws ecr create-repository --repository-name my-recipes-frontend --region eu-central-1
|
||||
|
||||
# Login to ECR
|
||||
aws ecr get-login-password --region eu-central-1 | docker login --username AWS --password-stdin <AWS_ACCOUNT_ID>.dkr.ecr.eu-central-1.amazonaws.com
|
||||
|
||||
# Build and push backend
|
||||
cd backend
|
||||
docker build -t my-recipes-backend .
|
||||
docker tag my-recipes-backend:latest <AWS_ACCOUNT_ID>.dkr.ecr.eu-central-1.amazonaws.com/my-recipes-backend:latest
|
||||
docker push <AWS_ACCOUNT_ID>.dkr.ecr.eu-central-1.amazonaws.com/my-recipes-backend:latest
|
||||
|
||||
# Build and push frontend
|
||||
cd ../frontend
|
||||
docker build -t my-recipes-frontend .
|
||||
docker tag my-recipes-frontend:latest <AWS_ACCOUNT_ID>.dkr.ecr.eu-central-1.amazonaws.com/my-recipes-frontend:latest
|
||||
docker push <AWS_ACCOUNT_ID>.dkr.ecr.eu-central-1.amazonaws.com/my-recipes-frontend:latest
|
||||
```
|
||||
|
||||
### 9. Deploy with Helm
|
||||
|
||||
```bash
|
||||
# From the aws directory
|
||||
helm install my-recipes ./my-recipes-chart \
|
||||
-f values.yaml \
|
||||
-n my-apps
|
||||
```
|
||||
|
||||
### 10. Verify Deployment
|
||||
|
||||
```bash
|
||||
# Check pods
|
||||
kubectl get pods -n my-apps
|
||||
|
||||
# Check services
|
||||
kubectl get svc -n my-apps
|
||||
|
||||
# Check ingress
|
||||
kubectl get ingress -n my-apps
|
||||
|
||||
# View logs
|
||||
kubectl logs -f deployment/my-recipes-backend -n my-apps
|
||||
```
|
||||
|
||||
## Upgrading
|
||||
|
||||
To update the deployment:
|
||||
|
||||
```bash
|
||||
# Update values.yaml with new configuration
|
||||
helm upgrade my-recipes ./my-recipes-chart \
|
||||
-f values.yaml \
|
||||
-n my-apps
|
||||
```
|
||||
|
||||
## Using AWS RDS (Recommended for Production)
|
||||
|
||||
1. Create RDS PostgreSQL instance
|
||||
2. Configure security groups to allow EKS node group access
|
||||
3. Update `database` section in `values.yaml` with RDS connection details
|
||||
4. The chart will automatically use external database instead of in-cluster PostgreSQL
|
||||
|
||||
## Using S3 for Backups
|
||||
|
||||
The application is configured to use AWS S3 for database backups instead of Cloudflare R2. Ensure:
|
||||
|
||||
1. S3 bucket exists and is accessible
|
||||
2. AWS credentials have appropriate permissions:
|
||||
- `s3:PutObject`
|
||||
- `s3:GetObject`
|
||||
- `s3:ListBucket`
|
||||
- `s3:DeleteObject`
|
||||
|
||||
## Environment Variables
|
||||
|
||||
The chart automatically creates secrets from `values.yaml`:
|
||||
- Database credentials
|
||||
- OAuth client secrets
|
||||
- Email SMTP credentials
|
||||
- S3 access keys
|
||||
|
||||
All sensitive data should be stored in AWS Secrets Manager in production and referenced via External Secrets Operator.
|
||||
|
||||
## Monitoring
|
||||
|
||||
To view application logs:
|
||||
|
||||
```bash
|
||||
# Backend logs
|
||||
kubectl logs -f deployment/my-recipes-backend -n my-apps
|
||||
|
||||
# Frontend logs
|
||||
kubectl logs -f deployment/my-recipes-frontend -n my-apps
|
||||
|
||||
# Database logs (if using in-cluster DB)
|
||||
kubectl logs -f statefulset/my-recipes-db -n my-apps
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Pods not starting
|
||||
```bash
|
||||
kubectl describe pod <pod-name> -n my-apps
|
||||
```
|
||||
|
||||
### Database connection issues
|
||||
```bash
|
||||
kubectl exec -it deployment/my-recipes-backend -n my-apps -- env | grep DB_
|
||||
```
|
||||
|
||||
### Ingress not working
|
||||
```bash
|
||||
kubectl describe ingress -n my-apps
|
||||
```
|
||||
|
||||
## Uninstall
|
||||
|
||||
```bash
|
||||
helm uninstall my-recipes -n my-apps
|
||||
```
|
||||
|
||||
## Cost Optimization
|
||||
|
||||
For non-production environments:
|
||||
- Reduce replica counts to 1
|
||||
- Use smaller instance types (t3.small)
|
||||
- Use in-cluster PostgreSQL instead of RDS
|
||||
- Configure cluster autoscaling
|
||||
70
aws/README.md
Normal file
70
aws/README.md
Normal file
@ -0,0 +1,70 @@
|
||||
# AWS Deployment Guide
|
||||
|
||||
## Quick Start
|
||||
|
||||
1. Pull the latest changes:
|
||||
```bash
|
||||
cd /root/my-recipes
|
||||
git pull origin aws
|
||||
```
|
||||
|
||||
2. Navigate to the AWS deployment folder:
|
||||
```bash
|
||||
cd aws
|
||||
```
|
||||
|
||||
3. Run docker-compose:
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
4. Check logs:
|
||||
```bash
|
||||
docker compose logs -f
|
||||
```
|
||||
|
||||
## What's Included
|
||||
|
||||
- `docker-compose.yaml` - Docker compose configuration with all services
|
||||
- `.env` - Environment variables with AWS RDS, R2, OAuth configs
|
||||
|
||||
## Services
|
||||
|
||||
- **Backend**: http://localhost:8000 (FastAPI)
|
||||
- **Frontend**: http://localhost (Nginx)
|
||||
|
||||
## Configuration
|
||||
|
||||
All configuration is in the `.env` file:
|
||||
- AWS RDS endpoint already configured
|
||||
- Cloudflare R2 backup storage configured
|
||||
- Google & Azure OAuth configured
|
||||
- Email SMTP configured
|
||||
|
||||
## Useful Commands
|
||||
|
||||
```bash
|
||||
# Start services
|
||||
docker compose up -d
|
||||
|
||||
# Stop services
|
||||
docker compose down
|
||||
|
||||
# View logs
|
||||
docker compose logs -f
|
||||
|
||||
# Restart services
|
||||
docker compose restart
|
||||
|
||||
# Rebuild and start
|
||||
docker compose up -d --build
|
||||
|
||||
# Remove everything (including volumes)
|
||||
docker compose down -v
|
||||
```
|
||||
|
||||
## Backups
|
||||
|
||||
Automatic backups are scheduled weekly on Sundays at 2:00 AM and stored in Cloudflare R2.
|
||||
|
||||
Manual backups location: `../backend/backups/`
|
||||
69
aws/docker-compose.yaml
Normal file
69
aws/docker-compose.yaml
Normal file
@ -0,0 +1,69 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
backend:
|
||||
build: ../backend
|
||||
container_name: recipes-backend
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "8000:8000"
|
||||
environment:
|
||||
# Database Configuration (RDS)
|
||||
DATABASE_URL: postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}
|
||||
DB_USER: ${DB_USER}
|
||||
DB_PASSWORD: ${DB_PASSWORD}
|
||||
DB_NAME: ${DB_NAME}
|
||||
DB_HOST: ${DB_HOST}
|
||||
DB_PORT: ${DB_PORT:-5432}
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST: ${SMTP_HOST}
|
||||
SMTP_PORT: ${SMTP_PORT:-587}
|
||||
SMTP_USER: ${SMTP_USER}
|
||||
SMTP_PASSWORD: ${SMTP_PASSWORD}
|
||||
SMTP_FROM: ${SMTP_FROM}
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID: ${GOOGLE_CLIENT_ID}
|
||||
GOOGLE_CLIENT_SECRET: ${GOOGLE_CLIENT_SECRET}
|
||||
GOOGLE_REDIRECT_URI: ${GOOGLE_REDIRECT_URI}
|
||||
|
||||
# Microsoft Azure OAuth
|
||||
AZURE_CLIENT_ID: ${AZURE_CLIENT_ID}
|
||||
AZURE_CLIENT_SECRET: ${AZURE_CLIENT_SECRET}
|
||||
AZURE_TENANT_ID: ${AZURE_TENANT_ID:-consumers}
|
||||
AZURE_REDIRECT_URI: ${AZURE_REDIRECT_URI}
|
||||
|
||||
# Frontend URL
|
||||
FRONTEND_URL: ${FRONTEND_URL}
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
R2_ENDPOINT: ${R2_ENDPOINT}
|
||||
R2_ACCESS_KEY: ${R2_ACCESS_KEY}
|
||||
R2_SECRET_KEY: ${R2_SECRET_KEY}
|
||||
R2_BUCKET_NAME: ${R2_BUCKET_NAME}
|
||||
|
||||
# Backup Schedule
|
||||
BACKUP_INTERVAL: ${BACKUP_INTERVAL:-weekly}
|
||||
volumes:
|
||||
- ../backend/backups:/app/backups
|
||||
- ../backend/restores:/app/restores
|
||||
networks:
|
||||
- recipes-network
|
||||
|
||||
frontend:
|
||||
build: ../frontend
|
||||
container_name: recipes-frontend
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
environment:
|
||||
API_BASE: ${API_BASE:-http://localhost:8000}
|
||||
depends_on:
|
||||
- backend
|
||||
networks:
|
||||
- recipes-network
|
||||
|
||||
networks:
|
||||
recipes-network:
|
||||
driver: bridge
|
||||
14
aws/final-app/my-recipes-chart-aws/Chart.yaml
Normal file
14
aws/final-app/my-recipes-chart-aws/Chart.yaml
Normal file
@ -0,0 +1,14 @@
|
||||
apiVersion: v2
|
||||
name: my-recipes
|
||||
description: Complete recipe management application with PostgreSQL, FastAPI backend, and React frontend
|
||||
type: application
|
||||
version: 1.0.0
|
||||
appVersion: "1.0.0"
|
||||
keywords:
|
||||
- recipes
|
||||
- fastapi
|
||||
- react
|
||||
- postgresql
|
||||
maintainers:
|
||||
- name: Development Team
|
||||
|
||||
@ -0,0 +1,45 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-add-missing-tables
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
data:
|
||||
add-tables.sql: |
|
||||
-- Create grocery lists table
|
||||
CREATE TABLE IF NOT EXISTS grocery_lists (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
items TEXT[] NOT NULL DEFAULT '{}',
|
||||
owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
is_pinned BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Create grocery list shares table
|
||||
CREATE TABLE IF NOT EXISTS grocery_list_shares (
|
||||
id SERIAL PRIMARY KEY,
|
||||
list_id INTEGER NOT NULL REFERENCES grocery_lists(id) ON DELETE CASCADE,
|
||||
shared_with_user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
can_edit BOOLEAN DEFAULT FALSE,
|
||||
shared_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(list_id, shared_with_user_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_grocery_lists_owner_id ON grocery_lists (owner_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_grocery_list_shares_list_id ON grocery_list_shares (list_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_grocery_list_shares_user_id ON grocery_list_shares (shared_with_user_id);
|
||||
|
||||
-- Create notifications table
|
||||
CREATE TABLE IF NOT EXISTS notifications (
|
||||
id SERIAL PRIMARY KEY,
|
||||
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
type TEXT NOT NULL,
|
||||
message TEXT NOT NULL,
|
||||
related_id INTEGER,
|
||||
is_read BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_notifications_user_id ON notifications (user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_notifications_is_read ON notifications (is_read);
|
||||
@ -0,0 +1,49 @@
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-add-missing-tables
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
annotations:
|
||||
"helm.sh/hook": post-upgrade
|
||||
"helm.sh/hook-weight": "6"
|
||||
"helm.sh/hook-delete-policy": before-hook-creation
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
restartPolicy: Never
|
||||
containers:
|
||||
- name: add-tables
|
||||
image: postgres:16-alpine
|
||||
env:
|
||||
- name: PGHOST
|
||||
value: {{ .Release.Name }}-db
|
||||
- name: PGPORT
|
||||
value: "{{ .Values.postgres.port }}"
|
||||
- name: PGDATABASE
|
||||
value: {{ .Values.postgres.database }}
|
||||
- name: PGUSER
|
||||
value: {{ .Values.postgres.user }}
|
||||
- name: PGPASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PASSWORD
|
||||
command:
|
||||
- sh
|
||||
- -c
|
||||
- |
|
||||
echo "Waiting for database to be ready..."
|
||||
until pg_isready -h $PGHOST -p $PGPORT -U $PGUSER; do
|
||||
echo "Database not ready, waiting..."
|
||||
sleep 2
|
||||
done
|
||||
echo "Database ready, adding missing tables..."
|
||||
psql -v ON_ERROR_STOP=1 -f /sql/add-tables.sql
|
||||
echo "Tables added successfully!"
|
||||
volumeMounts:
|
||||
- name: sql
|
||||
mountPath: /sql
|
||||
volumes:
|
||||
- name: sql
|
||||
configMap:
|
||||
name: {{ .Release.Name }}-add-missing-tables
|
||||
@ -0,0 +1,99 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-admin-init
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
data:
|
||||
create-admin.py: |
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import psycopg2
|
||||
import bcrypt
|
||||
from time import sleep
|
||||
|
||||
def wait_for_db():
|
||||
"""Wait for database to be ready"""
|
||||
max_retries = 30
|
||||
retry_count = 0
|
||||
|
||||
while retry_count < max_retries:
|
||||
try:
|
||||
conn = psycopg2.connect(
|
||||
host=os.environ['DB_HOST'],
|
||||
port=os.environ['DB_PORT'],
|
||||
database=os.environ['DB_NAME'],
|
||||
user=os.environ['DB_USER'],
|
||||
password=os.environ['DB_PASSWORD']
|
||||
)
|
||||
conn.close()
|
||||
print("✓ Database is ready")
|
||||
return True
|
||||
except Exception as e:
|
||||
retry_count += 1
|
||||
print(f"Waiting for database... ({retry_count}/{max_retries})")
|
||||
sleep(2)
|
||||
|
||||
print("✗ Database connection timeout")
|
||||
return False
|
||||
|
||||
def create_admin_user():
|
||||
"""Create admin user if not exists"""
|
||||
try:
|
||||
# Hash the password
|
||||
password = os.environ.get('ADMIN_PASSWORD', 'admin123')
|
||||
password_hash = bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
|
||||
|
||||
# Connect to database
|
||||
conn = psycopg2.connect(
|
||||
host=os.environ['DB_HOST'],
|
||||
port=os.environ['DB_PORT'],
|
||||
database=os.environ['DB_NAME'],
|
||||
user=os.environ['DB_USER'],
|
||||
password=os.environ['DB_PASSWORD']
|
||||
)
|
||||
cur = conn.cursor()
|
||||
|
||||
# Insert admin user
|
||||
cur.execute("""
|
||||
INSERT INTO users (username, email, password_hash, first_name, last_name, display_name, is_admin)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s)
|
||||
ON CONFLICT (username) DO UPDATE SET
|
||||
email = EXCLUDED.email,
|
||||
password_hash = EXCLUDED.password_hash,
|
||||
first_name = EXCLUDED.first_name,
|
||||
last_name = EXCLUDED.last_name,
|
||||
display_name = EXCLUDED.display_name,
|
||||
is_admin = EXCLUDED.is_admin
|
||||
""", (
|
||||
os.environ.get('ADMIN_USERNAME', 'admin'),
|
||||
os.environ.get('ADMIN_EMAIL', 'admin@myrecipes.local'),
|
||||
password_hash,
|
||||
os.environ.get('ADMIN_FIRST_NAME', 'Admin'),
|
||||
os.environ.get('ADMIN_LAST_NAME', 'User'),
|
||||
os.environ.get('ADMIN_DISPLAY_NAME', 'מנהל'),
|
||||
True
|
||||
))
|
||||
|
||||
conn.commit()
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
print(f"✓ Admin user '{os.environ.get('ADMIN_USERNAME', 'admin')}' created/updated successfully")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"✗ Error creating admin user: {e}")
|
||||
return False
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("Starting admin user initialization...")
|
||||
|
||||
if not wait_for_db():
|
||||
sys.exit(1)
|
||||
|
||||
if not create_admin_user():
|
||||
sys.exit(1)
|
||||
|
||||
print("✓ Admin user initialization completed")
|
||||
sys.exit(0)
|
||||
@ -0,0 +1,75 @@
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-admin-init-{{ .Release.Revision }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-admin-init
|
||||
component: init
|
||||
annotations:
|
||||
"helm.sh/hook": post-install,post-upgrade
|
||||
"helm.sh/hook-weight": "10"
|
||||
"helm.sh/hook-delete-policy": before-hook-creation
|
||||
spec:
|
||||
ttlSecondsAfterFinished: 300
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ .Release.Name }}-admin-init
|
||||
spec:
|
||||
restartPolicy: Never
|
||||
containers:
|
||||
- name: admin-init
|
||||
image: python:3.12-slim
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- |
|
||||
pip install --no-cache-dir psycopg2-binary bcrypt > /dev/null 2>&1
|
||||
python3 /scripts/create-admin.py
|
||||
env:
|
||||
- name: DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_HOST
|
||||
- name: DB_PORT
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PORT
|
||||
- name: DB_NAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_NAME
|
||||
- name: DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_USER
|
||||
- name: DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PASSWORD
|
||||
- name: ADMIN_USERNAME
|
||||
value: {{ .Values.admin.username | quote }}
|
||||
- name: ADMIN_EMAIL
|
||||
value: {{ .Values.admin.email | quote }}
|
||||
- name: ADMIN_PASSWORD
|
||||
value: {{ .Values.admin.password | quote }}
|
||||
- name: ADMIN_FIRST_NAME
|
||||
value: {{ .Values.admin.firstName | quote }}
|
||||
- name: ADMIN_LAST_NAME
|
||||
value: {{ .Values.admin.lastName | quote }}
|
||||
- name: ADMIN_DISPLAY_NAME
|
||||
value: {{ .Values.admin.displayName | quote }}
|
||||
volumeMounts:
|
||||
- name: init-script
|
||||
mountPath: /scripts
|
||||
volumes:
|
||||
- name: init-script
|
||||
configMap:
|
||||
name: {{ .Release.Name }}-admin-init
|
||||
defaultMode: 0755
|
||||
@ -0,0 +1,35 @@
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-app-secrets
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
type: Opaque
|
||||
stringData:
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID: {{ .Values.oauth.google.clientId | quote }}
|
||||
GOOGLE_CLIENT_SECRET: {{ .Values.oauth.google.clientSecret | quote }}
|
||||
GOOGLE_REDIRECT_URI: {{ .Values.oauth.google.redirectUri | quote }}
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID: {{ .Values.oauth.azure.clientId | quote }}
|
||||
AZURE_CLIENT_SECRET: {{ .Values.oauth.azure.clientSecret | quote }}
|
||||
AZURE_TENANT_ID: {{ .Values.oauth.azure.tenantId | quote }}
|
||||
AZURE_REDIRECT_URI: {{ .Values.oauth.azure.redirectUri | quote }}
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST: {{ .Values.email.smtpHost | quote }}
|
||||
SMTP_PORT: {{ .Values.email.smtpPort | quote }}
|
||||
SMTP_USER: {{ .Values.email.smtpUser | quote }}
|
||||
SMTP_PASSWORD: {{ .Values.email.smtpPassword | quote }}
|
||||
SMTP_FROM: {{ .Values.email.smtpFrom | quote }}
|
||||
|
||||
# Frontend URL for redirects
|
||||
FRONTEND_URL: {{ .Values.frontend.externalUrl | quote }}
|
||||
|
||||
# S3 Backup Configuration
|
||||
S3_ENDPOINT: {{ .Values.s3.endpoint | quote }}
|
||||
S3_ACCESS_KEY: {{ .Values.s3.accessKey | quote }}
|
||||
S3_SECRET_KEY: {{ .Values.s3.secretKey | quote }}
|
||||
S3_BUCKET_NAME: {{ .Values.s3.bucketName | quote }}
|
||||
S3_REGION: {{ .Values.s3.region | quote }}
|
||||
BACKUP_INTERVAL: {{ .Values.s3.backupInterval | quote }}
|
||||
@ -0,0 +1,119 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
component: backend
|
||||
spec:
|
||||
replicas: {{ .Values.backend.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
component: backend
|
||||
spec:
|
||||
initContainers:
|
||||
- name: db-migration
|
||||
image: postgres:16-alpine
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- |
|
||||
echo "Waiting for database to be ready..."
|
||||
until pg_isready -h $DB_HOST -U $DB_USER; do
|
||||
echo "Database not ready, waiting..."
|
||||
sleep 2
|
||||
done
|
||||
echo "Database is ready, running migration..."
|
||||
PGPASSWORD=$DB_PASSWORD psql -h $DB_HOST -U $DB_USER -d $DB_NAME -f /migration/migrate.sql
|
||||
echo "Migration completed successfully"
|
||||
env:
|
||||
- name: DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_HOST
|
||||
- name: DB_PORT
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PORT
|
||||
- name: DB_NAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_NAME
|
||||
- name: DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_USER
|
||||
- name: DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PASSWORD
|
||||
volumeMounts:
|
||||
- name: migration-script
|
||||
mountPath: /migration
|
||||
containers:
|
||||
- name: {{ .Values.backend.name }}
|
||||
image: "{{ .Values.backend.image.repository }}:{{ .Values.backend.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.backend.image.pullPolicy }}
|
||||
ports:
|
||||
- containerPort: {{ .Values.backend.service.targetPort }}
|
||||
name: http
|
||||
protocol: TCP
|
||||
env:
|
||||
{{- if .Values.backend.env }}
|
||||
{{- range $key, $value := .Values.backend.env }}
|
||||
- name: {{ $key }}
|
||||
value: {{ $value | quote }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
- secretRef:
|
||||
name: {{ .Release.Name }}-app-secrets
|
||||
startupProbe:
|
||||
httpGet:
|
||||
path: /docs
|
||||
port: http
|
||||
initialDelaySeconds: 15
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 30
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /docs
|
||||
port: http
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /docs
|
||||
port: http
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 2
|
||||
resources:
|
||||
requests:
|
||||
cpu: {{ .Values.backend.resources.requests.cpu }}
|
||||
memory: {{ .Values.backend.resources.requests.memory }}
|
||||
limits:
|
||||
cpu: {{ .Values.backend.resources.limits.cpu }}
|
||||
memory: {{ .Values.backend.resources.limits.memory }}
|
||||
volumes:
|
||||
- name: migration-script
|
||||
configMap:
|
||||
name: {{ .Release.Name }}-db-migration
|
||||
|
||||
@ -0,0 +1,17 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
component: backend
|
||||
spec:
|
||||
type: {{ .Values.backend.service.type }}
|
||||
ports:
|
||||
- port: {{ .Values.backend.service.port }}
|
||||
targetPort: {{ .Values.backend.service.targetPort }}
|
||||
protocol: TCP
|
||||
name: http
|
||||
selector:
|
||||
app: {{ .Release.Name }}-{{ .Values.backend.name }}
|
||||
@ -0,0 +1,54 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-db-migration
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
data:
|
||||
migrate.sql: |
|
||||
-- Add made_by column to recipes if it doesn't exist
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'recipes' AND column_name = 'made_by'
|
||||
) THEN
|
||||
ALTER TABLE recipes ADD COLUMN made_by TEXT;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- Create index if it doesn't exist
|
||||
CREATE INDEX IF NOT EXISTS idx_recipes_made_by ON recipes (made_by);
|
||||
|
||||
-- Add is_admin column to users if it doesn't exist
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'users' AND column_name = 'is_admin'
|
||||
) THEN
|
||||
ALTER TABLE users ADD COLUMN is_admin BOOLEAN DEFAULT FALSE;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- Add auth_provider column to users if it doesn't exist
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'users' AND column_name = 'auth_provider'
|
||||
) THEN
|
||||
ALTER TABLE users ADD COLUMN auth_provider TEXT DEFAULT 'local';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- Verify recipes schema
|
||||
SELECT column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_name = 'recipes'
|
||||
ORDER BY ordinal_position;
|
||||
|
||||
-- Verify users schema
|
||||
SELECT column_name, data_type
|
||||
FROM information_schema.columns
|
||||
WHERE table_name = 'users'
|
||||
ORDER BY ordinal_position;
|
||||
@ -0,0 +1,75 @@
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-db-migration-{{ .Release.Revision }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-db-migration
|
||||
component: migration
|
||||
annotations:
|
||||
"helm.sh/hook": post-upgrade,post-install
|
||||
"helm.sh/hook-weight": "5"
|
||||
"helm.sh/hook-delete-policy": before-hook-creation
|
||||
spec:
|
||||
ttlSecondsAfterFinished: 300
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ .Release.Name }}-db-migration
|
||||
spec:
|
||||
restartPolicy: Never
|
||||
containers:
|
||||
- name: migrate
|
||||
image: postgres:16-alpine
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- |
|
||||
echo "Waiting for database to be ready..."
|
||||
until pg_isready -h $DB_HOST -U $DB_USER; do
|
||||
echo "Database not ready, waiting..."
|
||||
sleep 2
|
||||
done
|
||||
echo "Database is ready, applying schema..."
|
||||
PGPASSWORD=$DB_PASSWORD psql -h $DB_HOST -U $DB_USER -d $DB_NAME -f /schema/schema.sql
|
||||
echo "Schema applied, running migrations..."
|
||||
PGPASSWORD=$DB_PASSWORD psql -h $DB_HOST -U $DB_USER -d $DB_NAME -f /migration/migrate.sql
|
||||
echo "Migration completed successfully"
|
||||
env:
|
||||
- name: DB_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_HOST
|
||||
- name: DB_PORT
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PORT
|
||||
- name: DB_NAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_NAME
|
||||
- name: DB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_USER
|
||||
- name: DB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
key: DB_PASSWORD
|
||||
volumeMounts:
|
||||
- name: migration-script
|
||||
mountPath: /migration
|
||||
- name: schema-script
|
||||
mountPath: /schema
|
||||
volumes:
|
||||
- name: migration-script
|
||||
configMap:
|
||||
name: {{ .Release.Name }}-db-migration
|
||||
- name: schema-script
|
||||
configMap:
|
||||
name: {{ .Release.Name }}-db-schema
|
||||
@ -0,0 +1,134 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-db-schema
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
data:
|
||||
schema.sql: |
|
||||
-- Create users table
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id SERIAL PRIMARY KEY,
|
||||
username TEXT UNIQUE NOT NULL,
|
||||
email TEXT UNIQUE NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
first_name TEXT,
|
||||
last_name TEXT,
|
||||
display_name TEXT NOT NULL,
|
||||
is_admin BOOLEAN DEFAULT FALSE,
|
||||
auth_provider TEXT DEFAULT 'local',
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_users_username ON users (username);
|
||||
CREATE INDEX IF NOT EXISTS idx_users_email ON users (email);
|
||||
|
||||
-- Create recipes table (matching backend schema with TEXT[] arrays)
|
||||
CREATE TABLE IF NOT EXISTS recipes (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
meal_type TEXT NOT NULL, -- breakfast / lunch / dinner / snack
|
||||
time_minutes INTEGER NOT NULL,
|
||||
tags TEXT[] NOT NULL DEFAULT '{}', -- {"מהיר", "בריא"}
|
||||
ingredients TEXT[] NOT NULL DEFAULT '{}', -- {"ביצה", "עגבניה", "מלח"}
|
||||
steps TEXT[] NOT NULL DEFAULT '{}', -- {"לחתוך", "לבשל", ...}
|
||||
image TEXT, -- Base64-encoded image or image URL
|
||||
made_by TEXT, -- Person who created this recipe version
|
||||
user_id INTEGER REFERENCES users(id) ON DELETE SET NULL, -- Recipe owner
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Indexes for filters
|
||||
CREATE INDEX IF NOT EXISTS idx_recipes_meal_type
|
||||
ON recipes (meal_type);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_recipes_time_minutes
|
||||
ON recipes (time_minutes);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_recipes_made_by
|
||||
ON recipes (made_by);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_recipes_user_id
|
||||
ON recipes (user_id);
|
||||
|
||||
-- Add new columns to existing users table
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'users' AND column_name = 'first_name'
|
||||
) THEN
|
||||
ALTER TABLE users ADD COLUMN first_name TEXT;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'users' AND column_name = 'last_name'
|
||||
) THEN
|
||||
ALTER TABLE users ADD COLUMN last_name TEXT;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'users' AND column_name = 'display_name'
|
||||
) THEN
|
||||
ALTER TABLE users ADD COLUMN display_name TEXT;
|
||||
-- Set display_name to username for existing users
|
||||
UPDATE users SET display_name = username WHERE display_name IS NULL;
|
||||
ALTER TABLE users ALTER COLUMN display_name SET NOT NULL;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'users' AND column_name = 'is_admin'
|
||||
) THEN
|
||||
ALTER TABLE users ADD COLUMN is_admin BOOLEAN DEFAULT FALSE;
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- Create grocery lists table
|
||||
CREATE TABLE IF NOT EXISTS grocery_lists (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
items TEXT[] NOT NULL DEFAULT '{}',
|
||||
owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
is_pinned BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Create grocery list shares table
|
||||
CREATE TABLE IF NOT EXISTS grocery_list_shares (
|
||||
id SERIAL PRIMARY KEY,
|
||||
list_id INTEGER NOT NULL REFERENCES grocery_lists(id) ON DELETE CASCADE,
|
||||
shared_with_user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
can_edit BOOLEAN DEFAULT FALSE,
|
||||
shared_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
UNIQUE(list_id, shared_with_user_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_grocery_lists_owner_id ON grocery_lists (owner_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_grocery_list_shares_list_id ON grocery_list_shares (list_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_grocery_list_shares_user_id ON grocery_list_shares (shared_with_user_id);
|
||||
|
||||
-- Create notifications table
|
||||
CREATE TABLE IF NOT EXISTS notifications (
|
||||
id SERIAL PRIMARY KEY,
|
||||
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
|
||||
type TEXT NOT NULL,
|
||||
message TEXT NOT NULL,
|
||||
related_id INTEGER,
|
||||
is_read BOOLEAN DEFAULT FALSE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_notifications_user_id ON notifications (user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_notifications_is_read ON notifications (is_read);
|
||||
23
aws/final-app/my-recipes-chart-aws/templates/db-secret.yaml
Normal file
23
aws/final-app/my-recipes-chart-aws/templates/db-secret.yaml
Normal file
@ -0,0 +1,23 @@
|
||||
apiVersion: v1
|
||||
kind: Secret
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-db-credentials
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
type: Opaque
|
||||
stringData:
|
||||
{{- if .Values.database }}
|
||||
# External database (e.g., AWS RDS)
|
||||
DB_HOST: {{ .Values.database.host | quote }}
|
||||
DB_PORT: {{ .Values.database.port | quote }}
|
||||
DB_NAME: {{ .Values.database.name | quote }}
|
||||
DB_USER: {{ .Values.database.user | quote }}
|
||||
DB_PASSWORD: {{ .Values.database.password | quote }}
|
||||
{{- else }}
|
||||
# In-cluster PostgreSQL
|
||||
DB_HOST: {{ printf "%s-%s-headless.%s.svc.cluster.local" .Release.Name .Values.postgres.name .Values.global.namespace }}
|
||||
DB_PORT: "{{ .Values.postgres.port }}"
|
||||
DB_NAME: {{ .Values.postgres.database | quote }}
|
||||
DB_USER: {{ .Values.postgres.user | quote }}
|
||||
DB_PASSWORD: {{ .Values.postgres.password | quote }}
|
||||
{{- end }}
|
||||
|
||||
39
aws/final-app/my-recipes-chart-aws/templates/db-service.yaml
Normal file
39
aws/final-app/my-recipes-chart-aws/templates/db-service.yaml
Normal file
@ -0,0 +1,39 @@
|
||||
{{- if not .Values.database }}
|
||||
{{- /* Only deploy in-cluster PostgreSQL services if external database is not configured */ -}}
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.postgres.name }}-headless
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
component: database
|
||||
spec:
|
||||
clusterIP: None
|
||||
selector:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
ports:
|
||||
- name: postgres
|
||||
port: {{ .Values.postgres.port }}
|
||||
targetPort: {{ .Values.postgres.port }}
|
||||
protocol: TCP
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
component: database
|
||||
spec:
|
||||
type: {{ .Values.postgres.service.type }}
|
||||
selector:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
ports:
|
||||
- name: postgres
|
||||
port: {{ .Values.postgres.service.port }}
|
||||
targetPort: {{ .Values.postgres.port }}
|
||||
protocol: TCP
|
||||
{{- end }}
|
||||
|
||||
@ -0,0 +1,89 @@
|
||||
{{- if not .Values.database }}
|
||||
{{- /* Only deploy in-cluster PostgreSQL if external database is not configured */ -}}
|
||||
apiVersion: apps/v1
|
||||
kind: StatefulSet
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
component: database
|
||||
spec:
|
||||
serviceName: {{ .Release.Name }}-{{ .Values.postgres.name }}-headless
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.postgres.name }}
|
||||
component: database
|
||||
spec:
|
||||
containers:
|
||||
- name: postgres
|
||||
image: "{{ .Values.postgres.image.repository }}:{{ .Values.postgres.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.postgres.image.pullPolicy }}
|
||||
ports:
|
||||
- containerPort: {{ .Values.postgres.port }}
|
||||
name: postgres
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: POSTGRES_USER
|
||||
value: {{ .Values.postgres.user | quote }}
|
||||
- name: POSTGRES_PASSWORD
|
||||
value: {{ .Values.postgres.password | quote }}
|
||||
- name: POSTGRES_DB
|
||||
value: {{ .Values.postgres.database | quote }}
|
||||
- name: PGDATA
|
||||
value: /var/lib/postgresql/data/pgdata
|
||||
volumeMounts:
|
||||
- name: data
|
||||
mountPath: /var/lib/postgresql/data
|
||||
- name: init-sql
|
||||
mountPath: /docker-entrypoint-initdb.d
|
||||
livenessProbe:
|
||||
exec:
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- pg_isready -U {{ .Values.postgres.user }}
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
exec:
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- pg_isready -U {{ .Values.postgres.user }}
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 2
|
||||
resources:
|
||||
requests:
|
||||
cpu: {{ .Values.postgres.resources.requests.cpu }}
|
||||
memory: {{ .Values.postgres.resources.requests.memory }}
|
||||
limits:
|
||||
cpu: {{ .Values.postgres.resources.limits.cpu }}
|
||||
memory: {{ .Values.postgres.resources.limits.memory }}
|
||||
volumes:
|
||||
- name: init-sql
|
||||
configMap:
|
||||
name: {{ .Release.Name }}-db-schema
|
||||
volumeClaimTemplates:
|
||||
- metadata:
|
||||
name: data
|
||||
spec:
|
||||
accessModes:
|
||||
- {{ .Values.postgres.persistence.accessMode }}
|
||||
resources:
|
||||
requests:
|
||||
storage: {{ .Values.postgres.persistence.size }}
|
||||
{{- if .Values.postgres.persistence.storageClass }}
|
||||
storageClassName: {{ .Values.postgres.persistence.storageClass | quote }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
@ -0,0 +1,57 @@
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
component: frontend
|
||||
spec:
|
||||
replicas: {{ .Values.frontend.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
component: frontend
|
||||
spec:
|
||||
containers:
|
||||
- name: {{ .Values.frontend.name }}
|
||||
image: "{{ .Values.frontend.image.repository }}:{{ .Values.frontend.image.tag }}"
|
||||
imagePullPolicy: {{ .Values.frontend.image.pullPolicy }}
|
||||
ports:
|
||||
- containerPort: {{ .Values.frontend.service.targetPort }}
|
||||
name: http
|
||||
protocol: TCP
|
||||
{{- with .Values.frontend.env }}
|
||||
env:
|
||||
{{- range $key, $value := . }}
|
||||
- name: {{ $key }}
|
||||
value: {{ $value | quote }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: http
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 2
|
||||
resources:
|
||||
requests:
|
||||
cpu: {{ .Values.frontend.resources.requests.cpu }}
|
||||
memory: {{ .Values.frontend.resources.requests.memory }}
|
||||
limits:
|
||||
cpu: {{ .Values.frontend.resources.limits.cpu }}
|
||||
memory: {{ .Values.frontend.resources.limits.memory }}
|
||||
@ -0,0 +1,17 @@
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
component: frontend
|
||||
spec:
|
||||
type: {{ .Values.frontend.service.type }}
|
||||
ports:
|
||||
- port: {{ .Values.frontend.service.port }}
|
||||
targetPort: {{ .Values.frontend.service.targetPort }}
|
||||
protocol: TCP
|
||||
name: http
|
||||
selector:
|
||||
app: {{ .Release.Name }}-{{ .Values.frontend.name }}
|
||||
48
aws/final-app/my-recipes-chart-aws/templates/ingress.yaml
Normal file
48
aws/final-app/my-recipes-chart-aws/templates/ingress.yaml
Normal file
@ -0,0 +1,48 @@
|
||||
{{- if .Values.frontend.ingress.enabled }}
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: {{ .Release.Name }}
|
||||
namespace: {{ .Values.global.namespace }}
|
||||
labels:
|
||||
app: {{ .Release.Name }}
|
||||
{{- with .Values.frontend.ingress.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
{{- if .Values.frontend.ingress.className }}
|
||||
ingressClassName: {{ .Values.frontend.ingress.className }}
|
||||
{{- end }}
|
||||
rules:
|
||||
# Frontend rule
|
||||
{{- range .Values.frontend.ingress.hosts }}
|
||||
- host: {{ .host | quote }}
|
||||
http:
|
||||
paths:
|
||||
{{- range .paths }}
|
||||
- path: {{ .path }}
|
||||
pathType: {{ .pathType }}
|
||||
backend:
|
||||
service:
|
||||
name: {{ $.Release.Name }}-{{ $.Values.frontend.name }}
|
||||
port:
|
||||
number: {{ $.Values.frontend.service.port }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
# Backend API rule
|
||||
{{- range .Values.backend.ingress.hosts }}
|
||||
- host: {{ .host | quote }}
|
||||
http:
|
||||
paths:
|
||||
{{- range .paths }}
|
||||
- path: {{ .path }}
|
||||
pathType: {{ .pathType }}
|
||||
backend:
|
||||
service:
|
||||
name: {{ $.Release.Name }}-{{ $.Values.backend.name }}
|
||||
port:
|
||||
number: {{ $.Values.backend.service.port }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
182
aws/final-app/my-recipes-chart-aws/values.yaml
Normal file
182
aws/final-app/my-recipes-chart-aws/values.yaml
Normal file
@ -0,0 +1,182 @@
|
||||
global:
|
||||
namespace: my-apps
|
||||
imagePullSecrets: []
|
||||
|
||||
# Backend configuration
|
||||
backend:
|
||||
name: backend
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: harbor.dvirlabs.com/my-apps/my-recipes-backend
|
||||
pullPolicy: IfNotPresent
|
||||
tag: "latest"
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 8000
|
||||
targetPort: 8000
|
||||
|
||||
resources:
|
||||
requests:
|
||||
cpu: 100m
|
||||
memory: 128Mi
|
||||
limits:
|
||||
cpu: 500m
|
||||
memory: 512Mi
|
||||
|
||||
env:
|
||||
PYTHONUNBUFFERED: "1"
|
||||
|
||||
# Secrets are created in db-secret.yaml
|
||||
# These are passed via envFrom secretRef
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
className: "alb"
|
||||
annotations:
|
||||
alb.ingress.kubernetes.io/scheme: internet-facing
|
||||
alb.ingress.kubernetes.io/target-type: ip
|
||||
alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS":443}]'
|
||||
alb.ingress.kubernetes.io/certificate-arn: "" # Set in project-specific values
|
||||
hosts:
|
||||
- host: api-my-recipes.dvirlabs.com
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: api-my-recipes-tls
|
||||
hosts:
|
||||
- api-my-recipes.dvirlabs.com
|
||||
|
||||
# Frontend configuration
|
||||
frontend:
|
||||
name: frontend
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: harbor.dvirlabs.com/my-apps/my-recipes-frontend
|
||||
pullPolicy: IfNotPresent
|
||||
tag: "latest"
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 80
|
||||
targetPort: 80
|
||||
|
||||
env:
|
||||
API_BASE: "https://api-my-recipes.dvirlabs.com"
|
||||
|
||||
resources:
|
||||
requests:
|
||||
cpu: 50m
|
||||
memory: 64Mi
|
||||
limits:
|
||||
cpu: 200m
|
||||
memory: 256Mi
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
className: "alb"
|
||||
annotations:
|
||||
alb.ingress.kubernetes.io/scheme: internet-facing
|
||||
alb.ingress.kubernetes.io/target-type: ip
|
||||
alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS":443}]'
|
||||
alb.ingress.kubernetes.io/certificate-arn: "" # Set in project-specific values
|
||||
hosts:
|
||||
- host: my-recipes.dvirlabs.com
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: my-recipes-tls
|
||||
hosts:
|
||||
- my-recipes.dvirlabs.com
|
||||
externalUrl: "https://my-recipes.dvirlabs.com"
|
||||
|
||||
# PostgreSQL configuration
|
||||
postgres:
|
||||
name: db
|
||||
image:
|
||||
repository: postgres
|
||||
tag: "16-alpine"
|
||||
pullPolicy: IfNotPresent
|
||||
|
||||
user: recipes_user
|
||||
password: recipes_password
|
||||
database: recipes_db
|
||||
port: 5432
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
port: 5432
|
||||
targetPort: 5432
|
||||
|
||||
persistence:
|
||||
enabled: true
|
||||
accessMode: ReadWriteOnce
|
||||
storageClass: "nfs-client"
|
||||
size: 10Gi
|
||||
|
||||
resources:
|
||||
requests:
|
||||
cpu: 100m
|
||||
memory: 256Mi
|
||||
limits:
|
||||
cpu: 1000m
|
||||
memory: 1Gi
|
||||
|
||||
# OAuth Configuration
|
||||
oauth:
|
||||
google:
|
||||
clientId: "143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com"
|
||||
clientSecret: "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"
|
||||
redirectUri: "https://api-my-recipes.dvirlabs.com/auth/google/callback"
|
||||
|
||||
azure:
|
||||
clientId: "db244cf5-eb11-4738-a2ea-5b0716c9ec0a"
|
||||
clientSecret: "Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp"
|
||||
tenantId: "consumers"
|
||||
redirectUri: "https://api-my-recipes.dvirlabs.com/auth/azure/callback"
|
||||
|
||||
# Email Configuration
|
||||
email:
|
||||
smtpHost: "smtp.gmail.com"
|
||||
smtpPort: "587"
|
||||
smtpUser: "dvirlabs@gmail.com"
|
||||
smtpPassword: "agaanrhbbazbdytv"
|
||||
smtpFrom: "dvirlabs@gmail.com"
|
||||
|
||||
# S3 Backup Configuration
|
||||
s3:
|
||||
endpoint: "https://s3.amazonaws.com" # Can be overridden for specific regions
|
||||
accessKey: "" # Set this in project-specific values.yaml
|
||||
secretKey: "" # Set this in project-specific values.yaml
|
||||
bucketName: "" # Set this in project-specific values.yaml
|
||||
region: "us-east-1" # Set this in project-specific values.yaml
|
||||
backupInterval: "weekly" # Options: test (1 min), daily, weekly
|
||||
|
||||
# Admin User Configuration
|
||||
admin:
|
||||
username: "admin"
|
||||
email: "admin@example.com"
|
||||
password: "admin123" # Change this in production!
|
||||
firstName: "Admin"
|
||||
lastName: "User"
|
||||
displayName: "Admin User"
|
||||
|
||||
# Ingress configuration
|
||||
ingress:
|
||||
enabled: false # Individual frontend/backend ingress resources handle routing instead
|
||||
className: "nginx"
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: "letsencrypt-prod"
|
||||
hosts:
|
||||
- host: my-recipes.dvirlabs.com
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend: frontend
|
||||
tls:
|
||||
- secretName: recipes-tls
|
||||
hosts:
|
||||
- my-recipes.dvirlabs.com
|
||||
|
||||
110
aws/final-app/values.yaml
Normal file
110
aws/final-app/values.yaml
Normal file
@ -0,0 +1,110 @@
|
||||
# Project-specific values for AWS EKS deployment
|
||||
# This file overrides the base values in my-recipes-chart/values.yaml
|
||||
|
||||
global:
|
||||
namespace: my-apps
|
||||
|
||||
# Backend configuration
|
||||
backend:
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: 430842105273.dkr.ecr.eu-central-1.amazonaws.com/my-recipes-backend # Update with your ECR repository
|
||||
tag: "latest"
|
||||
|
||||
ingress:
|
||||
className: "alb"
|
||||
annotations:
|
||||
alb.ingress.kubernetes.io/scheme: internet-facing
|
||||
alb.ingress.kubernetes.io/target-type: ip
|
||||
alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS":443}]'
|
||||
# Add your ACM certificate ARN below if you have one
|
||||
# alb.ingress.kubernetes.io/certificate-arn: "arn:aws:acm:..."
|
||||
hosts:
|
||||
- host: api-my-recipes.aws-dvirlabs.com
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
|
||||
# Frontend configuration
|
||||
frontend:
|
||||
replicaCount: 2
|
||||
image:
|
||||
repository: 430842105273.dkr.ecr.eu-central-1.amazonaws.com/my-recipes-frontend # Update with your ECR repository
|
||||
tag: "latest"
|
||||
|
||||
env:
|
||||
API_BASE: "https://api-my-recipes.aws-dvirlabs.com"
|
||||
|
||||
ingress:
|
||||
className: "alb"
|
||||
annotations:
|
||||
alb.ingress.kubernetes.io/scheme: internet-facing
|
||||
alb.ingress.kubernetes.io/target-type: ip
|
||||
alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS":443}]'
|
||||
# Add your ACM certificate ARN below if you have one
|
||||
# alb.ingress.kubernetes.io/certificate-arn: "arn:aws:acm:..."
|
||||
hosts:
|
||||
- host: my-recipes.aws-dvirlabs.com
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
|
||||
externalUrl: "https://my-recipes.aws-dvirlabs.com"
|
||||
|
||||
# PostgreSQL configuration
|
||||
postgres:
|
||||
# For AWS RDS, set this to use external database
|
||||
# Leave enabled: true to use in-cluster database
|
||||
enabled: false # Set to false if using RDS
|
||||
|
||||
# If using RDS, these values are ignored but kept for reference
|
||||
persistence:
|
||||
storageClass: "gp3" # EKS default storage class
|
||||
size: 20Gi
|
||||
|
||||
# OAuth Configuration
|
||||
oauth:
|
||||
google:
|
||||
clientId: "143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com"
|
||||
clientSecret: "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"
|
||||
redirectUri: "https://api-my-recipes.aws-dvirlabs.com/auth/google/callback"
|
||||
|
||||
azure:
|
||||
clientId: "db244cf5-eb11-4738-a2ea-5b0716c9ec0a"
|
||||
clientSecret: "Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp"
|
||||
tenantId: "consumers"
|
||||
redirectUri: "https://api-my-recipes.aws-dvirlabs.com/auth/azure/callback"
|
||||
|
||||
# Email Configuration
|
||||
email:
|
||||
smtpHost: "smtp.gmail.com"
|
||||
smtpPort: "587"
|
||||
smtpUser: "dvirlabs@gmail.com"
|
||||
smtpPassword: "agaanrhbbazbdytv"
|
||||
smtpFrom: "dvirlabs@gmail.com"
|
||||
|
||||
# S3 Backup Configuration for AWS
|
||||
s3:
|
||||
endpoint: "https://s3.eu-central-1.amazonaws.com" # Update with your region
|
||||
accessKey: "AKIAXXXXXXXXXXXXXXXX" # Replace with your AWS Access Key
|
||||
secretKey: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" # Replace with your AWS Secret Key
|
||||
bucketName: "my-recipes-backups" # Update with your S3 bucket name
|
||||
region: "eu-central-1" # Update with your region
|
||||
backupInterval: "weekly"
|
||||
|
||||
# Admin User Configuration
|
||||
admin:
|
||||
username: "admin"
|
||||
email: "dvirlabs@gmail.com"
|
||||
password: "AdminPassword123!" # Change this after first login!
|
||||
firstName: "Dvir"
|
||||
lastName: "Admin"
|
||||
displayName: "Dvir Admin"
|
||||
|
||||
# Database connection for AWS RDS (used when postgres.enabled: false)
|
||||
database:
|
||||
host: "my-recipes-rds.chw4omcguqv7.eu-central-1.rds.amazonaws.com"
|
||||
port: "5432"
|
||||
name: "recipes_db"
|
||||
user: "recipes_user"
|
||||
password: "recipes_password" # Store securely in AWS Secrets Manager in production
|
||||
33
backend/.env
33
backend/.env
@ -4,3 +4,36 @@ DB_USER=recipes_user
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=localhost
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=dvirlabs@gmail.com
|
||||
SMTP_PASSWORD=agaanrhbbazbdytv
|
||||
SMTP_FROM=dvirlabs@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com
|
||||
GOOGLE_CLIENT_SECRET=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
|
||||
GOOGLE_REDIRECT_URI=http://localhost:8000/auth/google/callback
|
||||
FRONTEND_URL=http://localhost:5174
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=db244cf5-eb11-4738-a2ea-5b0716c9ec0a
|
||||
AZURE_CLIENT_SECRET=Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=http://localhost:8000/auth/azure/callback
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
R2_ENDPOINT=https://d4704b8c40b2f95b2c7bf7ee4ecc52f8.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY=1997b1e48a337c0dbe1f7552a08631b5
|
||||
R2_SECRET_KEY=369694e39fedfedb254158c147171f5760de84fa2346d5d5d5a961f1f517dbc6
|
||||
# Buckets are auto-selected based on environment (FRONTEND_URL)
|
||||
# Dev: my-recipes-db-bkp-dev
|
||||
# Prod: my-recipes-db-bkp-prod
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
# For testing: BACKUP_INTERVAL=test
|
||||
# For production: BACKUP_INTERVAL=weekly
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
39
backend/.env.aws
Normal file
39
backend/.env.aws
Normal file
@ -0,0 +1,39 @@
|
||||
# Database Configuration (AWS RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=recipes_password
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=my-recipes-rds.chw4omcqsuqv7.eu-central-1.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=dvirlabs@gmail.com
|
||||
SMTP_PASSWORD=agaanrhbbazbdytv
|
||||
SMTP_FROM=dvirlabs@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com
|
||||
GOOGLE_CLIENT_SECRET=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
|
||||
GOOGLE_REDIRECT_URI=http://localhost:8000/auth/google/callback
|
||||
FRONTEND_URL=http://localhost
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=db244cf5-eb11-4738-a2ea-5b0716c9ec0a
|
||||
AZURE_CLIENT_SECRET=Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=http://localhost:8000/auth/azure/callback
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
R2_ENDPOINT=https://d4704b8c40b2f95b2c7bf7ee4ecc52f8.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY=1997b1e48a337c0dbe1f7552a08631b5
|
||||
R2_SECRET_KEY=369694e39fedfedb254158c147171f5760de84fa2346d5d5d5a961f1f517dbc6
|
||||
R2_BUCKET_NAME=recipes-backups
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
# Frontend API URL (for frontend container to connect to backend via docker-compose)
|
||||
# Use the service name 'backend' from docker-compose.yaml
|
||||
VITE_API_URL=http://backend:8000
|
||||
40
backend/.env.example
Normal file
40
backend/.env.example
Normal file
@ -0,0 +1,40 @@
|
||||
# Database Configuration (AWS RDS)
|
||||
DB_USER=recipes_user
|
||||
DB_PASSWORD=your_secure_password_here
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=your-rds-endpoint.us-east-1.rds.amazonaws.com
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=your-email@gmail.com
|
||||
SMTP_PASSWORD=your-app-password
|
||||
SMTP_FROM=your-email@gmail.com
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=your-google-client-id
|
||||
GOOGLE_CLIENT_SECRET=your-google-client-secret
|
||||
GOOGLE_REDIRECT_URI=https://your-domain.com/auth/google/callback
|
||||
FRONTEND_URL=https://your-domain.com
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=your-azure-client-id
|
||||
AZURE_CLIENT_SECRET=your-azure-client-secret
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=https://your-domain.com/auth/azure/callback
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
# Get these from your Cloudflare dashboard -> R2 -> Manage R2 API Tokens
|
||||
R2_ENDPOINT=https://<account-id>.r2.cloudflarestorage.com
|
||||
R2_ACCESS_KEY=your-r2-access-key-id
|
||||
R2_SECRET_KEY=your-r2-secret-access-key
|
||||
R2_BUCKET_NAME=recipes-backups
|
||||
|
||||
# Automatic Backup Schedule
|
||||
# Options: test (every 1 minute), daily, weekly, disabled
|
||||
BACKUP_INTERVAL=weekly
|
||||
|
||||
# Frontend API URL (for frontend container to connect to backend via docker-compose)
|
||||
# Use the service name 'backend' from docker-compose.yaml
|
||||
VITE_API_URL=http://backend:8000
|
||||
28
backend/.env.local
Normal file
28
backend/.env.local
Normal file
@ -0,0 +1,28 @@
|
||||
DATABASE_URL=postgresql://recipes_user:Aa123456@localhost:5432/recipes_db
|
||||
DB_PASSWORD=Aa123456
|
||||
DB_USER=recipes_user
|
||||
DB_NAME=recipes_db
|
||||
DB_HOST=localhost
|
||||
DB_PORT=5432
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST=smtp.gmail.com
|
||||
SMTP_PORT=587
|
||||
SMTP_USER=dvirlabs@gmail.com
|
||||
SMTP_PASSWORD=agaanrhbbazbdytv
|
||||
SMTP_FROM=dvirlabs@gmail.com
|
||||
|
||||
# Secret Key for sessions (OAuth state token)
|
||||
SECRET_KEY=your-super-secret-key-min-32-chars-dev-only-change-in-prod
|
||||
|
||||
# Google OAuth (LOCAL - localhost redirect)
|
||||
GOOGLE_CLIENT_ID=143092846986-hsi59m0on2c9rb5qrdoejfceieao2ioc.apps.googleusercontent.com
|
||||
GOOGLE_CLIENT_SECRET=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
|
||||
GOOGLE_REDIRECT_URI=http://localhost:8000/auth/google/callback
|
||||
FRONTEND_URL=http://localhost:5174
|
||||
|
||||
# Microsoft Entra ID (Azure AD) OAuth
|
||||
AZURE_CLIENT_ID=db244cf5-eb11-4738-a2ea-5b0716c9ec0a
|
||||
AZURE_CLIENT_SECRET=Zad8Q~qRBxaQq8up0lLXAq4pHzrVM2JFGFJhHaDp
|
||||
AZURE_TENANT_ID=consumers
|
||||
AZURE_REDIRECT_URI=http://localhost:8000/auth/azure/callback
|
||||
1
backend/.gitignore
vendored
Normal file
1
backend/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
__pycache__/
|
||||
150
backend/AUTOMATIC_BACKUP.md
Normal file
150
backend/AUTOMATIC_BACKUP.md
Normal file
@ -0,0 +1,150 @@
|
||||
# Automatic Backup System
|
||||
|
||||
## ✅ Setup Complete!
|
||||
|
||||
The backend now automatically runs backups when the application starts. No cron setup needed!
|
||||
|
||||
## How It Works
|
||||
|
||||
When you start the backend:
|
||||
```bash
|
||||
cd backend
|
||||
uvicorn main:app --reload
|
||||
```
|
||||
|
||||
The backup scheduler starts automatically and you'll see:
|
||||
```
|
||||
⏰ Backup scheduler started: EVERY 1 MINUTE (testing mode)
|
||||
⚠️ WARNING: Test mode active! Change BACKUP_INTERVAL to 'weekly' for production
|
||||
```
|
||||
|
||||
## Testing (Current Setting)
|
||||
|
||||
**Currently set to: `BACKUP_INTERVAL=test`**
|
||||
|
||||
- Runs **every 1 minute**
|
||||
- Check backend console logs for backup status
|
||||
- Check R2 bucket for new files
|
||||
|
||||
**Expected console output:**
|
||||
```
|
||||
[2025-12-21 15:30:45] INFO: Starting scheduled backup...
|
||||
[2025-12-21 15:30:53] INFO: ✅ Scheduled backup completed: recipes_db_20251221_153045.sql.gz
|
||||
```
|
||||
|
||||
## Change to Production Schedule
|
||||
|
||||
After testing works, update `.env`:
|
||||
|
||||
```env
|
||||
# Change from:
|
||||
BACKUP_INTERVAL=test
|
||||
|
||||
# To one of these:
|
||||
BACKUP_INTERVAL=weekly # Sunday at 2:00 AM (recommended)
|
||||
BACKUP_INTERVAL=daily # Every day at 2:00 AM
|
||||
BACKUP_INTERVAL=disabled # Turn off automatic backups
|
||||
```
|
||||
|
||||
Then restart the backend:
|
||||
```bash
|
||||
# Stop current server (Ctrl+C)
|
||||
# Start again
|
||||
uvicorn main:app --reload
|
||||
```
|
||||
|
||||
You'll see:
|
||||
```
|
||||
⏰ Backup scheduler started: WEEKLY on Sundays at 2:00 AM
|
||||
✅ Backup scheduler is running
|
||||
```
|
||||
|
||||
## Available Options
|
||||
|
||||
| Setting | Description | When it runs |
|
||||
|---------|-------------|--------------|
|
||||
| `test` | Testing mode | Every 1 minute |
|
||||
| `daily` | Daily backups | Every day at 2:00 AM |
|
||||
| `weekly` | Weekly backups | Sundays at 2:00 AM |
|
||||
| `disabled` | No automatic backups | Never (manual only) |
|
||||
|
||||
## Manual Backup Still Available
|
||||
|
||||
Admin users can still trigger manual backups from the Admin Panel in the UI, regardless of the automatic schedule.
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Check if scheduler is running
|
||||
Look for these messages in backend console when starting:
|
||||
```
|
||||
⏰ Backup scheduler started: ...
|
||||
✅ Backup scheduler is running
|
||||
```
|
||||
|
||||
### Watch backup logs in real-time
|
||||
The scheduled backups show in your backend console:
|
||||
```
|
||||
[2025-12-21 15:30:45] INFO: Starting scheduled backup...
|
||||
[2025-12-21 15:30:53] INFO: ✅ Scheduled backup completed: recipes_db_20251221_153045.sql.gz
|
||||
```
|
||||
|
||||
### Verify backups are created
|
||||
- Check R2 bucket in Cloudflare dashboard
|
||||
- Look for files: `recipes_db_YYYYMMDD_HHMMSS.sql.gz`
|
||||
|
||||
## Production Deployment
|
||||
|
||||
When deploying to production:
|
||||
|
||||
1. **Update `.env`:**
|
||||
```env
|
||||
BACKUP_INTERVAL=weekly
|
||||
```
|
||||
|
||||
2. **Keep the backend running:**
|
||||
- Use systemd, docker, or process manager
|
||||
- The scheduler only runs while the backend is running
|
||||
|
||||
3. **Using Docker:**
|
||||
```dockerfile
|
||||
# In your Dockerfile or docker-compose.yml
|
||||
# No additional cron setup needed!
|
||||
# The app handles scheduling internally
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Backup scheduler is DISABLED"
|
||||
- Check `.env` has `BACKUP_INTERVAL` set
|
||||
- Not set to `disabled`
|
||||
|
||||
### No backups running in test mode
|
||||
- Check backend console for error messages
|
||||
- Verify R2 credentials in `.env`
|
||||
- Verify database credentials in `.env`
|
||||
- Check that `APScheduler` is installed: `pip install APScheduler`
|
||||
|
||||
### Backups not running at scheduled time
|
||||
- Backend must be running 24/7
|
||||
- Use systemd or docker in production
|
||||
- Check server timezone matches expected schedule
|
||||
|
||||
### Want to disable automatic backups
|
||||
```env
|
||||
BACKUP_INTERVAL=disabled
|
||||
```
|
||||
|
||||
## Benefits Over Cron
|
||||
|
||||
✅ **No external setup** - Works immediately when backend starts
|
||||
✅ **Cross-platform** - Works on Windows, Linux, Docker
|
||||
✅ **Easy testing** - Just change one env variable
|
||||
✅ **Logs in console** - See backup status in backend logs
|
||||
✅ **No permissions issues** - Runs with same permissions as backend
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **Test now**: Start backend and wait 1-2 minutes
|
||||
2. **Verify**: Check console logs and R2 bucket
|
||||
3. **Switch to weekly**: Change `.env` to `BACKUP_INTERVAL=weekly`
|
||||
4. **Restart backend**: The new schedule takes effect
|
||||
93
backend/BACKUP_README.md
Normal file
93
backend/BACKUP_README.md
Normal file
@ -0,0 +1,93 @@
|
||||
# Database Backup & Restore Scripts
|
||||
|
||||
## Overview
|
||||
Automated database backup system that exports PostgreSQL database, compresses it with gzip, and uploads to Cloudflare R2 storage.
|
||||
|
||||
## Requirements
|
||||
```bash
|
||||
pip install boto3
|
||||
```
|
||||
|
||||
## Configuration
|
||||
All configuration is stored in `.env` file:
|
||||
- `R2_ENDPOINT`: Cloudflare R2 endpoint URL
|
||||
- `R2_ACCESS_KEY`: R2 API access key
|
||||
- `R2_SECRET_KEY`: R2 API secret key
|
||||
- `R2_BUCKET`: R2 bucket name
|
||||
- Database credentials (DB_HOST, DB_PORT, DB_NAME, DB_USER, DB_PASSWORD)
|
||||
|
||||
## Usage
|
||||
|
||||
### Create Backup
|
||||
```bash
|
||||
cd backend
|
||||
python backup_db.py
|
||||
```
|
||||
|
||||
This will:
|
||||
1. Export the database using `pg_dump`
|
||||
2. Compress the dump with gzip (typically 80-90% reduction)
|
||||
3. Upload to R2 with timestamp
|
||||
4. List all backups in R2
|
||||
5. Clean up old local backups (keeps last 3)
|
||||
|
||||
### Restore from Backup
|
||||
```bash
|
||||
cd backend
|
||||
python restore_db.py
|
||||
```
|
||||
|
||||
This will:
|
||||
1. List all available backups in R2
|
||||
2. Let you select which backup to restore
|
||||
3. Download and decompress the backup
|
||||
4. Restore to the database (with confirmation prompt)
|
||||
|
||||
**WARNING**: Restore will drop all existing tables and recreate them from backup!
|
||||
|
||||
## Automated Backups
|
||||
|
||||
### Linux/Mac (Cron)
|
||||
Add to crontab:
|
||||
```bash
|
||||
# Daily backup at 2 AM
|
||||
0 2 * * * cd /path/to/backend && python backup_db.py >> backup.log 2>&1
|
||||
```
|
||||
|
||||
### Windows (Task Scheduler)
|
||||
Create a scheduled task:
|
||||
1. Open Task Scheduler
|
||||
2. Create Basic Task
|
||||
3. Name: "Recipe DB Backup"
|
||||
4. Trigger: Daily at 2:00 AM
|
||||
5. Action: Start a program
|
||||
- Program: `python`
|
||||
- Arguments: `backup_db.py`
|
||||
- Start in: `C:\path\to\backend`
|
||||
|
||||
## Backup File Format
|
||||
Files are named: `recipes_db_YYYYMMDD_HHMMSS.sql.gz`
|
||||
|
||||
Example: `recipes_db_20251221_140530.sql.gz`
|
||||
|
||||
## Storage
|
||||
- Local backups stored in: `backend/backups/`
|
||||
- R2 backups stored in: `my-recipes-db-bkp` bucket
|
||||
- Local backups auto-cleanup (keeps last 3)
|
||||
- R2 backups are never auto-deleted (manual cleanup if needed)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### pg_dump not found
|
||||
Install PostgreSQL client tools:
|
||||
- **Windows**: Install PostgreSQL and add to PATH
|
||||
- **Linux**: `sudo apt install postgresql-client`
|
||||
- **Mac**: `brew install postgresql`
|
||||
|
||||
### Connection errors
|
||||
Verify database credentials in `.env` file
|
||||
|
||||
### R2 upload errors
|
||||
- Check R2 credentials
|
||||
- Verify bucket exists
|
||||
- Ensure API token has "Edit" permissions
|
||||
164
backend/BACKUP_SYSTEM_COMPLETE.md
Normal file
164
backend/BACKUP_SYSTEM_COMPLETE.md
Normal file
@ -0,0 +1,164 @@
|
||||
# Database Backup System - Complete Setup
|
||||
|
||||
## ✅ What's Been Implemented
|
||||
|
||||
### 1. **Backend API Endpoints** (Admin Only)
|
||||
- `POST /admin/backup` - Trigger manual backup
|
||||
- `GET /admin/backups` - List all available backups
|
||||
- `POST /admin/restore?filename=<name>` - Restore from backup
|
||||
|
||||
### 2. **Frontend Admin Panel**
|
||||
- New "ניהול" (Management) tab in navigation (visible to admin users only)
|
||||
- 🛡️ Admin button in top bar
|
||||
- Full backup management interface:
|
||||
- Create new backups instantly
|
||||
- View all backups with dates and sizes
|
||||
- Restore from any backup with confirmation
|
||||
|
||||
### 3. **Automated Weekly Backups**
|
||||
- Batch script: `run_backup.bat`
|
||||
- Full setup guide: `WEEKLY_BACKUP_SETUP.md`
|
||||
- Configured for Windows Task Scheduler
|
||||
|
||||
## 🚀 How to Use
|
||||
|
||||
### **Manual Backup (Admin User)**
|
||||
|
||||
1. Login with admin account
|
||||
2. Click 🛡️ "ניהול" button in top bar (or use the ניהול tab)
|
||||
3. Click "צור גיבוי חדש" (Create New Backup)
|
||||
4. Backup is created, compressed, and uploaded to R2
|
||||
5. See confirmation toast: "גיבוי נוצר בהצלחה! 📦"
|
||||
|
||||
### **Restore from Backup (Admin User)**
|
||||
|
||||
1. Go to Admin Panel (🛡️ ניהול)
|
||||
2. View all available backups in the table
|
||||
3. Click "שחזר" (Restore) button for desired backup
|
||||
4. Confirm the warning (this will delete current data!)
|
||||
5. Page will refresh automatically after restore
|
||||
|
||||
### **Setup Weekly Automatic Backups**
|
||||
|
||||
Follow the instructions in `WEEKLY_BACKUP_SETUP.md`:
|
||||
|
||||
**Quick Steps:**
|
||||
1. Open Task Scheduler (`Win + R` → `taskschd.msc`)
|
||||
2. Create Task → "Recipe DB Weekly Backup"
|
||||
3. Set trigger: Weekly, Sunday, 2:00 AM
|
||||
4. Set action: Run `C:\Path\To\backend\run_backup.bat`
|
||||
5. Configure to run even when not logged in
|
||||
|
||||
## 📁 Files Created/Modified
|
||||
|
||||
### Backend
|
||||
- ✅ `backup_restore_api.py` - Core backup/restore functions
|
||||
- ✅ `main.py` - Added admin endpoints
|
||||
- ✅ `requirements.txt` - Added boto3 dependency
|
||||
- ✅ `.env` - Added R2 credentials
|
||||
- ✅ `run_backup.bat` - Windows batch script for scheduled tasks
|
||||
- ✅ `BACKUP_README.md` - Complete documentation
|
||||
- ✅ `WEEKLY_BACKUP_SETUP.md` - Task Scheduler setup guide
|
||||
|
||||
### Frontend
|
||||
- ✅ `backupApi.js` - API calls for backup operations
|
||||
- ✅ `components/AdminPanel.jsx` - Admin UI component
|
||||
- ✅ `components/TopBar.jsx` - Added admin button
|
||||
- ✅ `App.jsx` - Added admin view and navigation
|
||||
- ✅ `App.css` - Added admin panel styles
|
||||
|
||||
## 🔐 Security
|
||||
|
||||
- **Admin-only access**: All backup endpoints check `is_admin` flag
|
||||
- **Non-admin users**: Cannot see the admin button or access backup endpoints
|
||||
- **403 Forbidden**: Returned if non-admin tries to access admin endpoints
|
||||
|
||||
## 💾 Backup Details
|
||||
|
||||
### What's Backed Up
|
||||
- Complete PostgreSQL database (recipes_db)
|
||||
- All tables: users, recipes, grocery lists, shares, notifications
|
||||
|
||||
### Backup Process
|
||||
1. Uses `pg_dump` to export database
|
||||
2. Compresses with gzip (typically 80-90% size reduction)
|
||||
3. Uploads to Cloudflare R2 with timestamp
|
||||
4. Filename format: `recipes_db_YYYYMMDD_HHMMSS.sql.gz`
|
||||
5. Local backups auto-cleanup (keeps last 3)
|
||||
|
||||
### Restore Process
|
||||
1. Downloads from R2
|
||||
2. Decompresses file
|
||||
3. **Drops all existing tables** (CASCADE)
|
||||
4. Restores from SQL file
|
||||
5. Cleans up temporary files
|
||||
|
||||
## 🧪 Testing
|
||||
|
||||
### Test Manual Backup
|
||||
```bash
|
||||
cd backend
|
||||
python backup_db.py
|
||||
```
|
||||
|
||||
### Test Manual Restore
|
||||
```bash
|
||||
cd backend
|
||||
python restore_db.py
|
||||
```
|
||||
|
||||
### Test via Web UI
|
||||
1. Login as admin
|
||||
2. Navigate to Admin Panel
|
||||
3. Click "צור גיבוי חדש"
|
||||
4. Check R2 bucket for new file
|
||||
|
||||
## ⚠️ Important Notes
|
||||
|
||||
1. **Restore is destructive**: It deletes ALL current data
|
||||
2. **Admin access required**: Set user's `is_admin = true` in database
|
||||
3. **R2 credentials**: Already configured in `.env`
|
||||
4. **Weekly backups**: Manual setup required (follow WEEKLY_BACKUP_SETUP.md)
|
||||
5. **PostgreSQL tools**: Must have `pg_dump` and `psql` in system PATH
|
||||
|
||||
## 🔧 Troubleshooting
|
||||
|
||||
### "Admin access required" error
|
||||
- Check if user has `is_admin = true` in database
|
||||
- Run: `SELECT username, is_admin FROM users;` in psql
|
||||
|
||||
### Backup fails
|
||||
- Check `backend/backup.log` for errors
|
||||
- Verify R2 credentials in `.env`
|
||||
- Verify database credentials in `.env`
|
||||
- Test: `python backup_db.py` manually
|
||||
|
||||
### Can't see admin button
|
||||
- Verify user's `is_admin` flag in database
|
||||
- Refresh page after changing admin status
|
||||
- Check browser console for errors
|
||||
|
||||
### Scheduled backup doesn't run
|
||||
- Check Task Scheduler → Task History
|
||||
- Verify `run_backup.bat` path is correct
|
||||
- Check `backend/backup.log` for errors
|
||||
- Test batch file manually first
|
||||
|
||||
## 📊 What Admins Can Do
|
||||
|
||||
✅ Create manual backups anytime
|
||||
✅ View all backups with dates and sizes
|
||||
✅ Restore from any backup point
|
||||
✅ See backup history in table format
|
||||
✅ All regular user features (recipes, grocery lists, etc.)
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **✅ Test the system**: Create a manual backup from Admin Panel
|
||||
2. **📅 Setup weekly backups**: Follow WEEKLY_BACKUP_SETUP.md
|
||||
3. **🔒 Secure admin access**: Only give admin rights to trusted users
|
||||
4. **📝 Document your backup strategy**: When/how often you back up
|
||||
|
||||
---
|
||||
|
||||
**Your database is now protected with automated backups! 🎉**
|
||||
226
backend/CRON_SETUP.md
Normal file
226
backend/CRON_SETUP.md
Normal file
@ -0,0 +1,226 @@
|
||||
# Automated Backup with Cron (Linux/Production)
|
||||
|
||||
## Quick Setup
|
||||
|
||||
### Option 1: Automated Setup (Recommended)
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
chmod +x setup_cron.sh
|
||||
./setup_cron.sh
|
||||
```
|
||||
|
||||
Then select:
|
||||
- **Option 1**: Every 1 minute (for testing)
|
||||
- **Option 2**: Weekly (Sunday 2 AM) - after testing works
|
||||
|
||||
### Option 2: Manual Setup
|
||||
|
||||
#### 1. Make script executable
|
||||
```bash
|
||||
cd backend
|
||||
chmod +x run_backup.sh
|
||||
```
|
||||
|
||||
#### 2. Edit crontab
|
||||
```bash
|
||||
crontab -e
|
||||
```
|
||||
|
||||
#### 3. Add one of these lines:
|
||||
|
||||
**For Testing (every 1 minute):**
|
||||
```bash
|
||||
* * * * * cd /path/to/backend && ./run_backup.sh
|
||||
```
|
||||
|
||||
**For Production (weekly - Sunday 2 AM):**
|
||||
```bash
|
||||
0 2 * * 0 cd /path/to/backend && ./run_backup.sh
|
||||
```
|
||||
|
||||
**For Daily (2 AM):**
|
||||
```bash
|
||||
0 2 * * * cd /path/to/backend && ./run_backup.sh
|
||||
```
|
||||
|
||||
Replace `/path/to/backend` with your actual path, e.g.:
|
||||
```bash
|
||||
* * * * * cd /home/user/my-recipes/backend && ./run_backup.sh
|
||||
```
|
||||
|
||||
#### 4. Save and exit
|
||||
- Press `Ctrl+X`, then `Y`, then `Enter` (if using nano)
|
||||
- Or `:wq` (if using vim)
|
||||
|
||||
## Verify It's Working
|
||||
|
||||
### 1. Check if cron job is installed
|
||||
```bash
|
||||
crontab -l | grep backup
|
||||
```
|
||||
|
||||
### 2. Wait 2-3 minutes (for 1-minute test)
|
||||
|
||||
### 3. Check the log
|
||||
```bash
|
||||
cd backend
|
||||
tail -f backup.log
|
||||
```
|
||||
|
||||
Expected output:
|
||||
```
|
||||
[2025-12-21 15:30:45] Starting backup...
|
||||
[2025-12-21 15:30:47] Creating database dump...
|
||||
[2025-12-21 15:30:49] Compressing file...
|
||||
[2025-12-21 15:30:51] Uploading to R2...
|
||||
[2025-12-21 15:30:53] ✅ Backup completed: recipes_db_20251221_153045.sql.gz
|
||||
```
|
||||
|
||||
### 4. Check R2 bucket
|
||||
- Should see new backup files appearing
|
||||
- Files named: `recipes_db_YYYYMMDD_HHMMSS.sql.gz`
|
||||
|
||||
## Change from Testing to Weekly
|
||||
|
||||
### Method 1: Using setup script
|
||||
```bash
|
||||
cd backend
|
||||
./setup_cron.sh
|
||||
```
|
||||
Select option 2 (Weekly)
|
||||
|
||||
### Method 2: Manual edit
|
||||
```bash
|
||||
crontab -e
|
||||
```
|
||||
|
||||
Change this line:
|
||||
```bash
|
||||
* * * * * cd /path/to/backend && ./run_backup.sh
|
||||
```
|
||||
|
||||
To this:
|
||||
```bash
|
||||
0 2 * * 0 cd /path/to/backend && ./run_backup.sh
|
||||
```
|
||||
|
||||
Save and exit.
|
||||
|
||||
## Cron Schedule Reference
|
||||
|
||||
```
|
||||
* * * * * command
|
||||
│ │ │ │ │
|
||||
│ │ │ │ └─── Day of week (0-7, 0 and 7 are Sunday)
|
||||
│ │ │ └───── Month (1-12)
|
||||
│ │ └─────── Day of month (1-31)
|
||||
│ └───────── Hour (0-23)
|
||||
└─────────── Minute (0-59)
|
||||
```
|
||||
|
||||
**Examples:**
|
||||
- `* * * * *` - Every minute
|
||||
- `0 2 * * *` - Daily at 2:00 AM
|
||||
- `0 2 * * 0` - Weekly on Sunday at 2:00 AM
|
||||
- `0 2 * * 1` - Weekly on Monday at 2:00 AM
|
||||
- `0 */6 * * *` - Every 6 hours
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Cron job not running
|
||||
|
||||
**1. Check cron service is running:**
|
||||
```bash
|
||||
sudo systemctl status cron
|
||||
# or
|
||||
sudo systemctl status crond
|
||||
```
|
||||
|
||||
**2. Check cron logs:**
|
||||
```bash
|
||||
# Ubuntu/Debian
|
||||
grep CRON /var/log/syslog
|
||||
|
||||
# CentOS/RHEL
|
||||
tail -f /var/log/cron
|
||||
```
|
||||
|
||||
**3. Test script manually:**
|
||||
```bash
|
||||
cd backend
|
||||
./run_backup.sh
|
||||
cat backup.log
|
||||
```
|
||||
|
||||
### No backup.log file
|
||||
|
||||
**Check permissions:**
|
||||
```bash
|
||||
ls -la run_backup.sh
|
||||
# Should be: -rwxr-xr-x
|
||||
|
||||
chmod +x run_backup.sh
|
||||
```
|
||||
|
||||
**Test Python script:**
|
||||
```bash
|
||||
cd backend
|
||||
python3 backup_db.py
|
||||
```
|
||||
|
||||
### Script runs but backup fails
|
||||
|
||||
**Check backup.log for errors:**
|
||||
```bash
|
||||
cat backup.log
|
||||
```
|
||||
|
||||
Common issues:
|
||||
- Database credentials wrong (check `.env`)
|
||||
- R2 credentials wrong (check `.env`)
|
||||
- `pg_dump` not installed: `sudo apt install postgresql-client`
|
||||
- Python packages missing: `pip install boto3`
|
||||
|
||||
## Remove Cron Job
|
||||
|
||||
```bash
|
||||
crontab -e
|
||||
```
|
||||
|
||||
Delete the line with `run_backup.sh`, save and exit.
|
||||
|
||||
## Docker/Container Environment
|
||||
|
||||
If running in Docker, add to your docker-compose.yml or Dockerfile:
|
||||
|
||||
```yaml
|
||||
# docker-compose.yml
|
||||
services:
|
||||
backend:
|
||||
# ... other config
|
||||
command: >
|
||||
sh -c "
|
||||
echo '0 2 * * 0 cd /app && python backup_db.py >> backup.log 2>&1' | crontab - &&
|
||||
crond -f -l 2
|
||||
"
|
||||
```
|
||||
|
||||
Or use a separate container with a cron image.
|
||||
|
||||
## Production Recommendations
|
||||
|
||||
1. **Use weekly backups** - Daily can consume too much storage
|
||||
2. **Monitor logs** - Set up log monitoring/alerts
|
||||
3. **Test restore** - Periodically test restoring from backups
|
||||
4. **Set up retention** - Automatically delete old backups (not implemented yet)
|
||||
5. **Use separate backup server** - Don't backup to same server as database
|
||||
|
||||
## Success Checklist
|
||||
|
||||
- ✅ `run_backup.sh` is executable
|
||||
- ✅ Cron job is installed (`crontab -l` shows it)
|
||||
- ✅ Test run completed successfully
|
||||
- ✅ `backup.log` shows successful backup
|
||||
- ✅ R2 bucket contains backup files
|
||||
- ✅ Changed from 1-minute to weekly schedule
|
||||
@ -5,10 +5,11 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# deps for psycopg2
|
||||
# deps for psycopg2 and pg_dump/psql for backups
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
libpq-dev \
|
||||
postgresql-client \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY requirements.txt .
|
||||
@ -18,4 +19,4 @@ COPY . .
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
||||
21
backend/MIGRATION_INSTRUCTIONS.md
Normal file
21
backend/MIGRATION_INSTRUCTIONS.md
Normal file
@ -0,0 +1,21 @@
|
||||
# Database Migration Instructions
|
||||
|
||||
## Add auth_provider column to users table
|
||||
|
||||
Run this command in your backend directory:
|
||||
|
||||
```bash
|
||||
# Windows (PowerShell)
|
||||
$env:PGPASSWORD="recipes_password"; psql -h localhost -U recipes_user -d recipes_db -f add_auth_provider_column.sql
|
||||
|
||||
# Or using psql directly
|
||||
psql -h localhost -U recipes_user -d recipes_db -f add_auth_provider_column.sql
|
||||
```
|
||||
|
||||
This will:
|
||||
1. Add the `auth_provider` column to the users table (default: 'local')
|
||||
2. Update all existing users to have 'local' as their auth_provider
|
||||
3. Create an index for faster lookups
|
||||
4. Display the updated table structure
|
||||
|
||||
After running the migration, restart your backend server.
|
||||
142
backend/TEST_BACKUP_SCHEDULE.md
Normal file
142
backend/TEST_BACKUP_SCHEDULE.md
Normal file
@ -0,0 +1,142 @@
|
||||
# Test Scheduled Backup - 1 Minute Setup
|
||||
|
||||
## Quick Test Setup (1 Minute Interval)
|
||||
|
||||
### Step 1: Open Task Scheduler
|
||||
1. Press `Win + R`
|
||||
2. Type `taskschd.msc`
|
||||
3. Press Enter
|
||||
|
||||
### Step 2: Create Test Task
|
||||
1. Click **"Create Task"** (not Basic Task)
|
||||
2. **General Tab:**
|
||||
- Name: `Recipe DB Backup TEST`
|
||||
- Description: `Test backup every 1 minute`
|
||||
- Select "Run whether user is logged on or not"
|
||||
- Check "Run with highest privileges"
|
||||
|
||||
### Step 3: Set 1-Minute Trigger
|
||||
1. Go to **Triggers** tab
|
||||
2. Click "New..."
|
||||
3. Configure:
|
||||
- Begin the task: `On a schedule`
|
||||
- Settings: `Daily`
|
||||
- Recur every: `1 days`
|
||||
- Start: Set to current time (e.g., if it's 3:00 PM, set to 3:00 PM)
|
||||
- Check "Repeat task every": `1 minute`
|
||||
- For a duration of: `1 hour`
|
||||
- Check "Enabled"
|
||||
4. Click OK
|
||||
|
||||
### Step 4: Set Action
|
||||
1. Go to **Actions** tab
|
||||
2. Click "New..."
|
||||
3. Configure:
|
||||
- Action: `Start a program`
|
||||
- Program/script: Browse to `run_backup.bat` or paste full path:
|
||||
```
|
||||
C:\Users\dvirl\OneDrive\Desktop\gitea\my-recipes\backend\run_backup.bat
|
||||
```
|
||||
- Start in:
|
||||
```
|
||||
C:\Users\dvirl\OneDrive\Desktop\gitea\my-recipes\backend
|
||||
```
|
||||
4. Click OK
|
||||
|
||||
### Step 5: Settings
|
||||
1. Go to **Conditions** tab:
|
||||
- Uncheck "Start the task only if the computer is on AC power"
|
||||
|
||||
2. Go to **Settings** tab:
|
||||
- Check "Run task as soon as possible after a scheduled start is missed"
|
||||
- Check "If the task fails, restart every: 1 minutes"
|
||||
- Attempt to restart up to: `3 times`
|
||||
- Check "Stop the task if it runs longer than: 30 minutes"
|
||||
|
||||
3. Click OK
|
||||
4. Enter your Windows password when prompted
|
||||
|
||||
### Step 6: Monitor Test Results
|
||||
|
||||
**Check the backup log:**
|
||||
```bash
|
||||
cd C:\Users\dvirl\OneDrive\Desktop\gitea\my-recipes\backend
|
||||
type backup.log
|
||||
```
|
||||
|
||||
**Or open in Notepad:**
|
||||
- Navigate to `backend\backup.log`
|
||||
- Should see new entries every minute
|
||||
|
||||
**Check R2 bucket:**
|
||||
- Login to Cloudflare Dashboard
|
||||
- Go to R2 → my-recipes-db-bkp
|
||||
- Should see new backup files appearing every minute
|
||||
|
||||
### Expected Log Output
|
||||
```
|
||||
[2025-12-21 15:30:45] Starting backup...
|
||||
[2025-12-21 15:30:47] Creating database dump...
|
||||
[2025-12-21 15:30:49] Compressing file...
|
||||
[2025-12-21 15:30:51] Uploading to R2...
|
||||
[2025-12-21 15:30:53] Backup completed: recipes_db_20251221_153045.sql.gz
|
||||
[2025-12-21 15:30:53] Size: 2.5 MB
|
||||
```
|
||||
|
||||
### Verify It's Working
|
||||
|
||||
Wait 2-3 minutes and check:
|
||||
1. ✅ `backend\backup.log` has multiple entries
|
||||
2. ✅ R2 bucket has new backup files
|
||||
3. ✅ Task Scheduler shows "Last Run Result: (0x0)" = Success
|
||||
|
||||
### If It Works - Convert to Weekly
|
||||
|
||||
1. Open Task Scheduler
|
||||
2. Find "Recipe DB Backup TEST"
|
||||
3. Right-click → Properties
|
||||
4. Go to **Triggers** tab
|
||||
5. Edit the trigger:
|
||||
- Settings: Change to `Weekly`
|
||||
- Recur every: `1 weeks`
|
||||
- Days: Select `Sunday` (or your preferred day)
|
||||
- Time: `02:00:00` (2 AM)
|
||||
- **Uncheck** "Repeat task every"
|
||||
6. Click OK
|
||||
7. **Rename task**: Right-click → Rename → "Recipe DB Weekly Backup"
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
**No backup.log file:**
|
||||
- Task might not be running
|
||||
- Check Task Scheduler History tab
|
||||
- Run `run_backup.bat` manually first
|
||||
|
||||
**backup.log shows errors:**
|
||||
- Check if Python is in PATH
|
||||
- Verify database credentials in `.env`
|
||||
- Verify R2 credentials in `.env`
|
||||
|
||||
**Task shows "Could not start":**
|
||||
- Verify the paths are correct
|
||||
- Make sure you entered Windows password
|
||||
- Try "Run" button in Task Scheduler manually
|
||||
|
||||
**Want to stop test:**
|
||||
- Right-click task → Disable
|
||||
- Or delete the task
|
||||
|
||||
### Manual Test First
|
||||
|
||||
Before setting up Task Scheduler, test manually:
|
||||
```bash
|
||||
cd C:\Users\dvirl\OneDrive\Desktop\gitea\my-recipes\backend
|
||||
run_backup.bat
|
||||
```
|
||||
|
||||
Check if:
|
||||
1. backup.log is created
|
||||
2. Backup appears in R2
|
||||
3. No errors in log
|
||||
|
||||
If manual test works, Task Scheduler will work too!
|
||||
131
backend/WEEKLY_BACKUP_SETUP.md
Normal file
131
backend/WEEKLY_BACKUP_SETUP.md
Normal file
@ -0,0 +1,131 @@
|
||||
# Weekly Backup Setup - Windows Task Scheduler
|
||||
|
||||
This guide will help you set up automatic weekly backups of your database.
|
||||
|
||||
## Setup Instructions
|
||||
|
||||
### 1. Create Batch Script
|
||||
|
||||
Create a file `run_backup.bat` in the `backend` folder:
|
||||
|
||||
```batch
|
||||
@echo off
|
||||
cd /d "%~dp0"
|
||||
python backup_db.py >> backup.log 2>&1
|
||||
```
|
||||
|
||||
### 2. Open Task Scheduler
|
||||
|
||||
1. Press `Win + R`
|
||||
2. Type `taskschd.msc`
|
||||
3. Press Enter
|
||||
|
||||
### 3. Create New Task
|
||||
|
||||
1. Click "Create Task" (not "Create Basic Task")
|
||||
2. In **General** tab:
|
||||
- Name: `Recipe DB Weekly Backup`
|
||||
- Description: `Automatic weekly database backup to Cloudflare R2`
|
||||
- Select "Run whether user is logged on or not"
|
||||
- Check "Run with highest privileges"
|
||||
|
||||
### 4. Configure Trigger
|
||||
|
||||
1. Go to **Triggers** tab
|
||||
2. Click "New..."
|
||||
3. Configure:
|
||||
- Begin the task: `On a schedule`
|
||||
- Settings: `Weekly`
|
||||
- Recur every: `1 weeks`
|
||||
- Days: Select `Sunday` (or your preferred day)
|
||||
- Time: `02:00:00` (2 AM)
|
||||
- Check "Enabled"
|
||||
4. Click OK
|
||||
|
||||
### 5. Configure Action
|
||||
|
||||
1. Go to **Actions** tab
|
||||
2. Click "New..."
|
||||
3. Configure:
|
||||
- Action: `Start a program`
|
||||
- Program/script: `C:\Path\To\backend\run_backup.bat`
|
||||
*(Replace with your actual path)*
|
||||
- Start in: `C:\Path\To\backend\`
|
||||
*(Replace with your actual path)*
|
||||
4. Click OK
|
||||
|
||||
### 6. Additional Settings
|
||||
|
||||
1. Go to **Conditions** tab:
|
||||
- Uncheck "Start the task only if the computer is on AC power"
|
||||
|
||||
2. Go to **Settings** tab:
|
||||
- Check "Run task as soon as possible after a scheduled start is missed"
|
||||
- If the task fails, restart every: `10 minutes`
|
||||
- Attempt to restart up to: `3 times`
|
||||
|
||||
3. Click OK
|
||||
|
||||
### 7. Enter Password
|
||||
|
||||
- You'll be prompted to enter your Windows password
|
||||
- This allows the task to run even when you're not logged in
|
||||
|
||||
## Verify Setup
|
||||
|
||||
### Test the Task
|
||||
|
||||
1. In Task Scheduler, find your task
|
||||
2. Right-click → "Run"
|
||||
3. Check `backend/backup.log` for results
|
||||
|
||||
### View Scheduled Runs
|
||||
|
||||
- In Task Scheduler, select your task
|
||||
- Check the "History" tab to see past runs
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Task doesn't run
|
||||
|
||||
- Check Task Scheduler → Task History for errors
|
||||
- Verify Python is in system PATH
|
||||
- Try running `run_backup.bat` manually first
|
||||
|
||||
### No log file created
|
||||
|
||||
- Check file permissions in backend folder
|
||||
- Verify the "Start in" path is correct
|
||||
|
||||
### Backup fails
|
||||
|
||||
- Check `backend/backup.log` for error messages
|
||||
- Verify database credentials in `.env`
|
||||
- Verify R2 credentials in `.env`
|
||||
- Test by running `python backup_db.py` manually
|
||||
|
||||
## Change Backup Schedule
|
||||
|
||||
1. Open Task Scheduler
|
||||
2. Find "Recipe DB Weekly Backup"
|
||||
3. Right-click → Properties
|
||||
4. Go to Triggers tab
|
||||
5. Edit the trigger to change day/time
|
||||
6. Click OK
|
||||
|
||||
## Disable Automatic Backups
|
||||
|
||||
1. Open Task Scheduler
|
||||
2. Find "Recipe DB Weekly Backup"
|
||||
3. Right-click → Disable
|
||||
|
||||
## View Backup Log
|
||||
|
||||
Check `backend/backup.log` to see backup history:
|
||||
|
||||
```batch
|
||||
cd backend
|
||||
type backup.log
|
||||
```
|
||||
|
||||
Or open it in Notepad.
|
||||
BIN
backend/__pycache__/auth_utils.cpython-312.pyc
Normal file
BIN
backend/__pycache__/auth_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/auth_utils.cpython-313.pyc
Normal file
BIN
backend/__pycache__/auth_utils.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
backend/__pycache__/email_utils.cpython-312.pyc
Normal file
BIN
backend/__pycache__/email_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/grocery_db_utils.cpython-312.pyc
Normal file
BIN
backend/__pycache__/grocery_db_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/grocery_db_utils.cpython-313.pyc
Normal file
BIN
backend/__pycache__/grocery_db_utils.cpython-313.pyc
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
backend/__pycache__/notification_db_utils.cpython-312.pyc
Normal file
BIN
backend/__pycache__/notification_db_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/notification_db_utils.cpython-313.pyc
Normal file
BIN
backend/__pycache__/notification_db_utils.cpython-313.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/oauth_utils.cpython-312.pyc
Normal file
BIN
backend/__pycache__/oauth_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/user_db_utils.cpython-312.pyc
Normal file
BIN
backend/__pycache__/user_db_utils.cpython-312.pyc
Normal file
Binary file not shown.
BIN
backend/__pycache__/user_db_utils.cpython-313.pyc
Normal file
BIN
backend/__pycache__/user_db_utils.cpython-313.pyc
Normal file
Binary file not shown.
30
backend/add_auth_provider_column.sql
Normal file
30
backend/add_auth_provider_column.sql
Normal file
@ -0,0 +1,30 @@
|
||||
-- Add auth_provider column to users table.
-- Tracks whether an account is local or created via OAuth (google, microsoft, ...).

-- Add the column only if it is not already present (idempotent migration).
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1
        FROM information_schema.columns
        WHERE table_name = 'users' AND column_name = 'auth_provider'
    ) THEN
        ALTER TABLE users ADD COLUMN auth_provider VARCHAR(50) DEFAULT 'local' NOT NULL;
    END IF;
END $$;

-- Backfill: any pre-existing rows default to the 'local' provider.
UPDATE users SET auth_provider = 'local' WHERE auth_provider IS NULL;

-- Index for provider-based lookups.
CREATE INDEX IF NOT EXISTS idx_users_auth_provider ON users(auth_provider);

-- Show the resulting users table structure for verification.
SELECT
    column_name,
    data_type,
    is_nullable,
    column_default
FROM information_schema.columns
WHERE table_name = 'users'
ORDER BY ordinal_position;
|
||||
5
backend/add_is_pinned_column.sql
Normal file
5
backend/add_is_pinned_column.sql
Normal file
@ -0,0 +1,5 @@
|
||||
-- Add is_pinned column to grocery_lists table (no-op if it already exists).
ALTER TABLE grocery_lists ADD COLUMN IF NOT EXISTS is_pinned BOOLEAN DEFAULT FALSE;

-- Verify the column was added (psql meta-command).
\d grocery_lists
|
||||
96
backend/auth_utils.py
Normal file
96
backend/auth_utils.py
Normal file
@ -0,0 +1,96 @@
|
||||
import os
from datetime import datetime, timedelta, timezone
from typing import Optional

import bcrypt
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jose import JWTError, jwt
|
||||
|
||||
# Secret key for JWT (use environment variable in production)
|
||||
SECRET_KEY = os.getenv("SECRET_KEY", "your-secret-key-change-in-production")
|
||||
ALGORITHM = "HS256"
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 7 # 7 days
|
||||
|
||||
security = HTTPBearer()
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
    """Hash a password for storing."""
    # bcrypt ignores input past 72 bytes; truncate explicitly so the
    # behaviour is deterministic across bcrypt versions.
    truncated = password.encode('utf-8')[:72]
    return bcrypt.hashpw(truncated, bcrypt.gensalt()).decode('utf-8')
|
||||
|
||||
|
||||
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Verify a stored password against one provided by user"""
    # Apply the same 72-byte truncation used when hashing (bcrypt limit).
    candidate = plain_password.encode('utf-8')[:72]
    stored = hashed_password.encode('utf-8')
    return bcrypt.checkpw(candidate, stored)
|
||||
|
||||
|
||||
def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    """Create a signed JWT access token.

    Args:
        data: Claims to embed in the token (e.g. {"sub": str(user_id)}).
        expires_delta: Optional custom lifetime; when omitted the token
            expires after ACCESS_TOKEN_EXPIRE_MINUTES (7 days).

    Returns:
        The encoded JWT as a string.
    """
    to_encode = data.copy()
    # FIX: datetime.utcnow() is deprecated (Python 3.12+) and produces a
    # naive datetime; use an aware UTC timestamp so "exp" is unambiguous.
    now = datetime.now(timezone.utc)
    if expires_delta:
        expire = now + expires_delta
    else:
        expire = now + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    to_encode.update({"exp": expire})
    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
|
||||
|
||||
|
||||
def decode_token(token: str) -> dict:
    """Decode and verify JWT token"""
    try:
        return jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
    except JWTError:
        # Any signature/expiry problem maps to one generic 401 response.
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )
|
||||
|
||||
|
||||
def get_current_user(credentials: HTTPAuthorizationCredentials = Depends(security)) -> dict:
    """Get current user from JWT token (for protected routes)"""
    # Imported lazily to avoid a circular import at module load time.
    from user_db_utils import get_user_by_id

    payload = decode_token(credentials.credentials)
    user_id = payload.get("sub")
    if user_id is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication credentials",
        )

    # Re-read the user from the database so the result carries the
    # current is_admin flag rather than anything baked into the token.
    user = get_user_by_id(int(user_id))
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
        )

    return {
        "user_id": user["id"],
        "username": user["username"],
        "display_name": user["display_name"],
        "is_admin": user.get("is_admin", False),
    }
|
||||
|
||||
|
||||
# Optional dependency - returns None if no token provided
|
||||
def get_current_user_optional(credentials: Optional[HTTPAuthorizationCredentials] = Depends(security)) -> Optional[dict]:
    """Get current user if authenticated, otherwise None"""
    if not credentials:
        return None
    try:
        return get_current_user(credentials)
    except HTTPException:
        # An invalid/expired token is treated the same as no token at all.
        return None
|
||||
213
backend/backup_db.py
Normal file
213
backend/backup_db.py
Normal file
@ -0,0 +1,213 @@
|
||||
"""
|
||||
Database backup script for R2 storage
|
||||
Exports PostgreSQL database, compresses it, and uploads to Cloudflare R2
|
||||
"""
|
||||
import os
|
||||
import subprocess
|
||||
import gzip
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
import boto3
|
||||
from botocore.config import Config
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# R2 Configuration
|
||||
R2_ENDPOINT = os.getenv("R2_ENDPOINT")
|
||||
R2_ACCESS_KEY = os.getenv("R2_ACCESS_KEY")
|
||||
R2_SECRET_KEY = os.getenv("R2_SECRET_KEY")
|
||||
R2_BUCKET = os.getenv("R2_BUCKET")
|
||||
|
||||
# Database Configuration
|
||||
DB_HOST = os.getenv("DB_HOST", "localhost")
|
||||
DB_PORT = os.getenv("DB_PORT", "5432")
|
||||
DB_NAME = os.getenv("DB_NAME", "recipes_db")
|
||||
DB_USER = os.getenv("DB_USER", "recipes_user")
|
||||
DB_PASSWORD = os.getenv("DB_PASSWORD", "recipes_password")
|
||||
|
||||
# Backup directory
|
||||
BACKUP_DIR = Path(__file__).parent / "backups"
|
||||
BACKUP_DIR.mkdir(exist_ok=True)
|
||||
|
||||
|
||||
def create_db_dump():
    """Create PostgreSQL database dump"""
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    dump_file = BACKUP_DIR / f"recipes_db_{timestamp}.sql"

    print(f"Creating database dump: {dump_file}")

    # pg_dump reads the password from the PGPASSWORD environment variable.
    pg_env = os.environ.copy()
    pg_env['PGPASSWORD'] = DB_PASSWORD

    cmd = [
        "pg_dump",
        "-h", DB_HOST,
        "-p", DB_PORT,
        "-U", DB_USER,
        "-d", DB_NAME,
        "-f", str(dump_file),
        "--no-owner",  # Don't include ownership commands
        "--no-acl",    # Don't include access privileges
    ]

    try:
        subprocess.run(cmd, env=pg_env, check=True, capture_output=True, text=True)
    except subprocess.CalledProcessError as e:
        print(f"✗ Error creating database dump: {e.stderr}")
        raise
    print(f"✓ Database dump created: {dump_file}")
    return dump_file
|
||||
|
||||
|
||||
def compress_file(file_path):
    """Compress file using gzip.

    Args:
        file_path: Path to the uncompressed dump; it is deleted after a
            successful compression.

    Returns:
        Path to the new ``<file>.gz`` file.
    """
    compressed_file = Path(str(file_path) + ".gz")

    print(f"Compressing {file_path.name}...")

    # BUG FIX: record the original size *before* the file is removed.
    # Previously it was read after unlink(), so it was always 0 and the
    # printed compression ratio was always 0%.
    original_size = file_path.stat().st_size

    with open(file_path, 'rb') as f_in:
        with gzip.open(compressed_file, 'wb', compresslevel=9) as f_out:
            shutil.copyfileobj(f_in, f_out)

    # Remove uncompressed file
    file_path.unlink()

    compressed_size = compressed_file.stat().st_size
    # max() guards against division by zero for an empty source file.
    ratio = (1 - compressed_size / max(original_size, 1)) * 100 if original_size > 0 else 0

    print(f"✓ Compressed to {compressed_file.name}")
    print(f"  Original: {original_size / 1024:.2f} KB")
    print(f"  Compressed: {compressed_size / 1024:.2f} KB")
    print(f"  Ratio: {ratio:.1f}% reduction")

    return compressed_file
|
||||
|
||||
|
||||
def upload_to_r2(file_path):
    """Upload file to Cloudflare R2"""
    print(f"Uploading {file_path.name} to R2...")

    # R2 is S3-compatible but requires sigv4 signing and path-style addressing.
    s3_client = boto3.client(
        's3',
        endpoint_url=R2_ENDPOINT,
        aws_access_key_id=R2_ACCESS_KEY,
        aws_secret_access_key=R2_SECRET_KEY,
        config=Config(signature_version='s3v4', s3={'addressing_style': 'path'}),
    )

    try:
        s3_client.upload_file(
            str(file_path),
            R2_BUCKET,
            file_path.name,
            ExtraArgs={
                'Metadata': {
                    'backup-date': datetime.now().isoformat(),
                    'db-name': DB_NAME,
                }
            },
        )
    except Exception as e:
        print(f"✗ Error uploading to R2: {e}")
        raise
    print(f"✓ Uploaded to R2: s3://{R2_BUCKET}/{file_path.name}")
    return True
|
||||
|
||||
|
||||
def list_r2_backups():
    """List all backups in R2 bucket"""
    print(f"\nListing backups in R2 bucket: {R2_BUCKET}")

    s3_client = boto3.client(
        's3',
        endpoint_url=R2_ENDPOINT,
        aws_access_key_id=R2_ACCESS_KEY,
        aws_secret_access_key=R2_SECRET_KEY,
        config=Config(signature_version='s3v4', s3={'addressing_style': 'path'}),
    )

    try:
        response = s3_client.list_objects_v2(Bucket=R2_BUCKET)

        # S3/R2 omits the Contents key entirely when the bucket is empty.
        if 'Contents' not in response:
            print("No backups found")
            return

        print(f"\nFound {len(response['Contents'])} backup(s):")
        # Newest first.
        for obj in sorted(response['Contents'], key=lambda x: x['LastModified'], reverse=True):
            size_mb = obj['Size'] / (1024 * 1024)
            print(f"  - {obj['Key']}")
            print(f"    Size: {size_mb:.2f} MB")
            print(f"    Date: {obj['LastModified']}")

    except Exception as e:
        print(f"✗ Error listing backups: {e}")
|
||||
|
||||
|
||||
def cleanup_old_local_backups(keep_last=3):
    """Keep only the last N local backups"""
    # Newest first by modification time; everything past keep_last goes.
    backups = sorted(BACKUP_DIR.glob("*.sql.gz"), key=lambda x: x.stat().st_mtime, reverse=True)

    if len(backups) <= keep_last:
        return
    print(f"\nCleaning up old local backups (keeping last {keep_last})...")
    for stale in backups[keep_last:]:
        print(f"  Removing: {stale.name}")
        stale.unlink()
|
||||
|
||||
|
||||
def main():
    """Main backup process"""
    banner = "=" * 60
    print(banner)
    print("Database Backup to Cloudflare R2")
    print(banner)
    print()

    try:
        # Fail fast if any R2 setting is missing from the environment.
        if not all([R2_ENDPOINT, R2_ACCESS_KEY, R2_SECRET_KEY, R2_BUCKET]):
            raise ValueError("Missing R2 credentials in environment variables")

        # Pipeline: dump -> compress -> upload -> report -> prune locals.
        dump_file = create_db_dump()
        compressed_file = compress_file(dump_file)
        upload_to_r2(compressed_file)
        list_r2_backups()
        cleanup_old_local_backups(keep_last=3)

        print("\n" + banner)
        print("✓ Backup completed successfully!")
        print(banner)

    except Exception as e:
        print("\n" + banner)
        print(f"✗ Backup failed: {e}")
        print(banner)
        raise


if __name__ == "__main__":
    main()
|
||||
277
backend/backup_restore_api.py
Normal file
277
backend/backup_restore_api.py
Normal file
@ -0,0 +1,277 @@
|
||||
"""
|
||||
Backup and Restore API endpoints for database management.
|
||||
Admin-only access required.
|
||||
"""
|
||||
import os
|
||||
import subprocess
|
||||
import gzip
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
import boto3
|
||||
from botocore.config import Config
|
||||
from botocore.exceptions import ClientError
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
def get_environment() -> str:
    """Detect environment based on FRONTEND_URL or deployment environment"""
    # Production frontend hostnames take precedence over everything else.
    frontend_url = os.getenv('FRONTEND_URL', '')
    prod_hosts = ('myrecipes.dvirlabs.com', 'my-recipes.dvirlabs.com')
    if any(host in frontend_url for host in prod_hosts):
        return 'prod'

    # Explicit override via the ENVIRONMENT variable.
    if os.getenv('ENVIRONMENT', '').lower() in ('prod', 'production'):
        return 'prod'

    # A non-local database host implies a production deployment.
    if os.getenv('DB_HOST', 'localhost') not in ('localhost', '127.0.0.1', 'postgres'):
        return 'prod'

    return 'dev'
|
||||
|
||||
|
||||
def get_r2_bucket() -> str:
    """Get R2 bucket name based on environment"""
    # Bucket naming convention: my-recipes-db-bkp-{dev|prod}
    return 'my-recipes-db-bkp-' + get_environment()
|
||||
|
||||
|
||||
def get_r2_client():
    """Get configured R2 client"""
    # R2 speaks the S3 API but needs sigv4 signing and path-style addressing.
    r2_config = Config(signature_version='s3v4', s3={'addressing_style': 'path'})
    return boto3.client(
        's3',
        endpoint_url=os.getenv('R2_ENDPOINT'),
        aws_access_key_id=os.getenv('R2_ACCESS_KEY'),
        aws_secret_access_key=os.getenv('R2_SECRET_KEY'),
        config=r2_config,
    )
|
||||
|
||||
|
||||
def create_db_dump() -> str:
|
||||
"""Create a database dump file"""
|
||||
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
env = get_environment()
|
||||
backup_dir = os.path.join(os.path.dirname(__file__), 'backups')
|
||||
os.makedirs(backup_dir, exist_ok=True)
|
||||
|
||||
dump_file = os.path.join(backup_dir, f'recipes_db_{env}_{timestamp}.sql')
|
||||
|
||||
db_host = os.getenv('DB_HOST', 'localhost')
|
||||
db_port = os.getenv('DB_PORT', '5432')
|
||||
db_name = os.getenv('DB_NAME', 'recipes_db')
|
||||
db_user = os.getenv('DB_USER', 'postgres')
|
||||
db_password = os.getenv('DB_PASSWORD', 'postgres')
|
||||
|
||||
env = os.environ.copy()
|
||||
env['PGPASSWORD'] = db_password
|
||||
|
||||
cmd = [
|
||||
'pg_dump',
|
||||
'-h', db_host,
|
||||
'-p', db_port,
|
||||
'-U', db_user,
|
||||
'-d', db_name,
|
||||
'--no-owner',
|
||||
'--no-acl',
|
||||
'-f', dump_file
|
||||
]
|
||||
|
||||
result = subprocess.run(cmd, env=env, capture_output=True, text=True)
|
||||
|
||||
if result.returncode != 0:
|
||||
raise Exception(f"pg_dump failed: {result.stderr}")
|
||||
|
||||
return dump_file
|
||||
|
||||
|
||||
def compress_file(file_path: str) -> str:
|
||||
"""Compress a file with gzip"""
|
||||
compressed_path = f"{file_path}.gz"
|
||||
|
||||
with open(file_path, 'rb') as f_in:
|
||||
with gzip.open(compressed_path, 'wb', compresslevel=9) as f_out:
|
||||
shutil.copyfileobj(f_in, f_out)
|
||||
|
||||
os.remove(file_path)
|
||||
return compressed_path
|
||||
|
||||
|
||||
def upload_to_r2(file_path: str) -> str:
|
||||
"""Upload file to R2"""
|
||||
s3_client = get_r2_client()
|
||||
bucket_name = get_r2_bucket()
|
||||
file_name = os.path.basename(file_path)
|
||||
|
||||
try:
|
||||
s3_client.upload_file(file_path, bucket_name, file_name)
|
||||
return file_name
|
||||
except ClientError as e:
|
||||
raise Exception(f"R2 upload failed: {str(e)}")
|
||||
|
||||
|
||||
def list_r2_backups() -> List[dict]:
|
||||
"""List all backups in R2"""
|
||||
s3_client = get_r2_client()
|
||||
bucket_name = get_r2_bucket()
|
||||
|
||||
try:
|
||||
response = s3_client.list_objects_v2(Bucket=bucket_name)
|
||||
|
||||
if 'Contents' not in response:
|
||||
return []
|
||||
|
||||
backups = []
|
||||
for obj in response['Contents']:
|
||||
backups.append({
|
||||
'filename': obj['Key'],
|
||||
'size': obj['Size'],
|
||||
'last_modified': obj['LastModified'].isoformat()
|
||||
})
|
||||
|
||||
backups.sort(key=lambda x: x['last_modified'], reverse=True)
|
||||
return backups
|
||||
|
||||
except ClientError as e:
|
||||
raise Exception(f"Failed to list R2 backups: {str(e)}")
|
||||
|
||||
|
||||
def download_from_r2(filename: str) -> str:
|
||||
"""Download a backup from R2"""
|
||||
s3_client = get_r2_client()
|
||||
bucket_name = get_r2_bucket()
|
||||
|
||||
backup_dir = os.path.join(os.path.dirname(__file__), 'backups')
|
||||
os.makedirs(backup_dir, exist_ok=True)
|
||||
|
||||
local_path = os.path.join(backup_dir, filename)
|
||||
|
||||
try:
|
||||
s3_client.download_file(bucket_name, filename, local_path)
|
||||
return local_path
|
||||
except ClientError as e:
|
||||
raise Exception(f"R2 download failed: {str(e)}")
|
||||
|
||||
|
||||
def decompress_file(compressed_path: str) -> str:
|
||||
"""Decompress a gzipped file"""
|
||||
if not compressed_path.endswith('.gz'):
|
||||
raise ValueError("File must be gzipped (.gz)")
|
||||
|
||||
decompressed_path = compressed_path[:-3]
|
||||
|
||||
with gzip.open(compressed_path, 'rb') as f_in:
|
||||
with open(decompressed_path, 'wb') as f_out:
|
||||
shutil.copyfileobj(f_in, f_out)
|
||||
|
||||
return decompressed_path
|
||||
|
||||
|
||||
def restore_database(sql_file: str) -> None:
|
||||
"""Restore database from SQL file"""
|
||||
db_host = os.getenv('DB_HOST', 'localhost')
|
||||
db_port = os.getenv('DB_PORT', '5432')
|
||||
db_name = os.getenv('DB_NAME', 'recipes_db')
|
||||
db_user = os.getenv('DB_USER', 'postgres')
|
||||
db_password = os.getenv('DB_PASSWORD', 'postgres')
|
||||
|
||||
env = os.environ.copy()
|
||||
env['PGPASSWORD'] = db_password
|
||||
|
||||
# Drop all tables first
|
||||
drop_cmd = [
|
||||
'psql',
|
||||
'-h', db_host,
|
||||
'-p', db_port,
|
||||
'-U', db_user,
|
||||
'-d', db_name,
|
||||
'-c', 'DROP SCHEMA public CASCADE; CREATE SCHEMA public;'
|
||||
]
|
||||
|
||||
drop_result = subprocess.run(drop_cmd, env=env, capture_output=True, text=True)
|
||||
|
||||
if drop_result.returncode != 0:
|
||||
raise Exception(f"Failed to drop schema: {drop_result.stderr}")
|
||||
|
||||
# Restore from backup
|
||||
restore_cmd = [
|
||||
'psql',
|
||||
'-h', db_host,
|
||||
'-p', db_port,
|
||||
'-U', db_user,
|
||||
'-d', db_name,
|
||||
'-f', sql_file
|
||||
]
|
||||
|
||||
restore_result = subprocess.run(restore_cmd, env=env, capture_output=True, text=True)
|
||||
|
||||
if restore_result.returncode != 0:
|
||||
raise Exception(f"Database restore failed: {restore_result.stderr}")
|
||||
|
||||
|
||||
def perform_backup() -> dict:
|
||||
"""Perform complete backup process"""
|
||||
try:
|
||||
# Create dump
|
||||
dump_file = create_db_dump()
|
||||
|
||||
# Compress
|
||||
compressed_file = compress_file(dump_file)
|
||||
|
||||
# Upload to R2
|
||||
r2_filename = upload_to_r2(compressed_file)
|
||||
|
||||
# Get file size
|
||||
file_size = os.path.getsize(compressed_file)
|
||||
|
||||
# Clean up local file
|
||||
os.remove(compressed_file)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'filename': r2_filename,
|
||||
'size': file_size,
|
||||
'timestamp': datetime.now().isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
|
||||
def perform_restore(filename: str) -> dict:
|
||||
"""Perform complete restore process"""
|
||||
try:
|
||||
# Download from R2
|
||||
compressed_file = download_from_r2(filename)
|
||||
|
||||
# Decompress
|
||||
sql_file = decompress_file(compressed_file)
|
||||
|
||||
# Restore database
|
||||
restore_database(sql_file)
|
||||
|
||||
# Clean up
|
||||
os.remove(compressed_file)
|
||||
os.remove(sql_file)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'filename': filename,
|
||||
'timestamp': datetime.now().isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
103
backend/backup_scheduler.py
Normal file
103
backend/backup_scheduler.py
Normal file
@ -0,0 +1,103 @@
|
||||
"""
|
||||
Automatic backup scheduler
|
||||
Runs database backups on a schedule without needing cron
|
||||
"""
|
||||
import os
|
||||
from datetime import datetime
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from backup_restore_api import perform_backup
|
||||
import logging
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='[%(asctime)s] %(levelname)s: %(message)s',
|
||||
datefmt='%Y-%m-%d %H:%M:%S'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
scheduler = BackgroundScheduler()
|
||||
|
||||
|
||||
def scheduled_backup_job():
|
||||
"""Job that runs on schedule to perform backup"""
|
||||
logger.info("Starting scheduled backup...")
|
||||
try:
|
||||
result = perform_backup()
|
||||
if result['success']:
|
||||
logger.info(f"✅ Scheduled backup completed: {result['filename']}")
|
||||
else:
|
||||
logger.error(f"❌ Scheduled backup failed: {result.get('error', 'Unknown error')}")
|
||||
except Exception as e:
|
||||
logger.error(f"❌ Scheduled backup exception: {str(e)}")
|
||||
|
||||
|
||||
def start_backup_scheduler():
|
||||
"""Start the backup scheduler based on environment configuration"""
|
||||
|
||||
# Get backup interval from environment (default: weekly)
|
||||
backup_interval = os.getenv('BACKUP_INTERVAL', 'weekly').lower()
|
||||
|
||||
if backup_interval == 'disabled':
|
||||
logger.info("⏸️ Automatic backups are DISABLED")
|
||||
return
|
||||
|
||||
if backup_interval == 'test' or backup_interval == '1minute':
|
||||
# Test mode - every 1 minute
|
||||
scheduler.add_job(
|
||||
scheduled_backup_job,
|
||||
'interval',
|
||||
minutes=1,
|
||||
id='backup_job',
|
||||
replace_existing=True
|
||||
)
|
||||
logger.info("⏰ Backup scheduler started: EVERY 1 MINUTE (testing mode)")
|
||||
logger.warning("⚠️ WARNING: Test mode active! Change BACKUP_INTERVAL to 'weekly' for production")
|
||||
|
||||
elif backup_interval == 'daily':
|
||||
# Daily at 2 AM
|
||||
scheduler.add_job(
|
||||
scheduled_backup_job,
|
||||
'cron',
|
||||
hour=2,
|
||||
minute=0,
|
||||
id='backup_job',
|
||||
replace_existing=True
|
||||
)
|
||||
logger.info("⏰ Backup scheduler started: DAILY at 2:00 AM")
|
||||
|
||||
elif backup_interval == 'weekly':
|
||||
# Weekly - Sunday at 2 AM
|
||||
scheduler.add_job(
|
||||
scheduled_backup_job,
|
||||
'cron',
|
||||
day_of_week='sun',
|
||||
hour=2,
|
||||
minute=0,
|
||||
id='backup_job',
|
||||
replace_existing=True
|
||||
)
|
||||
logger.info("⏰ Backup scheduler started: WEEKLY on Sundays at 2:00 AM")
|
||||
|
||||
else:
|
||||
logger.warning(f"⚠️ Unknown BACKUP_INTERVAL: {backup_interval}, defaulting to weekly")
|
||||
scheduler.add_job(
|
||||
scheduled_backup_job,
|
||||
'cron',
|
||||
day_of_week='sun',
|
||||
hour=2,
|
||||
minute=0,
|
||||
id='backup_job',
|
||||
replace_existing=True
|
||||
)
|
||||
logger.info("⏰ Backup scheduler started: WEEKLY on Sundays at 2:00 AM")
|
||||
|
||||
scheduler.start()
|
||||
logger.info("✅ Backup scheduler is running")
|
||||
|
||||
|
||||
def stop_backup_scheduler():
|
||||
"""Stop the backup scheduler"""
|
||||
if scheduler.running:
|
||||
scheduler.shutdown()
|
||||
logger.info("🛑 Backup scheduler stopped")
|
||||
@ -54,10 +54,12 @@ def list_recipes_db() -> List[Dict[str, Any]]:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT id, name, meal_type, time_minutes,
|
||||
tags, ingredients, steps, image, made_by
|
||||
FROM recipes
|
||||
ORDER BY id
|
||||
SELECT r.id, r.name, r.meal_type, r.time_minutes,
|
||||
r.tags, r.ingredients, r.steps, r.image, r.made_by, r.user_id,
|
||||
u.display_name as owner_display_name
|
||||
FROM recipes r
|
||||
LEFT JOIN users u ON r.user_id = u.id
|
||||
ORDER BY r.id
|
||||
"""
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
@ -79,21 +81,21 @@ def update_recipe_db(recipe_id: int, recipe_data: Dict[str, Any]) -> Optional[Di
|
||||
SET name = %s,
|
||||
meal_type = %s,
|
||||
time_minutes = %s,
|
||||
tags = %s,
|
||||
ingredients = %s,
|
||||
steps = %s,
|
||||
tags = %s::text[],
|
||||
ingredients = %s::text[],
|
||||
steps = %s::text[],
|
||||
image = %s,
|
||||
made_by = %s
|
||||
WHERE id = %s
|
||||
RETURNING id, name, meal_type, time_minutes, tags, ingredients, steps, image, made_by
|
||||
RETURNING id, name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id
|
||||
""",
|
||||
(
|
||||
recipe_data["name"],
|
||||
recipe_data["meal_type"],
|
||||
recipe_data["time_minutes"],
|
||||
json.dumps(recipe_data.get("tags", [])),
|
||||
json.dumps(recipe_data.get("ingredients", [])),
|
||||
json.dumps(recipe_data.get("steps", [])),
|
||||
recipe_data.get("tags", []),
|
||||
recipe_data.get("ingredients", []),
|
||||
recipe_data.get("steps", []),
|
||||
recipe_data.get("image"),
|
||||
recipe_data.get("made_by"),
|
||||
recipe_id,
|
||||
@ -133,19 +135,20 @@ def create_recipe_db(recipe_data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
"""
|
||||
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
|
||||
RETURNING id, name, meal_type, time_minutes, tags, ingredients, steps, image, made_by
|
||||
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
|
||||
VALUES (%s, %s, %s, %s::text[], %s::text[], %s::text[], %s, %s, %s)
|
||||
RETURNING id, name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id
|
||||
""",
|
||||
(
|
||||
recipe_data["name"],
|
||||
recipe_data["meal_type"],
|
||||
recipe_data["time_minutes"],
|
||||
json.dumps(recipe_data.get("tags", [])),
|
||||
json.dumps(recipe_data.get("ingredients", [])),
|
||||
json.dumps(recipe_data.get("steps", [])),
|
||||
recipe_data.get("tags", []),
|
||||
recipe_data.get("ingredients", []),
|
||||
recipe_data.get("steps", []),
|
||||
recipe_data.get("image"),
|
||||
recipe_data.get("made_by"),
|
||||
recipe_data.get("user_id"),
|
||||
),
|
||||
)
|
||||
row = cur.fetchone()
|
||||
@ -162,19 +165,21 @@ def get_recipes_by_filters_db(
|
||||
conn = get_conn()
|
||||
try:
|
||||
query = """
|
||||
SELECT id, name, meal_type, time_minutes,
|
||||
tags, ingredients, steps, image, made_by
|
||||
FROM recipes
|
||||
SELECT r.id, r.name, r.meal_type, r.time_minutes,
|
||||
r.tags, r.ingredients, r.steps, r.image, r.made_by, r.user_id,
|
||||
u.display_name as owner_display_name
|
||||
FROM recipes r
|
||||
LEFT JOIN users u ON r.user_id = u.id
|
||||
WHERE 1=1
|
||||
"""
|
||||
params: List = []
|
||||
|
||||
if meal_type:
|
||||
query += " AND meal_type = %s"
|
||||
query += " AND r.meal_type = %s"
|
||||
params.append(meal_type.lower())
|
||||
|
||||
if max_time:
|
||||
query += " AND time_minutes <= %s"
|
||||
query += " AND r.time_minutes <= %s"
|
||||
params.append(max_time)
|
||||
|
||||
with conn.cursor() as cur:
|
||||
@ -184,3 +189,4 @@ def get_recipes_by_filters_db(
|
||||
return rows
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
@ -1,33 +1,73 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15
|
||||
container_name: recipes-postgres
|
||||
backend:
|
||||
build: ./backend
|
||||
container_name: recipes-backend
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_USER: recipes_user
|
||||
POSTGRES_PASSWORD: Aa123456
|
||||
POSTGRES_DB: recipes_db
|
||||
ports:
|
||||
- "5432:5432"
|
||||
- "8000:8000"
|
||||
environment:
|
||||
# Database Configuration (RDS)
|
||||
DATABASE_URL: postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}
|
||||
DB_USER: ${DB_USER}
|
||||
DB_PASSWORD: ${DB_PASSWORD}
|
||||
DB_NAME: ${DB_NAME}
|
||||
DB_HOST: ${DB_HOST}
|
||||
DB_PORT: ${DB_PORT:-5432}
|
||||
|
||||
# Email Configuration
|
||||
SMTP_HOST: ${SMTP_HOST}
|
||||
SMTP_PORT: ${SMTP_PORT:-587}
|
||||
SMTP_USER: ${SMTP_USER}
|
||||
SMTP_PASSWORD: ${SMTP_PASSWORD}
|
||||
SMTP_FROM: ${SMTP_FROM}
|
||||
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID: ${GOOGLE_CLIENT_ID}
|
||||
GOOGLE_CLIENT_SECRET: ${GOOGLE_CLIENT_SECRET}
|
||||
GOOGLE_REDIRECT_URI: ${GOOGLE_REDIRECT_URI}
|
||||
|
||||
# Microsoft Azure OAuth
|
||||
AZURE_CLIENT_ID: ${AZURE_CLIENT_ID}
|
||||
AZURE_CLIENT_SECRET: ${AZURE_CLIENT_SECRET}
|
||||
AZURE_TENANT_ID: ${AZURE_TENANT_ID:-consumers}
|
||||
AZURE_REDIRECT_URI: ${AZURE_REDIRECT_URI}
|
||||
|
||||
# Frontend URL
|
||||
FRONTEND_URL: ${FRONTEND_URL}
|
||||
|
||||
# Cloudflare R2 Backup Configuration
|
||||
R2_ENDPOINT: ${R2_ENDPOINT}
|
||||
R2_ACCESS_KEY: ${R2_ACCESS_KEY}
|
||||
R2_SECRET_KEY: ${R2_SECRET_KEY}
|
||||
R2_BUCKET_NAME: ${R2_BUCKET_NAME}
|
||||
|
||||
# Backup Schedule
|
||||
BACKUP_INTERVAL: ${BACKUP_INTERVAL:-weekly}
|
||||
env_file:
|
||||
- .env.aws
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
- ./schema.sql:/docker-entrypoint-initdb.d/schema.sql:ro
|
||||
- ./backups:/app/backups
|
||||
- ./restores:/app/restores
|
||||
networks:
|
||||
- recipes-network
|
||||
|
||||
pgadmin:
|
||||
image: dpage/pgadmin4:8
|
||||
container_name: recipes-pgadmin
|
||||
frontend:
|
||||
build: ./frontend
|
||||
container_name: recipes-frontend
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
PGADMIN_DEFAULT_EMAIL: admin@dvirlabs.com
|
||||
PGADMIN_DEFAULT_PASSWORD: Aa123456
|
||||
ports:
|
||||
- "5050:80"
|
||||
- "80:80"
|
||||
environment:
|
||||
VITE_API_URL: ${VITE_API_URL:-http://localhost:8000}
|
||||
depends_on:
|
||||
- postgres
|
||||
- backend
|
||||
networks:
|
||||
- recipes-network
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
networks:
|
||||
recipes-network:
|
||||
driver: bridge
|
||||
|
||||
|
||||
|
||||
211
backend/email_utils.py
Normal file
211
backend/email_utils.py
Normal file
@ -0,0 +1,211 @@
|
||||
import os
|
||||
import random
|
||||
import aiosmtplib
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from datetime import datetime, timedelta
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
# In-memory storage for verification codes (in production, use Redis or database)
|
||||
verification_codes = {}
|
||||
password_reset_tokens = {}
|
||||
|
||||
def generate_verification_code():
|
||||
"""Generate a 6-digit verification code"""
|
||||
return str(random.randint(100000, 999999))
|
||||
|
||||
async def send_verification_email(email: str, code: str, purpose: str = "password_change"):
|
||||
"""Send verification code via email"""
|
||||
smtp_host = os.getenv("SMTP_HOST", "smtp.gmail.com")
|
||||
smtp_port = int(os.getenv("SMTP_PORT", "587"))
|
||||
smtp_user = os.getenv("SMTP_USER")
|
||||
smtp_password = os.getenv("SMTP_PASSWORD")
|
||||
smtp_from = os.getenv("SMTP_FROM", smtp_user)
|
||||
|
||||
if not smtp_user or not smtp_password:
|
||||
raise Exception("SMTP credentials not configured")
|
||||
|
||||
# Create message
|
||||
message = MIMEMultipart("alternative")
|
||||
message["Subject"] = "קוד אימות - מתכונים שלי"
|
||||
message["From"] = smtp_from
|
||||
message["To"] = email
|
||||
|
||||
# Email content
|
||||
if purpose == "password_change":
|
||||
text = f"""
|
||||
שלום,
|
||||
|
||||
קוד האימות שלך לשינוי סיסמה הוא: {code}
|
||||
|
||||
הקוד תקף ל-10 דקות.
|
||||
|
||||
אם לא ביקשת לשנות את הסיסמה, התעלם מהודעה זו.
|
||||
|
||||
בברכה,
|
||||
צוות מתכונים שלי
|
||||
"""
|
||||
|
||||
html = f"""
|
||||
<html dir="rtl">
|
||||
<body style="font-family: Arial, sans-serif; direction: rtl; text-align: right;">
|
||||
<h2>שינוי סיסמה</h2>
|
||||
<p>קוד האימות שלך הוא:</p>
|
||||
<h1 style="color: #22c55e; font-size: 32px; letter-spacing: 5px;">{code}</h1>
|
||||
<p>הקוד תקף ל-<strong>10 דקות</strong>.</p>
|
||||
<hr style="border: 1px solid #e5e7eb; margin: 20px 0;">
|
||||
<p style="color: #6b7280; font-size: 14px;">
|
||||
אם לא ביקשת לשנות את הסיסמה, התעלם מהודעה זו.
|
||||
</p>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
part1 = MIMEText(text, "plain")
|
||||
part2 = MIMEText(html, "html")
|
||||
message.attach(part1)
|
||||
message.attach(part2)
|
||||
|
||||
# Send email
|
||||
await aiosmtplib.send(
|
||||
message,
|
||||
hostname=smtp_host,
|
||||
port=smtp_port,
|
||||
username=smtp_user,
|
||||
password=smtp_password,
|
||||
start_tls=True,
|
||||
)
|
||||
|
||||
def store_verification_code(user_id: int, code: str):
|
||||
"""Store verification code with expiry"""
|
||||
expiry = datetime.now() + timedelta(minutes=10)
|
||||
verification_codes[user_id] = {
|
||||
"code": code,
|
||||
"expiry": expiry
|
||||
}
|
||||
|
||||
def verify_code(user_id: int, code: str) -> bool:
|
||||
"""Verify if code is correct and not expired"""
|
||||
if user_id not in verification_codes:
|
||||
return False
|
||||
|
||||
stored = verification_codes[user_id]
|
||||
|
||||
# Check if expired
|
||||
if datetime.now() > stored["expiry"]:
|
||||
del verification_codes[user_id]
|
||||
return False
|
||||
|
||||
# Check if code matches
|
||||
if stored["code"] != code:
|
||||
return False
|
||||
|
||||
# Code is valid, remove it
|
||||
del verification_codes[user_id]
|
||||
return True
|
||||
|
||||
|
||||
async def send_password_reset_email(email: str, token: str, frontend_url: str):
|
||||
"""Send password reset link via email"""
|
||||
smtp_host = os.getenv("SMTP_HOST", "smtp.gmail.com")
|
||||
smtp_port = int(os.getenv("SMTP_PORT", "587"))
|
||||
smtp_user = os.getenv("SMTP_USER")
|
||||
smtp_password = os.getenv("SMTP_PASSWORD")
|
||||
smtp_from = os.getenv("SMTP_FROM", smtp_user)
|
||||
|
||||
if not smtp_user or not smtp_password:
|
||||
raise Exception("SMTP credentials not configured")
|
||||
|
||||
reset_link = f"{frontend_url}?reset_token={token}"
|
||||
|
||||
# Create message
|
||||
message = MIMEMultipart("alternative")
|
||||
message["Subject"] = "איפוס סיסמה - מתכונים שלי"
|
||||
message["From"] = smtp_from
|
||||
message["To"] = email
|
||||
|
||||
text = f"""
|
||||
שלום,
|
||||
|
||||
קיבלנו בקשה לאיפוס הסיסמה שלך.
|
||||
|
||||
לחץ על הקישור הבא כדי לאפס את הסיסמה (תקף ל-30 דקות):
|
||||
{reset_link}
|
||||
|
||||
אם לא ביקשת לאפס את הסיסמה, התעלם מהודעה זו.
|
||||
|
||||
בברכה,
|
||||
צוות מתכונים שלי
|
||||
"""
|
||||
|
||||
html = f"""
|
||||
<html dir="rtl">
|
||||
<body style="font-family: Arial, sans-serif; direction: rtl; text-align: right;">
|
||||
<h2>איפוס סיסמה</h2>
|
||||
<p>קיבלנו בקשה לאיפוס הסיסמה שלך.</p>
|
||||
<p>לחץ על הכפתור למטה כדי לאפס את הסיסמה:</p>
|
||||
<div style="margin: 30px 0; text-align: center;">
|
||||
<a href="{reset_link}"
|
||||
style="background-color: #22c55e; color: white; padding: 12px 30px;
|
||||
text-decoration: none; border-radius: 6px; display: inline-block;
|
||||
font-weight: bold;">
|
||||
איפוס סיסמה
|
||||
</a>
|
||||
</div>
|
||||
<p style="color: #6b7280; font-size: 14px;">
|
||||
הקישור תקף ל-<strong>30 דקות</strong>.
|
||||
</p>
|
||||
<hr style="border: 1px solid #e5e7eb; margin: 20px 0;">
|
||||
<p style="color: #6b7280; font-size: 14px;">
|
||||
אם לא ביקשת לאפס את הסיסמה, התעלם מהודעה זו.
|
||||
</p>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
part1 = MIMEText(text, "plain")
|
||||
part2 = MIMEText(html, "html")
|
||||
message.attach(part1)
|
||||
message.attach(part2)
|
||||
|
||||
# Send email
|
||||
await aiosmtplib.send(
|
||||
message,
|
||||
hostname=smtp_host,
|
||||
port=smtp_port,
|
||||
username=smtp_user,
|
||||
password=smtp_password,
|
||||
start_tls=True,
|
||||
)
|
||||
|
||||
|
||||
def store_password_reset_token(email: str, token: str):
|
||||
"""Store password reset token with expiry"""
|
||||
expiry = datetime.now() + timedelta(minutes=30)
|
||||
password_reset_tokens[token] = {
|
||||
"email": email,
|
||||
"expiry": expiry
|
||||
}
|
||||
|
||||
|
||||
def verify_reset_token(token: str) -> str:
|
||||
"""Verify reset token and return email if valid"""
|
||||
if token not in password_reset_tokens:
|
||||
return None
|
||||
|
||||
stored = password_reset_tokens[token]
|
||||
|
||||
# Check if expired
|
||||
if datetime.now() > stored["expiry"]:
|
||||
del password_reset_tokens[token]
|
||||
return None
|
||||
|
||||
return stored["email"]
|
||||
|
||||
|
||||
def consume_reset_token(token: str):
|
||||
"""Remove token after use"""
|
||||
if token in password_reset_tokens:
|
||||
del password_reset_tokens[token]
|
||||
275
backend/grocery_db_utils.py
Normal file
275
backend/grocery_db_utils.py
Normal file
@ -0,0 +1,275 @@
|
||||
import os
|
||||
from typing import List, Optional, Dict, Any
|
||||
import psycopg2
|
||||
from psycopg2.extras import RealDictCursor
|
||||
|
||||
|
||||
def get_db_connection():
|
||||
"""Get database connection"""
|
||||
return psycopg2.connect(
|
||||
host=os.getenv("DB_HOST", "localhost"),
|
||||
port=int(os.getenv("DB_PORT", "5432")),
|
||||
database=os.getenv("DB_NAME", "recipes_db"),
|
||||
user=os.getenv("DB_USER", "recipes_user"),
|
||||
password=os.getenv("DB_PASSWORD", "recipes_password"),
|
||||
)
|
||||
|
||||
|
||||
def create_grocery_list(owner_id: int, name: str, items: List[str] = None) -> Dict[str, Any]:
|
||||
"""Create a new grocery list"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
items = items or []
|
||||
cur.execute(
|
||||
"""
|
||||
INSERT INTO grocery_lists (owner_id, name, items)
|
||||
VALUES (%s, %s, %s)
|
||||
RETURNING id, name, items, owner_id, created_at, updated_at
|
||||
""",
|
||||
(owner_id, name, items)
|
||||
)
|
||||
grocery_list = cur.fetchone()
|
||||
conn.commit()
|
||||
return dict(grocery_list)
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def get_user_grocery_lists(user_id: int) -> List[Dict[str, Any]]:
|
||||
"""Get all grocery lists owned by or shared with a user"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT DISTINCT gl.id, gl.name, gl.items, gl.owner_id, gl.is_pinned, gl.created_at, gl.updated_at,
|
||||
u.display_name as owner_display_name,
|
||||
CASE WHEN gl.owner_id = %s THEN TRUE ELSE gls.can_edit END as can_edit,
|
||||
CASE WHEN gl.owner_id = %s THEN TRUE ELSE FALSE END as is_owner
|
||||
FROM grocery_lists gl
|
||||
LEFT JOIN users u ON gl.owner_id = u.id
|
||||
LEFT JOIN grocery_list_shares gls ON gl.id = gls.list_id AND gls.shared_with_user_id = %s
|
||||
WHERE gl.owner_id = %s OR gls.shared_with_user_id = %s
|
||||
ORDER BY gl.updated_at DESC
|
||||
""",
|
||||
(user_id, user_id, user_id, user_id, user_id)
|
||||
)
|
||||
lists = cur.fetchall()
|
||||
return [dict(row) for row in lists]
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def get_grocery_list_by_id(list_id: int, user_id: int) -> Optional[Dict[str, Any]]:
|
||||
"""Get a specific grocery list if user has access"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT gl.id, gl.name, gl.items, gl.owner_id, gl.is_pinned, gl.created_at, gl.updated_at,
|
||||
u.display_name as owner_display_name,
|
||||
CASE WHEN gl.owner_id = %s THEN TRUE ELSE gls.can_edit END as can_edit,
|
||||
CASE WHEN gl.owner_id = %s THEN TRUE ELSE FALSE END as is_owner
|
||||
FROM grocery_lists gl
|
||||
LEFT JOIN users u ON gl.owner_id = u.id
|
||||
LEFT JOIN grocery_list_shares gls ON gl.id = gls.list_id AND gls.shared_with_user_id = %s
|
||||
WHERE gl.id = %s AND (gl.owner_id = %s OR gls.shared_with_user_id = %s)
|
||||
""",
|
||||
(user_id, user_id, user_id, list_id, user_id, user_id)
|
||||
)
|
||||
grocery_list = cur.fetchone()
|
||||
return dict(grocery_list) if grocery_list else None
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def update_grocery_list(list_id: int, name: str = None, items: List[str] = None) -> Optional[Dict[str, Any]]:
|
||||
"""Update a grocery list"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
updates = []
|
||||
params = []
|
||||
|
||||
if name is not None:
|
||||
updates.append("name = %s")
|
||||
params.append(name)
|
||||
|
||||
if items is not None:
|
||||
updates.append("items = %s")
|
||||
params.append(items)
|
||||
|
||||
if not updates:
|
||||
return None
|
||||
|
||||
updates.append("updated_at = CURRENT_TIMESTAMP")
|
||||
params.append(list_id)
|
||||
|
||||
query = f"UPDATE grocery_lists SET {', '.join(updates)} WHERE id = %s RETURNING id, name, items, owner_id, created_at, updated_at"
|
||||
|
||||
cur.execute(query, params)
|
||||
grocery_list = cur.fetchone()
|
||||
conn.commit()
|
||||
return dict(grocery_list) if grocery_list else None
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def delete_grocery_list(list_id: int) -> bool:
|
||||
"""Delete a grocery list"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
cur.execute("DELETE FROM grocery_lists WHERE id = %s", (list_id,))
|
||||
deleted = cur.rowcount > 0
|
||||
conn.commit()
|
||||
return deleted
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def share_grocery_list(list_id: int, shared_with_user_id: int, can_edit: bool = False) -> Dict[str, Any]:
|
||||
"""Share a grocery list with another user or update existing share permissions"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
INSERT INTO grocery_list_shares (list_id, shared_with_user_id, can_edit)
|
||||
VALUES (%s, %s, %s)
|
||||
ON CONFLICT (list_id, shared_with_user_id)
|
||||
DO UPDATE SET can_edit = EXCLUDED.can_edit, shared_at = CURRENT_TIMESTAMP
|
||||
RETURNING id, list_id, shared_with_user_id, can_edit, shared_at
|
||||
""",
|
||||
(list_id, shared_with_user_id, can_edit)
|
||||
)
|
||||
share = cur.fetchone()
|
||||
conn.commit()
|
||||
return dict(share)
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def update_share_permission(list_id: int, shared_with_user_id: int, can_edit: bool) -> Optional[Dict[str, Any]]:
|
||||
"""Update edit permission for an existing share"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
UPDATE grocery_list_shares
|
||||
SET can_edit = %s
|
||||
WHERE list_id = %s AND shared_with_user_id = %s
|
||||
RETURNING id, list_id, shared_with_user_id, can_edit, shared_at
|
||||
""",
|
||||
(can_edit, list_id, shared_with_user_id)
|
||||
)
|
||||
share = cur.fetchone()
|
||||
conn.commit()
|
||||
return dict(share) if share else None
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def unshare_grocery_list(list_id: int, user_id: int) -> bool:
|
||||
"""Remove sharing access for a user"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor()
|
||||
try:
|
||||
cur.execute(
|
||||
"DELETE FROM grocery_list_shares WHERE list_id = %s AND shared_with_user_id = %s",
|
||||
(list_id, user_id)
|
||||
)
|
||||
deleted = cur.rowcount > 0
|
||||
conn.commit()
|
||||
return deleted
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def get_grocery_list_shares(list_id: int) -> List[Dict[str, Any]]:
|
||||
"""Get all users a grocery list is shared with"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT gls.id, gls.list_id, gls.shared_with_user_id, gls.can_edit, gls.shared_at,
|
||||
u.username, u.display_name, u.email
|
||||
FROM grocery_list_shares gls
|
||||
JOIN users u ON gls.shared_with_user_id = u.id
|
||||
WHERE gls.list_id = %s
|
||||
ORDER BY gls.shared_at DESC
|
||||
""",
|
||||
(list_id,)
|
||||
)
|
||||
shares = cur.fetchall()
|
||||
return [dict(row) for row in shares]
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def search_users(query: str, limit: int = 10) -> List[Dict[str, Any]]:
|
||||
"""Search users by username or display_name for autocomplete"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
cur.execute(
|
||||
"""
|
||||
SELECT id, username, display_name, email
|
||||
FROM users
|
||||
WHERE username ILIKE %s OR display_name ILIKE %s
|
||||
ORDER BY username
|
||||
LIMIT %s
|
||||
""",
|
||||
(f"%{query}%", f"%{query}%", limit)
|
||||
)
|
||||
users = cur.fetchall()
|
||||
return [dict(row) for row in users]
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
|
||||
def toggle_grocery_list_pin(list_id: int, user_id: int) -> Optional[Dict[str, Any]]:
|
||||
"""Toggle pin status for a grocery list (owner only)"""
|
||||
conn = get_db_connection()
|
||||
cur = conn.cursor(cursor_factory=RealDictCursor)
|
||||
try:
|
||||
# Check if user is owner
|
||||
cur.execute(
|
||||
"SELECT id, is_pinned FROM grocery_lists WHERE id = %s AND owner_id = %s",
|
||||
(list_id, user_id)
|
||||
)
|
||||
result = cur.fetchone()
|
||||
if not result:
|
||||
return None
|
||||
|
||||
# Toggle pin status
|
||||
new_pin_status = not result["is_pinned"]
|
||||
cur.execute(
|
||||
"""
|
||||
UPDATE grocery_lists
|
||||
SET is_pinned = %s, updated_at = CURRENT_TIMESTAMP
|
||||
WHERE id = %s
|
||||
RETURNING id, name, items, owner_id, is_pinned, created_at, updated_at
|
||||
""",
|
||||
(new_pin_status, list_id)
|
||||
)
|
||||
updated = cur.fetchone()
|
||||
conn.commit()
|
||||
return dict(updated) if updated else None
|
||||
finally:
|
||||
cur.close()
|
||||
conn.close()
|
||||
1067
backend/main.py
1067
backend/main.py
File diff suppressed because it is too large
Load Diff
124
backend/notification_db_utils.py
Normal file
124
backend/notification_db_utils.py
Normal file
@ -0,0 +1,124 @@
|
||||
"""
|
||||
Database utilities for managing notifications.
|
||||
"""
|
||||
|
||||
from db_utils import get_conn
|
||||
|
||||
|
||||
def create_notification(user_id: int, type: str, message: str, related_id: int = None):
    """Create a notification row for a user.

    Args:
        user_id: Recipient of the notification.
        type: Notification category (e.g. 'grocery_share').
        message: Human-readable notification text.
        related_id: Optional id of the related entity (e.g. a list id).

    Returns:
        The inserted row as a dict, or None if the INSERT returned no row.
    """
    conn = get_conn()
    cur = conn.cursor()
    try:
        cur.execute(
            """
            INSERT INTO notifications (user_id, type, message, related_id)
            VALUES (%s, %s, %s, %s)
            RETURNING id, user_id, type, message, related_id, is_read, created_at
            """,
            (user_id, type, message, related_id)
        )
        row = cur.fetchone()
        conn.commit()
    finally:
        # Close in a finally so a failed INSERT can no longer leak the
        # cursor/connection (the original closed them only on success).
        cur.close()
        conn.close()

    if row:
        return {
            "id": row["id"],
            "user_id": row["user_id"],
            "type": row["type"],
            "message": row["message"],
            "related_id": row["related_id"],
            "is_read": row["is_read"],
            "created_at": row["created_at"]
        }
    return None
|
||||
|
||||
|
||||
def get_user_notifications(user_id: int, unread_only: bool = False):
    """Return a user's notifications, newest first.

    Args:
        user_id: Owner of the notifications.
        unread_only: When True, only rows with is_read = FALSE are returned.

    Returns:
        List of notification dicts (possibly empty).
    """
    conn = get_conn()
    cur = conn.cursor()
    try:
        query = """
        SELECT id, user_id, type, message, related_id, is_read, created_at
        FROM notifications
        WHERE user_id = %s
        """
        if unread_only:
            query += " AND is_read = FALSE"
        query += " ORDER BY created_at DESC"

        cur.execute(query, (user_id,))
        rows = cur.fetchall()
    finally:
        # Always release the connection, even if the query fails
        # (the original leaked it on error).
        cur.close()
        conn.close()

    return [
        {
            "id": row["id"],
            "user_id": row["user_id"],
            "type": row["type"],
            "message": row["message"],
            "related_id": row["related_id"],
            "is_read": row["is_read"],
            "created_at": row["created_at"]
        }
        for row in rows
    ]
|
||||
|
||||
|
||||
def mark_notification_as_read(notification_id: int, user_id: int):
    """Mark one notification as read.

    The user_id filter ensures a user can only mark their own
    notifications.

    Returns:
        True (always, for backward compatibility — even when no row matched).
    """
    conn = get_conn()
    cur = conn.cursor()
    try:
        cur.execute(
            """
            UPDATE notifications
            SET is_read = TRUE
            WHERE id = %s AND user_id = %s
            """,
            (notification_id, user_id)
        )
        conn.commit()
    finally:
        # try/finally fixes the connection leak the original had on error.
        cur.close()
        conn.close()
    return True
|
||||
|
||||
|
||||
def mark_all_notifications_as_read(user_id: int):
    """Mark every unread notification belonging to user_id as read.

    Returns:
        True (always, for backward compatibility).
    """
    conn = get_conn()
    cur = conn.cursor()
    try:
        cur.execute(
            """
            UPDATE notifications
            SET is_read = TRUE
            WHERE user_id = %s AND is_read = FALSE
            """,
            (user_id,)
        )
        conn.commit()
    finally:
        # try/finally fixes the connection leak the original had on error.
        cur.close()
        conn.close()
    return True
|
||||
|
||||
|
||||
def delete_notification(notification_id: int, user_id: int):
    """Delete one of the user's notifications.

    The user_id filter prevents deleting another user's notification.

    Returns:
        True (always, for backward compatibility).
    """
    conn = get_conn()
    cur = conn.cursor()
    try:
        cur.execute(
            """
            DELETE FROM notifications
            WHERE id = %s AND user_id = %s
            """,
            (notification_id, user_id)
        )
        conn.commit()
    finally:
        # try/finally fixes the connection leak the original had on error.
        cur.close()
        conn.close()
    return True
|
||||
33
backend/oauth_utils.py
Normal file
33
backend/oauth_utils.py
Normal file
@ -0,0 +1,33 @@
|
||||
import os

from authlib.integrations.starlette_client import OAuth
from starlette.config import Config

# OAuth registry backed by the .env configuration file.
config = Config('.env')
oauth = OAuth(config)

# Both providers use OpenID Connect discovery and request the same scopes.
_OIDC_SCOPE = 'openid email profile'

# Google OAuth client.
oauth.register(
    name='google',
    client_id=os.getenv('GOOGLE_CLIENT_ID'),
    client_secret=os.getenv('GOOGLE_CLIENT_SECRET'),
    server_metadata_url='https://accounts.google.com/.well-known/openid-configuration',
    client_kwargs={'scope': _OIDC_SCOPE},
)

# Microsoft Entra ID (Azure AD) client. The 'common' tenant accepts
# multi-tenant + personal accounts; set AZURE_TENANT_ID (e.g. 'consumers')
# to restrict it.
tenant_id = os.getenv('AZURE_TENANT_ID', 'common')
oauth.register(
    name='azure',
    client_id=os.getenv('AZURE_CLIENT_ID'),
    client_secret=os.getenv('AZURE_CLIENT_SECRET'),
    server_metadata_url=f'https://login.microsoftonline.com/{tenant_id}/v2.0/.well-known/openid-configuration',
    client_kwargs={'scope': _OIDC_SCOPE},
)
|
||||
@ -2,6 +2,25 @@ fastapi==0.115.0
|
||||
uvicorn[standard]==0.30.1
|
||||
|
||||
pydantic==2.7.4
|
||||
pydantic[email]==2.7.4
|
||||
python-dotenv==1.0.1
|
||||
|
||||
psycopg2-binary==2.9.9
|
||||
|
||||
# Authentication
|
||||
python-jose[cryptography]==3.3.0
|
||||
passlib[bcrypt]==1.7.4
|
||||
python-multipart==0.0.9
|
||||
bcrypt==4.1.2
|
||||
|
||||
# Email
|
||||
aiosmtplib==3.0.2
|
||||
|
||||
# OAuth
|
||||
authlib==1.3.0
|
||||
httpx==0.27.0
|
||||
itsdangerous==2.1.2
|
||||
|
||||
# Backup to R2
|
||||
boto3==1.34.17
|
||||
APScheduler==3.10.4
|
||||
|
||||
41
backend/reset_admin_password.py
Normal file
41
backend/reset_admin_password.py
Normal file
@ -0,0 +1,41 @@
|
||||
"""One-off maintenance script: reset the 'admin' user's password.

Connection settings come from the environment (.env): either a single
DATABASE_URL, or the individual DB_* variables used by the rest of the
backend. The original script required DATABASE_URL, which the project's
.env does not define, so it always failed to connect.
"""
import psycopg2
import bcrypt
import os
from dotenv import load_dotenv

load_dotenv()

# New password for admin. Prefer the env var so the secret does not have
# to be edited into source; falls back to the original default.
new_password = os.getenv("ADMIN_NEW_PASSWORD", "admin123")

# Hash the password with a freshly generated salt.
salt = bcrypt.gensalt()
password_hash = bcrypt.hashpw(new_password.encode('utf-8'), salt).decode('utf-8')

# Connect: prefer DATABASE_URL, otherwise build from the DB_* variables.
database_url = os.getenv('DATABASE_URL')
if database_url:
    conn = psycopg2.connect(database_url)
else:
    conn = psycopg2.connect(
        host=os.getenv("DB_HOST", "localhost"),
        port=int(os.getenv("DB_PORT", "5432")),
        dbname=os.getenv("DB_NAME", "recipes_db"),
        user=os.getenv("DB_USER", "recipes_user"),
        password=os.getenv("DB_PASSWORD", "recipes_password"),
    )
cur = conn.cursor()

try:
    # Update admin password
    cur.execute(
        "UPDATE users SET password_hash = %s WHERE username = %s",
        (password_hash, 'admin')
    )
    conn.commit()

    # Verify the update and echo the resulting account state.
    cur.execute("SELECT username, email, is_admin FROM users WHERE username = 'admin'")
    user = cur.fetchone()
    if user:
        print(f"✓ Admin password updated successfully!")
        print(f"  Username: {user[0]}")
        print(f"  Email: {user[1]}")
        print(f"  Is Admin: {user[2]}")
        print(f"\nYou can now login with:")
        print(f"  Username: admin")
        print(f"  Password: {new_password}")
    else:
        print("✗ Admin user not found!")
finally:
    cur.close()
    conn.close()
|
||||
222
backend/restore_db.py
Normal file
222
backend/restore_db.py
Normal file
@ -0,0 +1,222 @@
|
||||
"""
|
||||
Database restore script from R2 storage
|
||||
Downloads compressed backup from R2 and restores to PostgreSQL
|
||||
"""
|
||||
import os
|
||||
import subprocess
|
||||
import gzip
|
||||
from pathlib import Path
|
||||
import boto3
|
||||
from botocore.config import Config
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# R2 Configuration
|
||||
R2_ENDPOINT = os.getenv("R2_ENDPOINT")
|
||||
R2_ACCESS_KEY = os.getenv("R2_ACCESS_KEY")
|
||||
R2_SECRET_KEY = os.getenv("R2_SECRET_KEY")
|
||||
R2_BUCKET = os.getenv("R2_BUCKET")
|
||||
|
||||
# Database Configuration
|
||||
DB_HOST = os.getenv("DB_HOST", "localhost")
|
||||
DB_PORT = os.getenv("DB_PORT", "5432")
|
||||
DB_NAME = os.getenv("DB_NAME", "recipes_db")
|
||||
DB_USER = os.getenv("DB_USER", "recipes_user")
|
||||
DB_PASSWORD = os.getenv("DB_PASSWORD", "recipes_password")
|
||||
|
||||
# Restore directory
|
||||
RESTORE_DIR = Path(__file__).parent / "restores"
|
||||
RESTORE_DIR.mkdir(exist_ok=True)
|
||||
|
||||
|
||||
def list_r2_backups():
    """List backup objects in the R2 bucket, newest first.

    Returns:
        List of S3 object dicts sorted by LastModified descending, or an
        empty list when the bucket is empty or the listing fails.
    """
    client = boto3.client(
        's3',
        endpoint_url=R2_ENDPOINT,
        aws_access_key_id=R2_ACCESS_KEY,
        aws_secret_access_key=R2_SECRET_KEY,
        # R2 requires SigV4 and path-style addressing.
        config=Config(signature_version='s3v4', s3={'addressing_style': 'path'}),
    )

    try:
        listing = client.list_objects_v2(Bucket=R2_BUCKET)
        objects = listing.get('Contents')
        if objects is None:
            return []
        return sorted(objects, key=lambda obj: obj['LastModified'], reverse=True)
    except Exception as e:
        # Best-effort: report and fall back to "no backups".
        print(f"✗ Error listing backups: {e}")
        return []
|
||||
|
||||
|
||||
def download_from_r2(backup_name):
    """Download one backup object from R2 into RESTORE_DIR.

    Args:
        backup_name: Object key in the R2 bucket.

    Returns:
        Path of the downloaded file. Re-raises on download failure after
        printing the error.
    """
    target = RESTORE_DIR / backup_name

    print(f"Downloading {backup_name} from R2...")

    client = boto3.client(
        's3',
        endpoint_url=R2_ENDPOINT,
        aws_access_key_id=R2_ACCESS_KEY,
        aws_secret_access_key=R2_SECRET_KEY,
        # R2 requires SigV4 and path-style addressing.
        config=Config(signature_version='s3v4', s3={'addressing_style': 'path'}),
    )

    try:
        client.download_file(R2_BUCKET, backup_name, str(target))
        size_mb = target.stat().st_size / (1024 * 1024)
        print(f"✓ Downloaded: {target.name} ({size_mb:.2f} MB)")
        return target
    except Exception as e:
        print(f"✗ Error downloading from R2: {e}")
        raise
|
||||
|
||||
|
||||
def decompress_file(compressed_file):
    """Decompress a gzip backup to a sibling file without the .gz suffix.

    Streams the data with shutil.copyfileobj instead of reading the whole
    dump into memory, so large SQL backups decompress with constant memory.

    Args:
        compressed_file: Path to a ``*.gz`` file.

    Returns:
        Path of the decompressed file.
    """
    import shutil  # local import keeps the module's top-level deps unchanged

    # with_suffix('') strips only the trailing extension; the original
    # str.replace('.gz', '') would also mangle a '.gz' appearing elsewhere
    # in the path.
    decompressed_file = Path(compressed_file).with_suffix('')

    print(f"Decompressing {compressed_file.name}...")

    with gzip.open(compressed_file, 'rb') as f_in:
        with open(decompressed_file, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    compressed_size = compressed_file.stat().st_size
    decompressed_size = decompressed_file.stat().st_size

    print(f"✓ Decompressed to {decompressed_file.name}")
    print(f" Compressed: {compressed_size / 1024:.2f} KB")
    print(f" Decompressed: {decompressed_size / 1024:.2f} KB")

    return decompressed_file
|
||||
|
||||
|
||||
def restore_database(sql_file):
    """Restore the PostgreSQL database from a plain-SQL dump file.

    Asks for interactive confirmation, drops and recreates the public
    schema, then replays the dump with psql.

    Args:
        sql_file: Path to the decompressed .sql dump.

    Returns:
        False if the user aborts, True on success. Re-raises when psql
        fails during the restore step.
    """
    print(f"\nRestoring database from {sql_file.name}...")
    print("WARNING: This will overwrite the current database!")

    answer = input("Are you sure you want to continue? (yes/no): ")
    if answer.lower() != 'yes':
        print("Restore cancelled")
        return False

    # psql reads the password from the PGPASSWORD environment variable.
    env = os.environ.copy()
    env['PGPASSWORD'] = DB_PASSWORD

    base_cmd = ["psql", "-h", DB_HOST, "-p", DB_PORT, "-U", DB_USER, "-d", DB_NAME]

    # Drop and recreate the schema so the restore starts clean
    # (comment out if you want to merge instead). A failure here is only
    # a warning — the dump may still apply.
    print("Dropping existing tables...")
    try:
        subprocess.run(
            base_cmd + ["-c", "DROP SCHEMA public CASCADE; CREATE SCHEMA public;"],
            env=env, check=True, capture_output=True, text=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"Warning: Could not drop schema: {e.stderr}")

    # Replay the dump; errors here are fatal.
    print("Restoring database...")
    try:
        subprocess.run(
            base_cmd + ["-f", str(sql_file)],
            env=env, check=True, capture_output=True, text=True,
        )
        print("✓ Database restored successfully!")
        return True
    except subprocess.CalledProcessError as e:
        print(f"✗ Error restoring database: {e.stderr}")
        raise
|
||||
|
||||
|
||||
def main():
    """Main restore process: list R2 backups, let the operator pick one,
    download + decompress it, and restore the database from it.

    Raises:
        Re-raises any failure after printing a summary banner, so a
        non-interactive caller sees a non-zero exit.
    """
    print("=" * 60)
    print("Database Restore from Cloudflare R2")
    print("=" * 60)
    print()

    try:
        # Verify R2 credentials before doing any network work.
        if not all([R2_ENDPOINT, R2_ACCESS_KEY, R2_SECRET_KEY, R2_BUCKET]):
            raise ValueError("Missing R2 credentials in environment variables")

        # List available backups (newest first, per list_r2_backups).
        print("Available backups:")
        backups = list_r2_backups()

        if not backups:
            print("No backups found in R2")
            return

        # Show a 1-based menu of backups with size and timestamp.
        for i, backup in enumerate(backups, 1):
            size_mb = backup['Size'] / (1024 * 1024)
            print(f"{i}. {backup['Key']}")
            print(f" Size: {size_mb:.2f} MB, Date: {backup['LastModified']}")
        print()

        # Select backup
        choice = input(f"Select backup to restore (1-{len(backups)}) or 'q' to quit: ")

        if choice.lower() == 'q':
            print("Restore cancelled")
            return

        # Validate the selection: must parse as an int and be in range.
        try:
            backup_index = int(choice) - 1
            if backup_index < 0 or backup_index >= len(backups):
                raise ValueError()
        except ValueError:
            print("Invalid selection")
            return

        selected_backup = backups[backup_index]['Key']

        # Download backup
        compressed_file = download_from_r2(selected_backup)

        # Decompress backup
        sql_file = decompress_file(compressed_file)

        # Restore database (prompts for its own confirmation).
        restore_database(sql_file)

        print("\n" + "=" * 60)
        print("✓ Restore completed successfully!")
        print("=" * 60)

    except Exception as e:
        print("\n" + "=" * 60)
        print(f"✗ Restore failed: {e}")
        print("=" * 60)
        raise


if __name__ == "__main__":
    main()
|
||||
1496
backend/restores/recipes_db_20251221_030046.sql
Normal file
1496
backend/restores/recipes_db_20251221_030046.sql
Normal file
File diff suppressed because one or more lines are too long
BIN
backend/restores/recipes_db_20251221_030046.sql.gz
Normal file
BIN
backend/restores/recipes_db_20251221_030046.sql.gz
Normal file
Binary file not shown.
3
backend/run_backup.bat
Normal file
3
backend/run_backup.bat
Normal file
@ -0,0 +1,3 @@
|
||||
@echo off
REM Run the database backup from this script's own directory (Windows /
REM Task Scheduler entry point), appending stdout+stderr to backup.log.
cd /d "%~dp0"
python backup_db.py >> backup.log 2>&1
|
||||
8
backend/run_backup.sh
Normal file
8
backend/run_backup.sh
Normal file
@ -0,0 +1,8 @@
|
||||
#!/bin/bash
# Cron job setup for automated database backups
# Entry point invoked by cron (see setup_cron.sh); output is appended to
# backup.log so repeated runs accumulate a history.

# Navigate to backend directory (backup_db.py expects to run from there)
cd "$(dirname "$0")"

# Run backup script
python3 backup_db.py >> backup.log 2>&1
|
||||
@ -1,14 +1,36 @@
|
||||
-- Create users table
CREATE TABLE IF NOT EXISTS users (
    id SERIAL PRIMARY KEY,
    username TEXT UNIQUE NOT NULL,
    email TEXT UNIQUE NOT NULL,
    password_hash TEXT NOT NULL,
    first_name TEXT,
    last_name TEXT,
    display_name TEXT UNIQUE NOT NULL,
    is_admin BOOLEAN DEFAULT FALSE,
    auth_provider VARCHAR(50) DEFAULT 'local' NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_users_username ON users (username);
CREATE INDEX IF NOT EXISTS idx_users_email ON users (email);
CREATE INDEX IF NOT EXISTS idx_users_auth_provider ON users (auth_provider);

-- Create recipes table
-- NOTE(review): the previous revision listed the image and made_by columns
-- twice (stale pre-migration lines left beside the new ones), which is
-- invalid DDL; deduplicated here.
CREATE TABLE IF NOT EXISTS recipes (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL,
    meal_type TEXT NOT NULL, -- breakfast / lunch / dinner / snack
    time_minutes INTEGER NOT NULL,
    made_by TEXT, -- Person who created this recipe version
    tags JSONB NOT NULL DEFAULT '[]', -- example: ["מהיר", "בריא"] (Hebrew tag labels)
    ingredients JSONB NOT NULL DEFAULT '[]', -- example: ["ביצה", "עגבניה", "מלח"]
    steps JSONB NOT NULL DEFAULT '[]', -- ordered preparation steps
    image TEXT, -- Base64-encoded image or image URL
    user_id INTEGER REFERENCES users(id) ON DELETE SET NULL, -- Recipe owner
    visibility VARCHAR(20) DEFAULT 'public', -- public / private / friends / groups
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT recipes_visibility_check CHECK (visibility IN ('public', 'private', 'friends', 'groups'))
);

-- Optional: index for filters
CREATE INDEX IF NOT EXISTS idx_recipes_time_minutes
    ON recipes (time_minutes);

CREATE INDEX IF NOT EXISTS idx_recipes_made_by
    ON recipes (made_by);

CREATE INDEX IF NOT EXISTS idx_recipes_tags_jsonb
    ON recipes USING GIN (tags);

CREATE INDEX IF NOT EXISTS idx_recipes_user_id
    ON recipes (user_id);

-- Create grocery lists table
CREATE TABLE IF NOT EXISTS grocery_lists (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL,
    items TEXT[] NOT NULL DEFAULT '{}', -- Array of grocery items
    owner_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    is_pinned BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Create grocery list shares table
CREATE TABLE IF NOT EXISTS grocery_list_shares (
    id SERIAL PRIMARY KEY,
    list_id INTEGER NOT NULL REFERENCES grocery_lists(id) ON DELETE CASCADE,
    shared_with_user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    can_edit BOOLEAN DEFAULT FALSE,
    shared_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(list_id, shared_with_user_id)
);

CREATE INDEX IF NOT EXISTS idx_grocery_lists_owner_id ON grocery_lists (owner_id);
CREATE INDEX IF NOT EXISTS idx_grocery_list_shares_list_id ON grocery_list_shares (list_id);
CREATE INDEX IF NOT EXISTS idx_grocery_list_shares_user_id ON grocery_list_shares (shared_with_user_id);

-- Create notifications table
CREATE TABLE IF NOT EXISTS notifications (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    type TEXT NOT NULL, -- 'grocery_share', etc.
    message TEXT NOT NULL,
    related_id INTEGER, -- Related entity ID (e.g., list_id)
    is_read BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_notifications_user_id ON notifications (user_id);
CREATE INDEX IF NOT EXISTS idx_notifications_is_read ON notifications (is_read);

-- Create friend requests table
CREATE TABLE IF NOT EXISTS friend_requests (
    id SERIAL PRIMARY KEY,
    sender_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    receiver_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    status VARCHAR(20) DEFAULT 'pending',
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT friend_requests_check CHECK (sender_id <> receiver_id),
    CONSTRAINT friend_requests_status_check CHECK (status IN ('pending', 'accepted', 'rejected')),
    UNIQUE(sender_id, receiver_id)
);

CREATE INDEX IF NOT EXISTS idx_friend_requests_sender_id ON friend_requests (sender_id);
CREATE INDEX IF NOT EXISTS idx_friend_requests_receiver_id ON friend_requests (receiver_id);
CREATE INDEX IF NOT EXISTS idx_friend_requests_status ON friend_requests (status);

-- Create friendships table (rows are inserted in both directions by the app)
CREATE TABLE IF NOT EXISTS friendships (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    friend_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT friendships_check CHECK (user_id <> friend_id),
    UNIQUE(user_id, friend_id)
);

CREATE INDEX IF NOT EXISTS idx_friendships_user_id ON friendships (user_id);
CREATE INDEX IF NOT EXISTS idx_friendships_friend_id ON friendships (friend_id);

-- Create groups table
CREATE TABLE IF NOT EXISTS groups (
    id SERIAL PRIMARY KEY,
    name VARCHAR(100) NOT NULL,
    description TEXT,
    created_by INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    is_private BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_groups_created_by ON groups (created_by);

-- Create group members table
CREATE TABLE IF NOT EXISTS group_members (
    id SERIAL PRIMARY KEY,
    group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    role VARCHAR(20) DEFAULT 'member',
    joined_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT group_members_role_check CHECK (role IN ('admin', 'moderator', 'member')),
    UNIQUE(group_id, user_id)
);

CREATE INDEX IF NOT EXISTS idx_group_members_group_id ON group_members (group_id);
CREATE INDEX IF NOT EXISTS idx_group_members_user_id ON group_members (user_id);

-- Create conversations table
CREATE TABLE IF NOT EXISTS conversations (
    id SERIAL PRIMARY KEY,
    name VARCHAR(100),
    is_group BOOLEAN DEFAULT FALSE,
    created_by INTEGER REFERENCES users(id) ON DELETE SET NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX IF NOT EXISTS idx_conversations_created_by ON conversations (created_by);

-- Create conversation members table
CREATE TABLE IF NOT EXISTS conversation_members (
    id SERIAL PRIMARY KEY,
    conversation_id INTEGER NOT NULL REFERENCES conversations(id) ON DELETE CASCADE,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    joined_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    last_read_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(conversation_id, user_id)
);

CREATE INDEX IF NOT EXISTS idx_conversation_members_conversation_id ON conversation_members (conversation_id);
CREATE INDEX IF NOT EXISTS idx_conversation_members_user_id ON conversation_members (user_id);

-- Create messages table
CREATE TABLE IF NOT EXISTS messages (
    id SERIAL PRIMARY KEY,
    conversation_id INTEGER NOT NULL REFERENCES conversations(id) ON DELETE CASCADE,
    sender_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    content TEXT NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP,
    is_deleted BOOLEAN DEFAULT FALSE
);

CREATE INDEX IF NOT EXISTS idx_messages_conversation_id ON messages (conversation_id);
CREATE INDEX IF NOT EXISTS idx_messages_sender_id ON messages (sender_id);
CREATE INDEX IF NOT EXISTS idx_messages_created_at ON messages (created_at);

-- Create recipe shares table
CREATE TABLE IF NOT EXISTS recipe_shares (
    id SERIAL PRIMARY KEY,
    recipe_id INTEGER NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
    group_id INTEGER NOT NULL REFERENCES groups(id) ON DELETE CASCADE,
    shared_by INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    shared_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    UNIQUE(recipe_id, group_id)
);

CREATE INDEX IF NOT EXISTS idx_recipe_shares_recipe_id ON recipe_shares (recipe_id);
CREATE INDEX IF NOT EXISTS idx_recipe_shares_group_id ON recipe_shares (group_id);
CREATE INDEX IF NOT EXISTS idx_recipe_shares_shared_by ON recipe_shares (shared_by);

-- Create recipe ratings table
CREATE TABLE IF NOT EXISTS recipe_ratings (
    id SERIAL PRIMARY KEY,
    recipe_id INTEGER NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    rating INTEGER NOT NULL,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT recipe_ratings_rating_check CHECK (rating >= 1 AND rating <= 5),
    UNIQUE(recipe_id, user_id)
);

CREATE INDEX IF NOT EXISTS idx_recipe_ratings_recipe_id ON recipe_ratings (recipe_id);
CREATE INDEX IF NOT EXISTS idx_recipe_ratings_user_id ON recipe_ratings (user_id);

-- Create recipe comments table
CREATE TABLE IF NOT EXISTS recipe_comments (
    id SERIAL PRIMARY KEY,
    recipe_id INTEGER NOT NULL REFERENCES recipes(id) ON DELETE CASCADE,
    user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
    content TEXT NOT NULL,
    parent_comment_id INTEGER REFERENCES recipe_comments(id) ON DELETE CASCADE, -- threaded replies
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    is_deleted BOOLEAN DEFAULT FALSE
);

CREATE INDEX IF NOT EXISTS idx_recipe_comments_recipe_id ON recipe_comments (recipe_id);
CREATE INDEX IF NOT EXISTS idx_recipe_comments_user_id ON recipe_comments (user_id);
CREATE INDEX IF NOT EXISTS idx_recipe_comments_parent_comment_id ON recipe_comments (parent_comment_id);

-- Create default admin user (password: admin123)
-- Password hash generated with bcrypt for 'admin123'
INSERT INTO users (username, email, password_hash, first_name, last_name, display_name, is_admin)
VALUES ('admin', 'admin@myrecipes.local', '$2b$12$LQv3c1yqBWVHxkd0LHAkCOYz6TtxMQJqhN8/LewY5lE7UGf3rCvHC', 'Admin', 'User', 'מנהל', TRUE)
ON CONFLICT (username) DO NOTHING;

CREATE INDEX IF NOT EXISTS idx_recipes_ingredients_jsonb
    ON recipes USING GIN (ingredients);
||||
97
backend/setup_cron.sh
Normal file
97
backend/setup_cron.sh
Normal file
@ -0,0 +1,97 @@
|
||||
#!/bin/bash

# Automated Backup Cron Setup Script
# This script sets up automated backups for production
# Interactive: asks for a schedule, then installs (or replaces) a crontab
# entry that runs run_backup.sh from this directory.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BACKUP_SCRIPT="$SCRIPT_DIR/run_backup.sh"

echo "🛡️ Database Backup Cron Setup"
echo "================================"
echo ""

# Make backup script executable
chmod +x "$BACKUP_SCRIPT"
echo "✅ Made run_backup.sh executable"

# Get current crontab ("|| touch" ensures the temp file exists even when
# the user has no crontab yet)
crontab -l > /tmp/current_cron 2>/dev/null || touch /tmp/current_cron

# Check if backup job already exists; offer to replace it
if grep -q "run_backup.sh" /tmp/current_cron; then
    echo "⚠️ Backup cron job already exists!"
    echo ""
    echo "Current backup schedule:"
    grep "run_backup.sh" /tmp/current_cron
    echo ""
    read -p "Do you want to replace it? (y/n): " replace
    if [ "$replace" != "y" ]; then
        echo "Cancelled."
        exit 0
    fi
    # Remove existing backup job, keep everything else
    grep -v "run_backup.sh" /tmp/current_cron > /tmp/new_cron
else
    cp /tmp/current_cron /tmp/new_cron
fi

echo ""
echo "Select backup schedule:"
echo "1) Every 1 minute (for testing)"
echo "2) Weekly (Sunday at 2:00 AM)"
echo "3) Daily (at 2:00 AM)"
read -p "Enter choice (1-3): " choice

case $choice in
    1)
        # Every 1 minute for testing
        echo "* * * * * cd $SCRIPT_DIR && ./run_backup.sh" >> /tmp/new_cron
        echo "✅ Set to run EVERY 1 MINUTE (testing only!)"
        echo "⚠️ WARNING: This will create many backups. Change to weekly after testing!"
        ;;
    2)
        # Weekly - Sunday at 2 AM
        echo "0 2 * * 0 cd $SCRIPT_DIR && ./run_backup.sh" >> /tmp/new_cron
        echo "✅ Set to run WEEKLY on Sundays at 2:00 AM"
        ;;
    3)
        # Daily at 2 AM
        echo "0 2 * * * cd $SCRIPT_DIR && ./run_backup.sh" >> /tmp/new_cron
        echo "✅ Set to run DAILY at 2:00 AM"
        ;;
    *)
        echo "❌ Invalid choice"
        exit 1
        ;;
esac

# Install new crontab and clean up the temp files
# NOTE(review): files under /tmp are world-readable while this runs —
# harmless here (crontab contents only), but worth confirming.
crontab /tmp/new_cron
rm /tmp/current_cron /tmp/new_cron

echo ""
echo "✅ Cron job installed successfully!"
echo ""
echo "Current crontab:"
crontab -l | grep "run_backup.sh"
echo ""
echo "📝 Logs will be written to: $SCRIPT_DIR/backup.log"
echo ""
echo "To view logs:"
echo " tail -f $SCRIPT_DIR/backup.log"
echo ""
echo "To remove cron job:"
echo " crontab -e"
echo " (then delete the line with run_backup.sh)"
echo ""

# Extra guidance when the 1-minute testing schedule was chosen
if [ "$choice" = "1" ]; then
    echo "⚠️ TESTING MODE: Backup runs every minute"
    echo "Wait 2-3 minutes and check:"
    echo " 1. tail -f $SCRIPT_DIR/backup.log"
    echo " 2. Check R2 bucket for new files"
    echo ""
    echo "To change to weekly schedule after testing:"
    echo " ./setup_cron.sh"
    echo " (select option 2)"
fi
|
||||
202
backend/social_db_utils.py
Normal file
202
backend/social_db_utils.py
Normal file
@ -0,0 +1,202 @@
|
||||
import os
|
||||
import psycopg2
|
||||
from psycopg2.extras import RealDictCursor
|
||||
from psycopg2 import errors
|
||||
|
||||
|
||||
def get_db_connection():
    """Open a new psycopg2 connection from DB_* environment variables,
    falling back to local-development defaults."""
    params = {
        "host": os.getenv("DB_HOST", "localhost"),
        "port": int(os.getenv("DB_PORT", "5432")),
        "database": os.getenv("DB_NAME", "recipes_db"),
        "user": os.getenv("DB_USER", "recipes_user"),
        "password": os.getenv("DB_PASSWORD", "recipes_password"),
    }
    return psycopg2.connect(**params)
|
||||
|
||||
|
||||
# ============= Friends System =============
|
||||
|
||||
def send_friend_request(sender_id: int, receiver_id: int):
    """Send a friend request from sender_id to receiver_id.

    Returns the created (or pre-existing pending) request row as a dict,
    or {"error": "Already friends"} when a friendship already exists.
    Duplicate sends are idempotent: the existing row is returned instead
    of inserting a second request.
    """
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        # Check if already friends (checked in both directions, since
        # friendships are stored as two rows — see accept_friend_request)
        cur.execute(
            "SELECT 1 FROM friendships WHERE (user_id = %s AND friend_id = %s) OR (user_id = %s AND friend_id = %s)",
            (sender_id, receiver_id, receiver_id, sender_id)
        )
        if cur.fetchone():
            return {"error": "Already friends"}

        # Check if request already exists
        cur.execute(
            "SELECT * FROM friend_requests WHERE sender_id = %s AND receiver_id = %s AND status = 'pending'",
            (sender_id, receiver_id)
        )
        existing = cur.fetchone()
        if existing:
            return dict(existing)

        try:
            cur.execute(
                """
                INSERT INTO friend_requests (sender_id, receiver_id)
                VALUES (%s, %s)
                RETURNING id, sender_id, receiver_id, status, created_at
                """,
                (sender_id, receiver_id)
            )
            request = cur.fetchone()
            conn.commit()
            return dict(request)
        except errors.UniqueViolation:
            # Request already exists (raced with a concurrent insert, or a
            # non-pending row survives the UNIQUE constraint): roll back the
            # failed INSERT, then fetch and return the existing row.
            conn.rollback()
            cur.execute(
                "SELECT id, sender_id, receiver_id, status, created_at FROM friend_requests WHERE sender_id = %s AND receiver_id = %s",
                (sender_id, receiver_id)
            )
            existing_request = cur.fetchone()
            if existing_request:
                return dict(existing_request)
            return {"error": "Friend request already exists"}
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def accept_friend_request(request_id: int):
    """Accept a pending friend request.

    Creates the friendship in both directions and marks the request
    accepted, in one transaction.

    Returns:
        {"success": True} on success, or an error dict when no pending
        request with that id exists.
    """
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        # Only pending requests can be accepted.
        cur.execute(
            "SELECT sender_id, receiver_id FROM friend_requests WHERE id = %s AND status = 'pending'",
            (request_id,)
        )
        req = cur.fetchone()
        if req is None:
            return {"error": "Request not found or already processed"}

        # Friendships are stored as two rows, one per direction.
        cur.execute(
            "INSERT INTO friendships (user_id, friend_id) VALUES (%s, %s), (%s, %s) ON CONFLICT DO NOTHING",
            (req["sender_id"], req["receiver_id"], req["receiver_id"], req["sender_id"])
        )

        cur.execute(
            "UPDATE friend_requests SET status = 'accepted', updated_at = CURRENT_TIMESTAMP WHERE id = %s",
            (request_id,)
        )

        conn.commit()
        return {"success": True}
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def reject_friend_request(request_id: int):
    """Reject a pending friend request.

    Returns {"success": True} when a pending request was rejected, or an
    error dict when no pending request with this id exists — consistent with
    accept_friend_request's not-found handling. (Previously this reported
    success even when the UPDATE matched no rows, masking the no-op.)
    """
    conn = get_db_connection()
    cur = conn.cursor()
    try:
        cur.execute(
            "UPDATE friend_requests SET status = 'rejected', updated_at = CURRENT_TIMESTAMP WHERE id = %s AND status = 'pending'",
            (request_id,),
        )
        conn.commit()
        # rowcount == 0 means the request was missing or already processed.
        if cur.rowcount == 0:
            return {"error": "Request not found or already processed"}
        return {"success": True}
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def get_friend_requests(user_id: int):
    """Return the pending friend requests addressed to ``user_id``, newest first.

    Each row joins the sender's profile fields so the caller can render who asked.
    """
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        cur.execute(
            """
            SELECT fr.id AS request_id, fr.sender_id, fr.receiver_id, fr.status, fr.created_at,
                   u.username AS sender_username, u.display_name AS sender_display_name, u.email AS sender_email
            FROM friend_requests fr
            JOIN users u ON u.id = fr.sender_id
            WHERE fr.receiver_id = %s AND fr.status = 'pending'
            ORDER BY fr.created_at DESC
            """,
            (user_id,),
        )
        pending = cur.fetchall()
        return [dict(req) for req in pending]
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def get_friends(user_id: int):
    """Return the user's friends as dicts (profile fields plus when the friendship began)."""
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        # Only one direction of the mirrored friendship rows is needed here.
        cur.execute(
            """
            SELECT u.id, u.username, u.display_name, u.email, f.created_at AS friends_since
            FROM friendships f
            JOIN users u ON u.id = f.friend_id
            WHERE f.user_id = %s
            ORDER BY u.display_name
            """,
            (user_id,),
        )
        return [dict(friend) for friend in cur.fetchall()]
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def remove_friend(user_id: int, friend_id: int):
    """Delete the friendship between the two users (both mirrored rows)."""
    conn = get_db_connection()
    cur = conn.cursor()
    try:
        # One DELETE covers both directions of the relationship.
        cur.execute(
            "DELETE FROM friendships WHERE (user_id = %s AND friend_id = %s) OR (user_id = %s AND friend_id = %s)",
            (user_id, friend_id, friend_id, user_id),
        )
        conn.commit()
        return {"success": True}
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def search_users(query: str, current_user_id: int, limit: int = 20):
    """Search users by username or display name (case-insensitive substring match).

    Each result row is annotated with ``is_friend`` and ``request_sent`` flags
    relative to ``current_user_id``; the searching user is excluded.
    NOTE(review): '%' and '_' inside ``query`` act as ILIKE wildcards — confirm
    whether literal matching is wanted here.
    """
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        pattern = f"%{query}%"
        cur.execute(
            """
            SELECT u.id, u.username, u.display_name, u.email,
                   EXISTS(SELECT 1 FROM friendships WHERE user_id = %s AND friend_id = u.id) AS is_friend,
                   EXISTS(SELECT 1 FROM friend_requests WHERE sender_id = %s AND receiver_id = u.id AND status = 'pending') AS request_sent
            FROM users u
            WHERE (u.username ILIKE %s OR u.display_name ILIKE %s) AND u.id != %s
            ORDER BY u.display_name
            LIMIT %s
            """,
            (current_user_id, current_user_id, pattern, pattern, current_user_id, limit),
        )
        return [dict(match) for match in cur.fetchall()]
    finally:
        cur.close()
        conn.close()
|
||||
117
backend/user_db_utils.py
Normal file
117
backend/user_db_utils.py
Normal file
@ -0,0 +1,117 @@
|
||||
import os
|
||||
import psycopg2
|
||||
from psycopg2.extras import RealDictCursor
|
||||
|
||||
|
||||
def get_db_connection():
    """Open a new psycopg2 connection from DB_* environment variables.

    Falls back to local-development defaults when a variable is unset.
    """
    params = {
        "host": os.getenv("DB_HOST", "localhost"),
        "port": int(os.getenv("DB_PORT", "5432")),
        "database": os.getenv("DB_NAME", "recipes_db"),
        "user": os.getenv("DB_USER", "recipes_user"),
        "password": os.getenv("DB_PASSWORD", "recipes_password"),
    }
    return psycopg2.connect(**params)
|
||||
|
||||
|
||||
def create_user(username: str, email: str, password_hash: str, first_name: str = None, last_name: str = None, display_name: str = None, is_admin: bool = False, auth_provider: str = "local"):
    """Insert a new user row and return it as a dict.

    ``display_name`` falls back to ``username`` when not provided.
    """
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        shown_name = display_name if display_name else username

        cur.execute(
            """
            INSERT INTO users (username, email, password_hash, first_name, last_name, display_name, is_admin, auth_provider)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            RETURNING id, username, email, first_name, last_name, display_name, is_admin, auth_provider, created_at
            """,
            (username, email, password_hash, first_name, last_name, shown_name, is_admin, auth_provider),
        )
        new_user = cur.fetchone()
        conn.commit()
        return dict(new_user)
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def get_user_by_username(username: str):
    """Fetch a single user row by username; returns a dict, or None when absent."""
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        cur.execute(
            "SELECT id, username, email, password_hash, first_name, last_name, display_name, is_admin, auth_provider, created_at FROM users WHERE username = %s",
            (username,),
        )
        row = cur.fetchone()
        return None if row is None else dict(row)
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def get_user_by_email(email: str):
    """Fetch a single user row by email address; returns a dict, or None when absent."""
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        cur.execute(
            "SELECT id, username, email, password_hash, first_name, last_name, display_name, is_admin, auth_provider, created_at FROM users WHERE email = %s",
            (email,),
        )
        row = cur.fetchone()
        return None if row is None else dict(row)
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def get_user_by_id(user_id: int):
    """Fetch a single user row by primary key; returns a dict, or None when absent."""
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        cur.execute(
            "SELECT id, username, email, password_hash, first_name, last_name, display_name, is_admin, auth_provider, created_at FROM users WHERE id = %s",
            (user_id,),
        )
        row = cur.fetchone()
        return None if row is None else dict(row)
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def get_user_by_display_name(display_name: str):
    """Fetch a single user row by display name; returns a dict, or None when absent.

    Note: returns fewer columns than the other lookups (no password_hash /
    name fields / auth_provider).
    """
    conn = get_db_connection()
    cur = conn.cursor(cursor_factory=RealDictCursor)
    try:
        cur.execute(
            "SELECT id, username, email, display_name, is_admin, created_at FROM users WHERE display_name = %s",
            (display_name,),
        )
        row = cur.fetchone()
        return None if row is None else dict(row)
    finally:
        cur.close()
        conn.close()
|
||||
|
||||
|
||||
def update_user_auth_provider(user_id: int, auth_provider: str):
    """Set the auth_provider column for the given user id (no return value)."""
    conn = get_db_connection()
    cur = conn.cursor()
    try:
        cur.execute(
            "UPDATE users SET auth_provider = %s WHERE id = %s",
            (auth_provider, user_id),
        )
        conn.commit()
    finally:
        cur.close()
        conn.close()
|
||||
178
demo-recipes.sql
Normal file
178
demo-recipes.sql
Normal file
@ -0,0 +1,178 @@
|
||||
-- Demo recipes for user dvir (id=3)
-- Seed data: 8 demo recipes (Hebrew content) owned by user id 3; images are
-- hosted on Unsplash. tags/ingredients/steps are stored as jsonb arrays.

-- Recipe 1: שקשוקה
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'שקשוקה',
    'breakfast',
    25,
    '["מהיר", "בריא", "צמחוני"]'::jsonb,
    '["4 ביצים", "4 עגבניות", "1 בצל", "2 שיני שום", "פלפל חריף", "כמון", "מלח", "שמן זית"]'::jsonb,
    '[
        "לחתוך את הבצל והשום דק",
        "לחמם שמן בסיר ולטגן את הבצל עד שקוף",
        "להוסיף שום ופלפל חריף ולטגן דקה",
        "לקצוץ עגבניות ולהוסיף לסיר",
        "לתבל בכמון ומלח, לבשל 10 דקות",
        "לפתוח גומות ברוטב ולשבור ביצה בכל גומה",
        "לכסות ולבשל עד שהביצים מתקשות"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1525351484163-7529414344d8?w=500',
    'דביר',
    3
);

-- Recipe 2: פסטה ברוטב עגבניות
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'פסטה ברוטב עגבניות',
    'lunch',
    20,
    '["מהיר", "צמחוני", "ילדים אוהבים"]'::jsonb,
    '["500 גרם פסטה", "רסק עגבניות", "בזיליקום טרי", "3 שיני שום", "שמן זית", "מלח", "פלפל"]'::jsonb,
    '[
        "להרתיח מים מלוחים ולבשל את הפסטה לפי ההוראות",
        "בינתיים, לחמם שמן בסיר",
        "לטגן שום כתוש דקה",
        "להוסיף רסק עגבניות ולתבל",
        "לבשל על אש בינונית 10 דקות",
        "להוסיף בזיליקום קרוע",
        "לערבב את הפסטה המסוננת עם הרוטב"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1621996346565-e3dbc646d9a9?w=500',
    'דביר',
    3
);

-- Recipe 3: סלט ישראלי
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'סלט ישראלי',
    'snack',
    10,
    '["מהיר", "בריא", "טבעוני", "צמחוני"]'::jsonb,
    '["4 עגבניות", "2 מלפפונים", "1 בצל", "פטרוזיליה", "לימון", "שמן זית", "מלח"]'::jsonb,
    '[
        "לחתוך עגבניות ומלפפונים לקוביות קטנות",
        "לקצוץ בצל דק",
        "לקצוץ פטרוזיליה",
        "לערבב הכל בקערה",
        "להוסיף מיץ לימון ושמן זית",
        "לתבל במלח ולערבב היטב"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1512621776951-a57141f2eefd?w=500',
    'דביר',
    3
);

-- Recipe 4: חביתה עם ירקות
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'חביתה עם ירקות',
    'breakfast',
    15,
    '["מהיר", "בריא", "חלבוני", "צמחוני"]'::jsonb,
    '["3 ביצים", "1 בצל", "1 פלפל", "עגבניה", "גבינה צהובה", "מלח", "פלפל שחור", "שמן"]'::jsonb,
    '[
        "לקצוץ את הירקות לקוביות קטנות",
        "לטגן את הירקות בשמן עד שמתרככים",
        "להקציף את הביצים במזלג",
        "לשפוך את הביצים על הירקות",
        "לפזר גבינה קצוצה",
        "לבשל עד שהתחתית מוזהבת",
        "להפוך או לקפל לחצי ולהגיש"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1525351159099-122d10e7960e?w=500',
    'דביר',
    3
);

-- Recipe 5: עוף בתנור עם תפוחי אדמה
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'עוף בתנור עם תפוחי אדמה',
    'dinner',
    60,
    '["משפחתי", "חגיגי"]'::jsonb,
    '["1 עוף שלם", "1 ק״ג תפוחי אדמה", "פפריקה", "כורכום", "שום", "מלח", "פלפל", "שמן זית", "לימון"]'::jsonb,
    '[
        "לחמם תנור ל-200 מעלות",
        "לחתוך תפוחי אדמה לרבעים",
        "לשפשף את העוף בתבלינים, שמן ומיץ לימון",
        "לסדר תפוחי אדמה בתבנית",
        "להניח את העוף על התפוחי אדמה",
        "לאפות כשעה עד שהעוף מוזהב",
        "להוציא, לחתוך ולהגיש עם הירקות"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1598103442097-8b74394b95c6?w=500',
    'דביר',
    3
);

-- Recipe 6: סנדוויץ טונה
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'סנדוויץ טונה',
    'lunch',
    5,
    '["מהיר", "קר", "חלבוני"]'::jsonb,
    '["קופסת טונה", "2 פרוסות לחם", "מיונז", "חסה", "עגבניה", "מלפפון חמוץ", "מלח", "פלפל"]'::jsonb,
    '[
        "לסנן את הטונה",
        "לערבב את הטונה עם מיונז",
        "לתבל במלח ופלפל",
        "למרוח על פרוסת לחם",
        "להוסיף חסה, עגבניה ומלפפון",
        "לכסות בפרוסת לחם שנייה"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1528735602780-2552fd46c7af?w=500',
    'דביר',
    3
);

-- Recipe 7: בראוניז שוקולד
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'בראוניז שוקולד',
    'snack',
    35,
    '["קינוח", "שוקולד", "אפייה"]'::jsonb,
    '["200 גרם שוקולד מריר", "150 גרם חמאה", "3 ביצים", "כוס סוכר", "חצי כוס קמח", "אבקת קקאו", "וניל"]'::jsonb,
    '[
        "לחמם תנור ל-180 מעלות",
        "להמיס שוקולד וחמאה במיקרוגל",
        "להקציף ביצים וסוכר",
        "להוסיף את תערובת השוקולד",
        "להוסיף קמח וקקאו ולערבב",
        "לשפוך לתבנית משומנת",
        "לאפות 25 דקות",
        "להוציא ולהניח להתקרר לפני חיתוך"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1606313564200-e75d5e30476c?w=500',
    'דביר',
    3
);

-- Recipe 8: מרק עדשים
INSERT INTO recipes (name, meal_type, time_minutes, tags, ingredients, steps, image, made_by, user_id)
VALUES (
    'מרק עדשים',
    'dinner',
    40,
    '["בריא", "צמחוני", "טבעוני", "חם"]'::jsonb,
    '["2 כוסות עדשים כתומות", "בצל", "גזר", "3 שיני שום", "כמון", "כורכום", "מלח", "לימון"]'::jsonb,
    '[
        "לשטוף את העדשים",
        "לקצוץ בצל, גזר ושום",
        "לטגן את הבצל עד שקוף",
        "להוסיף שום ותבלינים",
        "להוסיף גזר ועדשים",
        "להוסיף 6 כוסות מים",
        "לבשל 30 דקות עד שהעדשים רכים",
        "לטחון חלק מהמרק לקבלת מרקם עבה",
        "להוסיף מיץ לימון לפני הגשה"
    ]'::jsonb,
    'https://images.unsplash.com/photo-1547592166-23ac45744acd?w=500',
    'דביר',
    3
);
|
||||
71
docker-compose.yaml
Normal file
71
docker-compose.yaml
Normal file
@ -0,0 +1,71 @@
|
||||
# Two-service stack: FastAPI-style backend (port 8000) + nginx frontend (port 80),
# joined on a private bridge network. The database itself is external (AWS RDS).
version: "3.9"

services:
  backend:
    build: ./backend
    container_name: recipes-backend
    restart: unless-stopped
    ports:
      - "8000:8000"
    environment:
      # Database Configuration (RDS)
      DATABASE_URL: postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}
      DB_USER: ${DB_USER}
      DB_PASSWORD: ${DB_PASSWORD}
      DB_NAME: ${DB_NAME}
      DB_HOST: ${DB_HOST}
      DB_PORT: ${DB_PORT:-5432}

      # Email Configuration
      SMTP_HOST: ${SMTP_HOST}
      SMTP_PORT: ${SMTP_PORT:-587}
      SMTP_USER: ${SMTP_USER}
      SMTP_PASSWORD: ${SMTP_PASSWORD}
      SMTP_FROM: ${SMTP_FROM}

      # Google OAuth
      GOOGLE_CLIENT_ID: ${GOOGLE_CLIENT_ID}
      GOOGLE_CLIENT_SECRET: ${GOOGLE_CLIENT_SECRET}
      GOOGLE_REDIRECT_URI: ${GOOGLE_REDIRECT_URI}

      # Microsoft Azure OAuth
      AZURE_CLIENT_ID: ${AZURE_CLIENT_ID}
      AZURE_CLIENT_SECRET: ${AZURE_CLIENT_SECRET}
      AZURE_TENANT_ID: ${AZURE_TENANT_ID:-consumers}
      AZURE_REDIRECT_URI: ${AZURE_REDIRECT_URI}

      # Frontend URL
      FRONTEND_URL: ${FRONTEND_URL}

      # Cloudflare R2 Backup Configuration
      R2_ENDPOINT: ${R2_ENDPOINT}
      R2_ACCESS_KEY: ${R2_ACCESS_KEY}
      R2_SECRET_KEY: ${R2_SECRET_KEY}
      R2_BUCKET_NAME: ${R2_BUCKET_NAME}

      # Backup Schedule
      BACKUP_INTERVAL: ${BACKUP_INTERVAL:-weekly}
    # NOTE(review): secrets come from backend/.env.aws (and the ${...}
    # substitutions above read the repo-root .env) — make sure both files are
    # git-ignored and never committed with real credentials.
    env_file:
      - ./backend/.env.aws
    volumes:
      - ./backend/backups:/app/backups
      - ./backend/restores:/app/restores
    networks:
      - recipes-network

  frontend:
    build: ./frontend
    container_name: recipes-frontend
    restart: unless-stopped
    ports:
      - "80:80"
    environment:
      VITE_API_URL: ${VITE_API_URL:-http://localhost:8000}
    depends_on:
      - backend
    networks:
      - recipes-network

networks:
  recipes-network:
    driver: bridge
|
||||
@ -34,4 +34,4 @@ RUN chmod +x /docker-entrypoint.d/10-generate-env.sh && \
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
# nginx will start automatically; our script in /docker-entrypoint.d runs first
|
||||
# nginx will start automatically; our script in /docker-entrypoint.d runs first
|
||||
@ -1,12 +1,14 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<html lang="he" dir="rtl">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<link rel="icon" type="image/png" href="/src/assets/my-recipes-logo-light.png" />
|
||||
<link rel="apple-touch-icon" href="/src/assets/my-recipes-logo-light.png" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>frontend</title>
|
||||
<meta name="google-site-verification" content="xzgN8wy4yNKqR0_qZZeUqj-wfnje0v7koYpUyU1ti2I" />
|
||||
<title>My Recipes | המתכונים שלי</title>
|
||||
<!-- Load environment variables before app starts -->
|
||||
<script src="/env.js"></script>
|
||||
<script src="/env.js?v=20251219"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
|
||||
6
frontend/public/sitemap.xml
Normal file
6
frontend/public/sitemap.xml
Normal file
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<!-- Search-engine sitemap: the app is a single-page site, so only the root URL is listed. -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>https://my-recipes.dvirlabs.com/</loc>
  </url>
</urlset>
|
||||
1056
frontend/src/App.css
1056
frontend/src/App.css
File diff suppressed because it is too large
Load Diff
@ -5,12 +5,34 @@ import TopBar from "./components/TopBar";
|
||||
import RecipeSearchList from "./components/RecipeSearchList";
|
||||
import RecipeDetails from "./components/RecipeDetails";
|
||||
import RecipeFormDrawer from "./components/RecipeFormDrawer";
|
||||
import GroceryLists from "./components/GroceryLists";
|
||||
import PinnedGroceryLists from "./components/PinnedGroceryLists";
|
||||
import AdminPanel from "./components/AdminPanel";
|
||||
import Modal from "./components/Modal";
|
||||
import ToastContainer from "./components/ToastContainer";
|
||||
import ThemeToggle from "./components/ThemeToggle";
|
||||
import Login from "./components/Login";
|
||||
import Register from "./components/Register";
|
||||
import ResetPassword from "./components/ResetPassword";
|
||||
import { getRecipes, getRandomRecipe, createRecipe, updateRecipe, deleteRecipe } from "./api";
|
||||
import { getToken, removeToken, getMe } from "./authApi";
|
||||
|
||||
function App() {
|
||||
const [isAuthenticated, setIsAuthenticated] = useState(false);
|
||||
const [user, setUser] = useState(null);
|
||||
const [authView, setAuthView] = useState("login"); // "login" or "register"
|
||||
const [loadingAuth, setLoadingAuth] = useState(true);
|
||||
const [resetToken, setResetToken] = useState(null);
|
||||
const [currentView, setCurrentView] = useState(() => {
|
||||
try {
|
||||
return localStorage.getItem("currentView") || "recipes";
|
||||
} catch {
|
||||
return "recipes";
|
||||
}
|
||||
}); // "recipes", "grocery-lists", or "admin"
|
||||
|
||||
const [selectedGroceryListId, setSelectedGroceryListId] = useState(null);
|
||||
|
||||
const [recipes, setRecipes] = useState([]);
|
||||
const [selectedRecipe, setSelectedRecipe] = useState(null);
|
||||
|
||||
@ -19,7 +41,7 @@ function App() {
|
||||
const [filterMealType, setFilterMealType] = useState("");
|
||||
const [filterMaxTime, setFilterMaxTime] = useState("");
|
||||
const [filterTags, setFilterTags] = useState([]);
|
||||
const [filterMadeBy, setFilterMadeBy] = useState("");
|
||||
const [filterOwner, setFilterOwner] = useState("");
|
||||
|
||||
// Random recipe filters
|
||||
const [mealTypeFilter, setMealTypeFilter] = useState("");
|
||||
@ -33,6 +55,7 @@ function App() {
|
||||
const [editingRecipe, setEditingRecipe] = useState(null);
|
||||
|
||||
const [deleteModal, setDeleteModal] = useState({ isOpen: false, recipeId: null, recipeName: "" });
|
||||
const [logoutModal, setLogoutModal] = useState(false);
|
||||
const [toasts, setToasts] = useState([]);
|
||||
const [theme, setTheme] = useState(() => {
|
||||
try {
|
||||
@ -41,7 +64,86 @@ function App() {
|
||||
return "dark";
|
||||
}
|
||||
});
|
||||
const [showPinnedSidebar, setShowPinnedSidebar] = useState(false);
|
||||
|
||||
// Swipe gesture handling for mobile sidebar
|
||||
const [touchStart, setTouchStart] = useState(null);
|
||||
const [touchEnd, setTouchEnd] = useState(null);
|
||||
|
||||
// Minimum swipe distance (in px)
|
||||
const minSwipeDistance = 50;
|
||||
|
||||
const onTouchStart = (e) => {
|
||||
setTouchEnd(null);
|
||||
setTouchStart(e.targetTouches[0].clientX);
|
||||
};
|
||||
|
||||
const onTouchMove = (e) => {
|
||||
setTouchEnd(e.targetTouches[0].clientX);
|
||||
};
|
||||
|
||||
const onTouchEnd = () => {
|
||||
if (!touchStart || !touchEnd) return;
|
||||
|
||||
const distance = touchStart - touchEnd;
|
||||
const isLeftSwipe = distance > minSwipeDistance;
|
||||
|
||||
if (isLeftSwipe) {
|
||||
setShowPinnedSidebar(false);
|
||||
}
|
||||
|
||||
setTouchStart(null);
|
||||
setTouchEnd(null);
|
||||
};
|
||||
|
||||
// Check authentication on mount
|
||||
useEffect(() => {
|
||||
const checkAuth = async () => {
|
||||
// Check for reset token in URL
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const resetTokenParam = urlParams.get('reset_token');
|
||||
if (resetTokenParam) {
|
||||
setResetToken(resetTokenParam);
|
||||
// Clean URL
|
||||
window.history.replaceState({}, document.title, window.location.pathname);
|
||||
setLoadingAuth(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const token = getToken();
|
||||
if (token) {
|
||||
try {
|
||||
const userData = await getMe(token);
|
||||
setUser(userData);
|
||||
setIsAuthenticated(true);
|
||||
} catch (err) {
|
||||
// Only remove token on authentication errors (401), not network errors
|
||||
if (err.status === 401) {
|
||||
console.log("Token invalid or expired, logging out");
|
||||
removeToken();
|
||||
setIsAuthenticated(false);
|
||||
} else {
|
||||
// Network error or server error - keep user logged in
|
||||
console.warn("Auth check failed but keeping session:", err.message);
|
||||
setIsAuthenticated(true); // Assume still authenticated
|
||||
}
|
||||
}
|
||||
}
|
||||
setLoadingAuth(false);
|
||||
};
|
||||
checkAuth();
|
||||
}, []);
|
||||
|
||||
// Save currentView to localStorage
|
||||
useEffect(() => {
|
||||
try {
|
||||
localStorage.setItem("currentView", currentView);
|
||||
} catch (err) {
|
||||
console.error("Unable to save view", err);
|
||||
}
|
||||
}, [currentView]);
|
||||
|
||||
// Load recipes for everyone (readonly for non-authenticated)
|
||||
useEffect(() => {
|
||||
loadRecipes();
|
||||
}, []);
|
||||
@ -96,8 +198,8 @@ function App() {
|
||||
}
|
||||
}
|
||||
|
||||
// Filter by made_by
|
||||
if (filterMadeBy && (!recipe.made_by || recipe.made_by !== filterMadeBy)) {
|
||||
// Filter by made_by (username)
|
||||
if (filterOwner && (!recipe.made_by || recipe.made_by !== filterOwner)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -134,7 +236,8 @@ function App() {
|
||||
|
||||
const handleCreateRecipe = async (payload) => {
|
||||
try {
|
||||
const created = await createRecipe(payload);
|
||||
const token = getToken();
|
||||
const created = await createRecipe(payload, token);
|
||||
setDrawerOpen(false);
|
||||
setEditingRecipe(null);
|
||||
await loadRecipes();
|
||||
@ -153,7 +256,8 @@ function App() {
|
||||
|
||||
const handleUpdateRecipe = async (payload) => {
|
||||
try {
|
||||
await updateRecipe(editingRecipe.id, payload);
|
||||
const token = getToken();
|
||||
await updateRecipe(editingRecipe.id, payload, token);
|
||||
setDrawerOpen(false);
|
||||
setEditingRecipe(null);
|
||||
await loadRecipes();
|
||||
@ -177,7 +281,8 @@ function App() {
|
||||
setDeleteModal({ isOpen: false, recipeId: null, recipeName: "" });
|
||||
|
||||
try {
|
||||
await deleteRecipe(recipeId);
|
||||
const token = getToken();
|
||||
await deleteRecipe(recipeId, token);
|
||||
await loadRecipes();
|
||||
setSelectedRecipe(null);
|
||||
addToast("המתכון נמחק בהצלחה!", "success");
|
||||
@ -208,51 +313,224 @@ function App() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleLoginSuccess = async () => {
|
||||
const token = getToken();
|
||||
const userData = await getMe(token);
|
||||
setUser(userData);
|
||||
setIsAuthenticated(true);
|
||||
await loadRecipes();
|
||||
};
|
||||
|
||||
const handleLogout = () => {
|
||||
setLogoutModal(true);
|
||||
};
|
||||
|
||||
const confirmLogout = () => {
|
||||
removeToken();
|
||||
setUser(null);
|
||||
setIsAuthenticated(false);
|
||||
setRecipes([]);
|
||||
setSelectedRecipe(null);
|
||||
setLogoutModal(false);
|
||||
addToast('התנתקת בהצלחה', 'success');
|
||||
};
|
||||
|
||||
// Show loading state while checking auth
|
||||
if (loadingAuth) {
|
||||
return (
|
||||
<div className="app-root">
|
||||
<div style={{ textAlign: "center", padding: "3rem", color: "var(--text-muted)" }}>
|
||||
טוען...
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Show main app (readonly if not authenticated)
|
||||
return (
|
||||
<div className="app-root">
|
||||
<ThemeToggle theme={theme} onToggleTheme={() => setTheme((t) => (t === "dark" ? "light" : "dark"))} hidden={drawerOpen} />
|
||||
<TopBar onAddClick={() => setDrawerOpen(true)} />
|
||||
|
||||
{/* Pinned notes toggle button - only visible on recipes view for authenticated users */}
|
||||
{isAuthenticated && currentView === "recipes" && (
|
||||
<button
|
||||
className="pinned-toggle-btn mobile-only"
|
||||
onClick={() => setShowPinnedSidebar(!showPinnedSidebar)}
|
||||
aria-label="הצג תזכירים"
|
||||
title="תזכירים נעוצים"
|
||||
>
|
||||
<span className="note-icon-lines">
|
||||
<span></span>
|
||||
<span></span>
|
||||
<span></span>
|
||||
</span>
|
||||
</button>
|
||||
)}
|
||||
|
||||
{/* User greeting above TopBar */}
|
||||
{isAuthenticated && user && (
|
||||
<div className="user-greeting-header">
|
||||
שלום, {user.display_name || user.username} 👋
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Show login/register option in TopBar if not authenticated */}
|
||||
{!isAuthenticated ? (
|
||||
<header className="topbar">
|
||||
<div className="topbar-left">
|
||||
<span className="logo-emoji" role="img" aria-label="plate">🍽</span>
|
||||
<div className="brand">
|
||||
<div className="brand-title">מה לבשל היום?</div>
|
||||
<div className="brand-subtitle">מנהל המתכונים האישי שלך</div>
|
||||
</div>
|
||||
</div>
|
||||
<div style={{ display: "flex", gap: "0.6rem", alignItems: "center" }}>
|
||||
<button className="btn ghost" onClick={() => setAuthView("login")}>
|
||||
התחבר
|
||||
</button>
|
||||
<button className="btn primary" onClick={() => setAuthView("register")}>
|
||||
הירשם
|
||||
</button>
|
||||
</div>
|
||||
</header>
|
||||
) : (
|
||||
<TopBar
|
||||
onAddClick={() => setDrawerOpen(true)}
|
||||
user={user}
|
||||
onLogout={handleLogout}
|
||||
onShowToast={addToast}
|
||||
onNotificationClick={(listId) => {
|
||||
setCurrentView("grocery-lists");
|
||||
setSelectedGroceryListId(listId);
|
||||
}}
|
||||
onAdminClick={() => setCurrentView("admin")}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Show auth modal if needed */}
|
||||
{!isAuthenticated && authView !== null && !resetToken && (
|
||||
<div className="drawer-backdrop" onClick={() => setAuthView(null)}>
|
||||
<div className="auth-modal" onClick={(e) => e.stopPropagation()}>
|
||||
{authView === "login" ? (
|
||||
<Login
|
||||
onSuccess={handleLoginSuccess}
|
||||
onSwitchToRegister={() => setAuthView("register")}
|
||||
/>
|
||||
) : (
|
||||
<Register
|
||||
onSuccess={handleLoginSuccess}
|
||||
onSwitchToLogin={() => setAuthView("login")}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Show reset password if token present */}
|
||||
{!isAuthenticated && resetToken && (
|
||||
<div className="drawer-backdrop">
|
||||
<div className="auth-modal">
|
||||
<ResetPassword
|
||||
token={resetToken}
|
||||
onSuccess={() => {
|
||||
setResetToken(null);
|
||||
setAuthView("login");
|
||||
addToast("הסיסמה עודכנה בהצלחה! כעת תוכל להתחבר עם הסיסמה החדשה", "success");
|
||||
}}
|
||||
onBack={() => {
|
||||
setResetToken(null);
|
||||
setAuthView("login");
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{isAuthenticated && (
|
||||
<nav className="main-navigation">
|
||||
<button
|
||||
className={`nav-tab ${currentView === "recipes" ? "active" : ""}`}
|
||||
onClick={() => setCurrentView("recipes")}
|
||||
>
|
||||
📖 מתכונים
|
||||
</button>
|
||||
<button
|
||||
className={`nav-tab ${currentView === "grocery-lists" ? "active" : ""}`}
|
||||
onClick={() => setCurrentView("grocery-lists")}
|
||||
>
|
||||
🛒 רשימות קניות
|
||||
</button>
|
||||
{user?.is_admin && (
|
||||
<button
|
||||
className={`nav-tab ${currentView === "admin" ? "active" : ""}`}
|
||||
onClick={() => setCurrentView("admin")}
|
||||
>
|
||||
🛡️ ניהול
|
||||
</button>
|
||||
)}
|
||||
</nav>
|
||||
)}
|
||||
|
||||
<main className="layout">
|
||||
<section className="sidebar">
|
||||
<RecipeSearchList
|
||||
allRecipes={recipes}
|
||||
recipes={getFilteredRecipes()}
|
||||
selectedId={selectedRecipe?.id}
|
||||
onSelect={setSelectedRecipe}
|
||||
searchQuery={searchQuery}
|
||||
onSearchChange={setSearchQuery}
|
||||
filterMealType={filterMealType}
|
||||
onMealTypeChange={setFilterMealType}
|
||||
filterMaxTime={filterMaxTime}
|
||||
onMaxTimeChange={setFilterMaxTime}
|
||||
filterTags={filterTags}
|
||||
onTagsChange={setFilterTags}
|
||||
filterMadeBy={filterMadeBy}
|
||||
onMadeByChange={setFilterMadeBy}
|
||||
{currentView === "admin" ? (
|
||||
<div className="admin-view">
|
||||
<AdminPanel onShowToast={addToast} />
|
||||
</div>
|
||||
) : currentView === "grocery-lists" ? (
|
||||
<GroceryLists
|
||||
user={user}
|
||||
onShowToast={addToast}
|
||||
selectedListIdFromNotification={selectedGroceryListId}
|
||||
onListSelected={() => setSelectedGroceryListId(null)}
|
||||
/>
|
||||
</section>
|
||||
|
||||
<section className="content">
|
||||
{error && <div className="error-banner">{error}</div>}
|
||||
|
||||
{/* Random Recipe Suggester - Top Left */}
|
||||
<section className="panel filter-panel">
|
||||
<h3>חיפוש מתכון רנדומלי</h3>
|
||||
<div className="panel-grid">
|
||||
<div className="field">
|
||||
<label>סוג ארוחה</label>
|
||||
<select
|
||||
value={mealTypeFilter}
|
||||
onChange={(e) => setMealTypeFilter(e.target.value)}
|
||||
) : (
|
||||
<>
|
||||
{isAuthenticated && (
|
||||
<>
|
||||
<aside
|
||||
className={`pinned-lists-sidebar ${showPinnedSidebar ? 'mobile-visible' : ''}`}
|
||||
onTouchStart={onTouchStart}
|
||||
onTouchMove={onTouchMove}
|
||||
onTouchEnd={onTouchEnd}
|
||||
>
|
||||
<option value="">לא משנה</option>
|
||||
<option value="breakfast">בוקר</option>
|
||||
<option value="lunch">צהריים</option>
|
||||
<option value="dinner">ערב</option>
|
||||
<option value="snack">נשנוש</option>
|
||||
</select>
|
||||
</div>
|
||||
<button
|
||||
className="close-sidebar-btn mobile-only"
|
||||
onClick={() => setShowPinnedSidebar(false)}
|
||||
aria-label="סגור תזכירים"
|
||||
>
|
||||
✕
|
||||
</button>
|
||||
<PinnedGroceryLists onShowToast={addToast} />
|
||||
</aside>
|
||||
{showPinnedSidebar && (
|
||||
<div
|
||||
className="sidebar-backdrop mobile-only"
|
||||
onClick={() => setShowPinnedSidebar(false)}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
<section className="content-wrapper">
|
||||
<section className="content">
|
||||
{error && <div className="error-banner">{error}</div>}
|
||||
|
||||
{/* Random Recipe Suggester - Top Left */}
|
||||
<section className="panel filter-panel">
|
||||
<h3>חיפוש מתכון רנדומלי</h3>
|
||||
<div className="panel-grid">
|
||||
<div className="field">
|
||||
<label>סוג ארוחה</label>
|
||||
<select
|
||||
value={mealTypeFilter}
|
||||
onChange={(e) => setMealTypeFilter(e.target.value)}
|
||||
>
|
||||
<option value="">לא משנה</option>
|
||||
<option value="breakfast">בוקר</option>
|
||||
<option value="lunch">צהריים</option>
|
||||
<option value="dinner">ערב</option>
|
||||
<option value="snack">קינוחים</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div className="field">
|
||||
<label>זמן מקסימלי (דקות)</label>
|
||||
@ -289,19 +567,47 @@ function App() {
|
||||
recipe={selectedRecipe}
|
||||
onEditClick={handleEditRecipe}
|
||||
onShowDeleteModal={handleShowDeleteModal}
|
||||
isAuthenticated={isAuthenticated}
|
||||
currentUser={user}
|
||||
/>
|
||||
</section>
|
||||
|
||||
<section className="sidebar">
|
||||
<RecipeSearchList
|
||||
allRecipes={recipes}
|
||||
recipes={getFilteredRecipes()}
|
||||
selectedId={selectedRecipe?.id}
|
||||
onSelect={setSelectedRecipe}
|
||||
searchQuery={searchQuery}
|
||||
onSearchChange={setSearchQuery}
|
||||
filterMealType={filterMealType}
|
||||
onMealTypeChange={setFilterMealType}
|
||||
filterMaxTime={filterMaxTime}
|
||||
onMaxTimeChange={setFilterMaxTime}
|
||||
filterTags={filterTags}
|
||||
onTagsChange={setFilterTags}
|
||||
filterOwner={filterOwner}
|
||||
onOwnerChange={setFilterOwner}
|
||||
/>
|
||||
</section>
|
||||
</section>
|
||||
</>
|
||||
)}
|
||||
</main>
|
||||
|
||||
<RecipeFormDrawer
|
||||
open={drawerOpen}
|
||||
onClose={() => {
|
||||
setDrawerOpen(false);
|
||||
setEditingRecipe(null);
|
||||
}}
|
||||
onSubmit={handleFormSubmit}
|
||||
editingRecipe={editingRecipe}
|
||||
/>
|
||||
{isAuthenticated && (
|
||||
<RecipeFormDrawer
|
||||
open={drawerOpen}
|
||||
onClose={() => {
|
||||
setDrawerOpen(false);
|
||||
setEditingRecipe(null);
|
||||
}}
|
||||
onSubmit={handleFormSubmit}
|
||||
editingRecipe={editingRecipe}
|
||||
currentUser={user}
|
||||
allRecipes={recipes}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Modal
|
||||
isOpen={deleteModal.isOpen}
|
||||
@ -314,6 +620,17 @@ function App() {
|
||||
onCancel={handleCancelDelete}
|
||||
/>
|
||||
|
||||
<Modal
|
||||
isOpen={logoutModal}
|
||||
title="התנתקות"
|
||||
message="האם אתה בטוח שברצונך להתנתק?"
|
||||
confirmText="התנתק"
|
||||
cancelText="ביטול"
|
||||
isDangerous={false}
|
||||
onConfirm={confirmLogout}
|
||||
onCancel={() => setLogoutModal(false)}
|
||||
/>
|
||||
|
||||
<ToastContainer toasts={toasts} onRemove={removeToast} />
|
||||
</div>
|
||||
);
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
// Get API base from injected env.js or fallback to /api relative path
|
||||
const getApiBase = () => {
|
||||
export const getApiBase = () => {
|
||||
if (typeof window !== "undefined" && window.__ENV__ && window.__ENV__.API_BASE) {
|
||||
return window.__ENV__.API_BASE;
|
||||
}
|
||||
@ -37,10 +37,14 @@ export async function getRandomRecipe(filters) {
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function createRecipe(recipe) {
|
||||
export async function createRecipe(recipe, token) {
|
||||
const headers = { "Content-Type": "application/json" };
|
||||
if (token) {
|
||||
headers["Authorization"] = `Bearer ${token}`;
|
||||
}
|
||||
const res = await fetch(`${API_BASE}/recipes`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
headers,
|
||||
body: JSON.stringify(recipe),
|
||||
});
|
||||
if (!res.ok) {
|
||||
@ -49,10 +53,14 @@ export async function createRecipe(recipe) {
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function updateRecipe(id, payload) {
|
||||
export async function updateRecipe(id, payload, token) {
|
||||
const headers = { "Content-Type": "application/json" };
|
||||
if (token) {
|
||||
headers["Authorization"] = `Bearer ${token}`;
|
||||
}
|
||||
const res = await fetch(`${API_BASE}/recipes/${id}`, {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
headers,
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
if (!res.ok) {
|
||||
@ -61,9 +69,14 @@ export async function updateRecipe(id, payload) {
|
||||
return res.json();
|
||||
}
|
||||
|
||||
export async function deleteRecipe(id) {
|
||||
export async function deleteRecipe(id, token) {
|
||||
const headers = {};
|
||||
if (token) {
|
||||
headers["Authorization"] = `Bearer ${token}`;
|
||||
}
|
||||
const res = await fetch(`${API_BASE}/recipes/${id}`, {
|
||||
method: "DELETE",
|
||||
headers,
|
||||
});
|
||||
if (!res.ok && res.status !== 204) {
|
||||
throw new Error("Failed to delete recipe");
|
||||
|
||||
BIN
frontend/src/assets/my-recipes-logo-dark.png
Normal file
BIN
frontend/src/assets/my-recipes-logo-dark.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 173 KiB |
BIN
frontend/src/assets/my-recipes-logo-light.png
Normal file
BIN
frontend/src/assets/my-recipes-logo-light.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 129 KiB |
BIN
frontend/src/assets/placeholder-dark.png
Normal file
BIN
frontend/src/assets/placeholder-dark.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 92 KiB |
BIN
frontend/src/assets/placeholder-light.png
Normal file
BIN
frontend/src/assets/placeholder-light.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 118 KiB |
103
frontend/src/authApi.js
Normal file
103
frontend/src/authApi.js
Normal file
@ -0,0 +1,103 @@
|
||||
/**
 * Resolve the backend API base URL.
 * Prefers the value injected at deploy time via env.js (window.__ENV__.API_BASE);
 * falls back to the relative "/api" path (same-origin reverse proxy).
 */
const getApiBase = () => {
  const injected =
    typeof window !== "undefined" ? window.__ENV__?.API_BASE : undefined;
  return injected || "/api";
};
|
||||
|
||||
const API_BASE = getApiBase();
|
||||
|
||||
/**
 * Register a new user account (POST /auth/register).
 *
 * @param {string} username
 * @param {string} email
 * @param {string} password
 * @param {string} firstName - sent as snake_case `first_name` (backend convention)
 * @param {string} lastName - sent as `last_name`
 * @param {string} displayName - sent as `display_name`
 * @returns {Promise<object>} the server's registration payload
 * @throws {Error} with the server's `detail` message, or a generic fallback
 */
export async function register(username, email, password, firstName, lastName, displayName) {
  const res = await fetch(`${API_BASE}/auth/register`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      username,
      email,
      password,
      first_name: firstName,
      last_name: lastName,
      display_name: displayName,
    }),
  });
  if (!res.ok) {
    // Error bodies are usually JSON ({ detail }), but proxies/gateways can
    // return HTML or plain text on 5xx — fall back to an empty object so we
    // throw the generic message instead of a SyntaxError from res.json()
    // that would mask the real HTTP failure.
    const error = await res.json().catch(() => ({}));
    throw new Error(error.detail || "Failed to register");
  }
  return res.json();
}
|
||||
|
||||
/**
 * Authenticate with username/password (POST /auth/login).
 *
 * @param {string} username
 * @param {string} password
 * @returns {Promise<object>} payload containing `access_token`
 * @throws {Error} with the server's `detail` message, or a generic fallback
 */
export async function login(username, password) {
  const res = await fetch(`${API_BASE}/auth/login`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ username, password }),
  });
  if (!res.ok) {
    // Defensive parse: a gateway may answer 5xx with non-JSON, and res.json()
    // would then throw a SyntaxError that hides the actual login failure.
    const error = await res.json().catch(() => ({}));
    throw new Error(error.detail || "Failed to login");
  }
  return res.json();
}
|
||||
|
||||
/**
 * Fetch the authenticated user's profile (GET /auth/me).
 *
 * @param {string} token - JWT bearer token
 * @returns {Promise<object>} the user info payload
 * @throws {Error} carrying a `status` property with the HTTP status code,
 *   so callers can distinguish an expired/invalid token (401) from other errors.
 */
export async function getMe(token) {
  const response = await fetch(`${API_BASE}/auth/me`, {
    headers: { Authorization: `Bearer ${token}` },
  });
  if (response.ok) {
    return response.json();
  }
  const failure = new Error("Failed to get user info");
  failure.status = response.status;
  throw failure;
}
|
||||
|
||||
/**
 * Ask the server to email the user a verification code for a password change
 * (POST /auth/request-password-change-code).
 *
 * @param {string} token - JWT bearer token
 * @returns {Promise<object>} the server's acknowledgement payload
 * @throws {Error} with the server's `detail` message on failure
 */
export async function requestPasswordChangeCode(token) {
  const response = await fetch(`${API_BASE}/auth/request-password-change-code`, {
    method: "POST",
    headers: { Authorization: `Bearer ${token}` },
  });
  if (!response.ok) {
    const body = await response.json();
    throw new Error(body.detail || "Failed to send verification code");
  }
  return response.json();
}
|
||||
|
||||
/**
 * Complete a password change using the emailed verification code
 * (POST /auth/change-password).
 *
 * @param {string} verificationCode - 6-digit code sent to the user's email
 * @param {string} currentPassword
 * @param {string} newPassword
 * @param {string} token - JWT bearer token
 * @returns {Promise<object>} the server's acknowledgement payload
 * @throws {Error} with the server's `detail` message on failure
 */
export async function changePassword(verificationCode, currentPassword, newPassword, token) {
  // Backend expects snake_case field names.
  const payload = {
    verification_code: verificationCode,
    current_password: currentPassword,
    new_password: newPassword,
  };
  const response = await fetch(`${API_BASE}/auth/change-password`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    const body = await response.json();
    throw new Error(body.detail || "Failed to change password");
  }
  return response.json();
}
|
||||
|
||||
// Auth token persistence helpers (localStorage-backed, key "auth_token").

const TOKEN_STORAGE_KEY = "auth_token";

/** Persist the JWT so the session survives page reloads. */
export const saveToken = (token) => localStorage.setItem(TOKEN_STORAGE_KEY, token);

/** @returns {string|null} the stored JWT, or null when logged out. */
export const getToken = () => localStorage.getItem(TOKEN_STORAGE_KEY);

/** Forget the stored JWT (logout). */
export const removeToken = () => localStorage.removeItem(TOKEN_STORAGE_KEY);
|
||||
66
frontend/src/backupApi.js
Normal file
66
frontend/src/backupApi.js
Normal file
@ -0,0 +1,66 @@
|
||||
import { getToken } from './authApi';
|
||||
|
||||
const API_BASE_URL = window.__ENV__?.API_BASE || 'http://localhost:8000';
|
||||
|
||||
/**
 * Trigger a manual database backup (admin only, POST /admin/backup).
 *
 * @returns {Promise<object>} backup metadata returned by the server
 * @throws {Error} with the server's `detail` message on failure
 */
export async function triggerBackup() {
  const authHeaders = {
    'Authorization': `Bearer ${getToken()}`,
    'Content-Type': 'application/json',
  };
  const response = await fetch(`${API_BASE_URL}/admin/backup`, {
    method: 'POST',
    headers: authHeaders,
  });
  if (!response.ok) {
    const error = await response.json();
    throw new Error(error.detail || 'Failed to create backup');
  }
  return response.json();
}
|
||||
|
||||
/**
 * List all available backups (admin only, GET /admin/backups).
 *
 * @returns {Promise<object>} payload with `backups`, `environment` and `bucket`
 * @throws {Error} with the server's `detail` message on failure
 */
export async function listBackups() {
  const authHeaders = {
    'Authorization': `Bearer ${getToken()}`,
    'Content-Type': 'application/json',
  };
  const response = await fetch(`${API_BASE_URL}/admin/backups`, {
    method: 'GET',
    headers: authHeaders,
  });
  if (!response.ok) {
    const error = await response.json();
    throw new Error(error.detail || 'Failed to list backups');
  }
  return response.json();
}
|
||||
|
||||
/**
 * Restore the database from a named backup (admin only, POST /admin/restore).
 *
 * @param {string} filename - backup file name as returned by listBackups()
 * @returns {Promise<object>} the server's restore result payload
 * @throws {Error} with the server's `detail` message on failure
 */
export async function restoreBackup(filename) {
  // Filename goes in the query string, so it must be URL-encoded.
  const url = `${API_BASE_URL}/admin/restore?filename=${encodeURIComponent(filename)}`;
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${getToken()}`,
      'Content-Type': 'application/json',
    },
  });
  if (!response.ok) {
    const error = await response.json();
    throw new Error(error.detail || 'Failed to restore backup');
  }
  return response.json();
}
|
||||
189
frontend/src/components/AdminPanel.jsx
Normal file
189
frontend/src/components/AdminPanel.jsx
Normal file
@ -0,0 +1,189 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { triggerBackup, listBackups, restoreBackup } from '../backupApi';
|
||||
import Modal from './Modal';
|
||||
|
||||
/**
 * Admin-only backup management panel: lists existing backups, triggers new
 * ones, and restores the database from a selected backup with a confirmation
 * modal and an in-progress state.
 *
 * Props:
 *   onShowToast(message, type) - toast callback for success/error feedback.
 */
function AdminPanel({ onShowToast }) {
  const [backups, setBackups] = useState([]);
  const [loading, setLoading] = useState(false);
  // Confirmation dialog state; `filename` is the backup selected for restore.
  const [restoreModal, setRestoreModal] = useState({ isOpen: false, filename: '' });
  // True while a restore request is in flight; keeps the modal open in
  // progress mode and blocks closing it.
  const [restoring, setRestoring] = useState(false);
  const [environment, setEnvironment] = useState('dev');
  const [bucket, setBucket] = useState('');

  // Load the backup list once on mount.
  useEffect(() => {
    loadBackups();
  }, []);

  // Fetch backups plus environment/bucket metadata from the server.
  const loadBackups = async () => {
    try {
      setLoading(true);
      const data = await listBackups();
      setBackups(data.backups || []);
      setEnvironment(data.environment || 'dev');
      setBucket(data.bucket || '');
    } catch (error) {
      onShowToast(error.message || 'שגיאה בטעינת גיבויים', 'error');
    } finally {
      setLoading(false);
    }
  };

  // Create a new backup, then reload the list to show it.
  const handleCreateBackup = async () => {
    try {
      setLoading(true);
      const result = await triggerBackup();
      onShowToast('גיבוי נוצר בהצלחה! 📦', 'success');
      loadBackups(); // Refresh list
    } catch (error) {
      onShowToast(error.message || 'שגיאה ביצירת גיבוי', 'error');
    } finally {
      setLoading(false);
    }
  };

  // Open the confirmation modal for the chosen backup file.
  const handleRestoreClick = (filename) => {
    setRestoreModal({ isOpen: true, filename });
  };

  // Run the restore. Note: this closure still sees the pre-reset
  // restoreModal.filename even though the modal state is cleared first.
  const handleRestoreConfirm = async () => {
    console.log('Restore confirm clicked, filename:', restoreModal.filename);
    setRestoreModal({ isOpen: false, filename: '' });
    setRestoring(true);

    try {
      console.log('Starting restore...');
      const result = await restoreBackup(restoreModal.filename);
      console.log('Restore result:', result);
      onShowToast('שחזור הושלם בהצלחה! ♻️ מרענן את הדף...', 'success');

      // Refresh page after 2 seconds to reload all data
      setTimeout(() => {
        window.location.reload();
      }, 2000);
    } catch (error) {
      console.error('Restore error:', error);
      onShowToast(error.message || 'שגיאה בשחזור גיבוי', 'error');
      setRestoring(false);
    }
  };

  // Human-readable file size (base-1024 units, 2 decimal places).
  const formatBytes = (bytes) => {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return Math.round(bytes / Math.pow(k, i) * 100) / 100 + ' ' + sizes[i];
  };

  // Format an ISO timestamp for the he-IL locale (date + time, 2-digit parts).
  const formatDate = (isoString) => {
    const date = new Date(isoString);
    return date.toLocaleString('he-IL', {
      year: 'numeric',
      month: '2-digit',
      day: '2-digit',
      hour: '2-digit',
      minute: '2-digit'
    });
  };

  return (
    <div className="admin-panel">
      <div className="admin-header">
        <div>
          <h2>ניהול גיבויים 🛡️</h2>
          <div className="environment-info">
            <span className={`env-badge ${environment}`}>
              {environment === 'prod' ? '🔴 PRODUCTION' : '🟢 DEVELOPMENT'}
            </span>
            <span className="bucket-name">{bucket}</span>
          </div>
        </div>
        <button
          className="btn primary"
          onClick={handleCreateBackup}
          disabled={loading}
        >
          {loading ? 'יוצר גיבוי...' : 'צור גיבוי חדש'}
        </button>
      </div>

      {/* Three states: initial load, empty list, or the backups table. */}
      {loading && backups.length === 0 ? (
        <div className="loading">טוען גיבויים...</div>
      ) : backups.length === 0 ? (
        <div className="empty-state">אין גיבויים זמינים</div>
      ) : (
        <div className="backups-list">
          <table className="backups-table">
            <thead>
              <tr>
                <th className="col-filename">קובץ</th>
                <th className="col-date">תאריך</th>
                <th className="col-size">גודל</th>
                <th className="col-actions">פעולות</th>
              </tr>
            </thead>
            <tbody>
              {backups.map((backup) => (
                <tr key={backup.filename}>
                  <td className="filename">{backup.filename}</td>
                  <td className="date">{formatDate(backup.last_modified)}</td>
                  <td className="size">{formatBytes(backup.size)}</td>
                  <td className="actions">
                    <button
                      className="btn ghost small"
                      onClick={() => handleRestoreClick(backup.filename)}
                      disabled={loading}
                    >
                      שחזר
                    </button>
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      )}

      {/* Confirmation / progress modal; cannot be dismissed while restoring. */}
      <Modal
        isOpen={restoreModal.isOpen || restoring}
        onClose={() => !restoring && setRestoreModal({ isOpen: false, filename: '' })}
        title={restoring ? "⏳ משחזר גיבוי..." : "⚠️ אישור שחזור גיבוי"}
      >
        {restoring ? (
          <div className="restore-progress">
            <div className="progress-bar-container">
              <div className="progress-bar-fill"></div>
            </div>
            <p className="progress-text">מוריד גיבוי...</p>
            <p className="progress-text">משחזר מסד נתונים...</p>
            <p className="progress-text-muted">אנא המתן, התהליך עשוי לקחת מספר דקות</p>
          </div>
        ) : (
          <div className="restore-warning">
            <p>פעולה זו תמחק את כל הנתונים הנוכחיים!</p>
            <p>האם אתה בטוח שברצונך לשחזר מהגיבוי:</p>
            <p className="filename-highlight">{restoreModal.filename}</p>
            <div className="modal-actions">
              <button
                className="btn ghost"
                onClick={() => setRestoreModal({ isOpen: false, filename: '' })}
                disabled={loading}
              >
                ביטול
              </button>
              <button
                className="btn danger"
                onClick={handleRestoreConfirm}
                disabled={loading}
              >
                שחזר
              </button>
            </div>
          </div>
        )}
      </Modal>
    </div>
  );
}

export default AdminPanel;
|
||||
49
frontend/src/components/AutocompleteInput.jsx
Normal file
49
frontend/src/components/AutocompleteInput.jsx
Normal file
@ -0,0 +1,49 @@
|
||||
import { useState, useEffect, useRef } from "react";
|
||||
|
||||
function AutocompleteInput({ value, onChange, onKeyDown, suggestions = [], placeholder, inputRef, ...props }) {
|
||||
const [suggestion, setSuggestion] = useState("");
|
||||
const localRef = useRef(null);
|
||||
const ref = inputRef || localRef;
|
||||
|
||||
useEffect(() => {
|
||||
if (value && suggestions.length > 0) {
|
||||
const match = suggestions.find(s =>
|
||||
s.toLowerCase().startsWith(value.toLowerCase()) && s.toLowerCase() !== value.toLowerCase()
|
||||
);
|
||||
setSuggestion(match || "");
|
||||
} else {
|
||||
setSuggestion("");
|
||||
}
|
||||
}, [value, suggestions]);
|
||||
|
||||
const handleKeyDown = (e) => {
|
||||
if (e.key === 'Tab' && suggestion) {
|
||||
e.preventDefault();
|
||||
onChange({ target: { value: suggestion } });
|
||||
setSuggestion("");
|
||||
} else if (onKeyDown) {
|
||||
onKeyDown(e);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="autocomplete-wrapper">
|
||||
<input
|
||||
ref={ref}
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder={placeholder}
|
||||
{...props}
|
||||
/>
|
||||
{suggestion && (
|
||||
<div className="autocomplete-suggestion">
|
||||
<span className="autocomplete-typed">{value}</span>
|
||||
<span className="autocomplete-rest">{suggestion.slice(value.length)}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default AutocompleteInput;
|
||||
176
frontend/src/components/ChangePassword.jsx
Normal file
176
frontend/src/components/ChangePassword.jsx
Normal file
@ -0,0 +1,176 @@
|
||||
import { useState } from "react";
|
||||
import { changePassword, requestPasswordChangeCode } from "../authApi";
|
||||
|
||||
/**
 * Two-step password change modal:
 *   step 1 - request an email verification code;
 *   step 2 - submit the code plus current/new passwords.
 *
 * Props:
 *   token     - JWT bearer token for the authenticated user
 *   onClose   - close the modal
 *   onSuccess - optional callback invoked after a successful change
 */
export default function ChangePassword({ token, onClose, onSuccess }) {
  const [step, setStep] = useState(1); // 1: request code, 2: enter code & passwords
  const [verificationCode, setVerificationCode] = useState("");
  const [currentPassword, setCurrentPassword] = useState("");
  const [newPassword, setNewPassword] = useState("");
  const [confirmPassword, setConfirmPassword] = useState("");
  const [error, setError] = useState("");
  const [loading, setLoading] = useState(false);
  // Drives the "code sent" confirmation banner in step 2.
  const [codeSent, setCodeSent] = useState(false);

  // Step 1: ask the server to email a verification code, then advance.
  const handleRequestCode = async () => {
    setError("");
    setLoading(true);

    try {
      await requestPasswordChangeCode(token);
      setCodeSent(true);
      setStep(2);
    } catch (err) {
      setError(err.message);
    } finally {
      setLoading(false);
    }
  };

  // Step 2: client-side validation, then submit the password change.
  const handleSubmit = async (e) => {
    e.preventDefault();
    setError("");

    // Validation
    if (!verificationCode || !currentPassword || !newPassword || !confirmPassword) {
      setError("נא למלא את כל השדות");
      return;
    }

    if (verificationCode.length !== 6) {
      setError("קוד האימות חייב להכיל 6 ספרות");
      return;
    }

    if (newPassword !== confirmPassword) {
      setError("הסיסמאות החדשות אינן תואמות");
      return;
    }

    if (newPassword.length < 6) {
      setError("הסיסמה חייבת להכיל לפחות 6 תווים");
      return;
    }

    setLoading(true);

    try {
      await changePassword(verificationCode, currentPassword, newPassword, token);
      onSuccess?.();
      onClose();
    } catch (err) {
      setError(err.message);
    } finally {
      setLoading(false);
    }
  };

  return (
    // Clicking the overlay closes the modal; clicks inside are stopped.
    <div className="modal-overlay" onClick={onClose}>
      <div className="modal-content" onClick={(e) => e.stopPropagation()}>
        <div className="modal-header">
          <h2>שינוי סיסמה</h2>
          <button className="close-btn" onClick={onClose}>
            ×
          </button>
        </div>

        <div className="modal-body">
          {error && <div className="error-message">{error}</div>}

          {step === 1 && (
            <div>
              <p style={{ marginBottom: "1rem", color: "var(--text-muted)" }}>
                קוד אימות יישלח לכתובת המייל שלך. הקוד תקף ל-10 דקות.
              </p>
              <button
                className="btn btn-primary full"
                onClick={handleRequestCode}
                disabled={loading}
              >
                {loading ? "שולח..." : "שלח קוד אימות"}
              </button>
            </div>
          )}

          {step === 2 && (
            <form onSubmit={handleSubmit}>
              {codeSent && (
                <div style={{
                  padding: "0.75rem",
                  background: "rgba(34, 197, 94, 0.1)",
                  borderRadius: "8px",
                  marginBottom: "1rem",
                  color: "var(--accent)"
                }}>
                  ✓ קוד אימות נשלח לכתובת המייל שלך
                </div>
              )}

              <div className="field">
                <label>קוד אימות (6 ספרות)</label>
                {/* Digits only, capped at 6 characters. */}
                <input
                  type="text"
                  value={verificationCode}
                  onChange={(e) => setVerificationCode(e.target.value.replace(/\D/g, '').slice(0, 6))}
                  disabled={loading}
                  autoFocus
                  placeholder="123456"
                  maxLength={6}
                  style={{ fontSize: "1.2rem", letterSpacing: "0.3rem", textAlign: "center" }}
                />
              </div>

              <div className="field">
                <label>סיסמה נוכחית</label>
                <input
                  type="password"
                  value={currentPassword}
                  onChange={(e) => setCurrentPassword(e.target.value)}
                  disabled={loading}
                />
              </div>

              <div className="field">
                <label>סיסמה חדשה</label>
                <input
                  type="password"
                  value={newPassword}
                  onChange={(e) => setNewPassword(e.target.value)}
                  disabled={loading}
                />
              </div>

              <div className="field">
                <label>אימות סיסמה חדשה</label>
                <input
                  type="password"
                  value={confirmPassword}
                  onChange={(e) => setConfirmPassword(e.target.value)}
                  disabled={loading}
                />
              </div>

              <div className="modal-footer">
                <button
                  type="button"
                  className="btn btn-secondary"
                  onClick={onClose}
                  disabled={loading}
                >
                  ביטול
                </button>
                <button
                  type="submit"
                  className="btn btn-primary"
                  disabled={loading}
                >
                  {loading ? "משנה..." : "שמור סיסמה"}
                </button>
              </div>
            </form>
          )}
        </div>
      </div>
    </div>
  );
}
|
||||
100
frontend/src/components/ForgotPassword.jsx
Normal file
100
frontend/src/components/ForgotPassword.jsx
Normal file
@ -0,0 +1,100 @@
|
||||
import { useState } from "react";
|
||||
import { getApiBase } from "../api";
|
||||
|
||||
/**
 * "Forgot password" form: collects an email address and POSTs it to
 * /forgot-password, showing the server's message on success.
 *
 * Props:
 *   onBack - return to the login screen.
 */
function ForgotPassword({ onBack }) {
  const [email, setEmail] = useState("");
  const [message, setMessage] = useState("");
  const [error, setError] = useState("");
  const [loading, setLoading] = useState(false);

  // Submit the reset request; success clears the email field and shows the
  // server-provided confirmation message.
  const handleSubmit = async (e) => {
    e.preventDefault();
    setError("");
    setMessage("");
    setLoading(true);

    try {
      const response = await fetch(`${getApiBase()}/forgot-password`, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
        },
        body: JSON.stringify({ email }),
      });

      const data = await response.json();

      if (response.ok) {
        setMessage(data.message);
        setEmail("");
      } else {
        setError(data.detail || "שגיאה בשליחת הבקשה");
      }
    } catch (err) {
      setError("שגיאה בשליחת הבקשה");
    } finally {
      setLoading(false);
    }
  };

  return (
    <div className="auth-container">
      <div className="auth-card">
        <h1 className="auth-title">שכחת סיסמה?</h1>
        <p className="auth-subtitle">
          הזן את כתובת המייל שלך ונשלח לך קישור לאיפוס הסיסמה
        </p>

        <form onSubmit={handleSubmit} className="auth-form">
          {error && <div className="error-banner">{error}</div>}
          {message && (
            <div
              style={{
                padding: "1rem",
                background: "var(--success-bg, #dcfce7)",
                border: "1px solid var(--success-border, #22c55e)",
                borderRadius: "6px",
                color: "var(--success-text, #166534)",
                marginBottom: "1rem",
                textAlign: "center",
              }}
            >
              {message}
            </div>
          )}

          <div className="field">
            <label>כתובת מייל</label>
            <input
              type="email"
              value={email}
              onChange={(e) => setEmail(e.target.value)}
              required
              placeholder="הזן כתובת מייל"
              autoComplete="email"
            />
          </div>

          <button
            type="submit"
            className="btn primary full-width"
            disabled={loading}
          >
            {loading ? "שולח..." : "שלח קישור לאיפוס"}
          </button>
        </form>

        <div className="auth-footer">
          <p>
            נזכרת בסיסמה?{" "}
            <button className="link-btn" onClick={onBack}>
              חזור להתחברות
            </button>
          </p>
        </div>
      </div>
    </div>
  );
}

export default ForgotPassword;
|
||||
1038
frontend/src/components/GroceryLists.jsx
Normal file
1038
frontend/src/components/GroceryLists.jsx
Normal file
File diff suppressed because it is too large
Load Diff
181
frontend/src/components/Login.jsx
Normal file
181
frontend/src/components/Login.jsx
Normal file
@ -0,0 +1,181 @@
|
||||
import { useState, useEffect } from "react";
|
||||
import { login, saveToken } from "../authApi";
|
||||
import ForgotPassword from "./ForgotPassword";
|
||||
|
||||
/**
 * Login screen supporting username/password, Google OAuth, and a
 * "forgot password" flow. Also completes the OAuth round-trip by picking up
 * a ?token= query parameter left by the backend redirect.
 *
 * Props:
 *   onSuccess          - called after a token has been saved
 *   onSwitchToRegister - switch to the registration screen
 */
function Login({ onSuccess, onSwitchToRegister }) {
  const [username, setUsername] = useState("");
  const [password, setPassword] = useState("");
  const [error, setError] = useState("");
  const [loading, setLoading] = useState(false);
  const [showForgotPassword, setShowForgotPassword] = useState(false);

  // Check for token in URL (from Google OAuth redirect)
  useEffect(() => {
    const urlParams = new URLSearchParams(window.location.search);
    const token = urlParams.get('token');
    if (token) {
      saveToken(token);
      // Clean URL
      window.history.replaceState({}, document.title, window.location.pathname);
      onSuccess();
    }
  }, [onSuccess]);

  // Username/password login; stores the returned access token on success.
  const handleSubmit = async (e) => {
    e.preventDefault();
    setError("");
    setLoading(true);

    try {
      const data = await login(username, password);
      saveToken(data.access_token);
      onSuccess();
    } catch (err) {
      setError(err.message);
    } finally {
      setLoading(false);
    }
  };

  // Full-page redirect to the backend's Google OAuth entry point.
  const handleGoogleLogin = () => {
    const apiBase = window.__ENV__?.API_BASE || "http://localhost:8000";
    window.location.href = `${apiBase}/auth/google/login`;
  };

  // Microsoft/Azure login redirect; its button is currently commented out below.
  const handleAzureLogin = () => {
    const apiBase = window.__ENV__?.API_BASE || "http://localhost:8000";
    window.location.href = `${apiBase}/auth/azure/login`;
  };

  if (showForgotPassword) {
    return <ForgotPassword onBack={() => setShowForgotPassword(false)} />;
  }

  return (
    <div className="auth-container">
      <div className="auth-card">
        <h1 className="auth-title">התחברות</h1>
        <p className="auth-subtitle">ברוכים השבים למתכונים שלכם</p>

        <form onSubmit={handleSubmit} className="auth-form">
          {error && <div className="error-banner">{error}</div>}

          <div className="field">
            <label>שם משתמש</label>
            <input
              type="text"
              value={username}
              onChange={(e) => setUsername(e.target.value)}
              required
              placeholder="הזן שם משתמש"
              autoComplete="username"
            />
          </div>

          <div className="field">
            <label>סיסמה</label>
            <input
              type="password"
              value={password}
              onChange={(e) => setPassword(e.target.value)}
              required
              placeholder="הזן סיסמה"
              autoComplete="current-password"
            />
          </div>

          <button type="submit" className="btn primary full-width" disabled={loading}>
            {loading ? "מתחבר..." : "התחבר"}
          </button>

          <div style={{ textAlign: "center", marginTop: "0.75rem" }}>
            <button
              type="button"
              className="link-btn"
              style={{ fontSize: "0.875rem", color: "var(--text-muted)" }}
              onClick={() => setShowForgotPassword(true)}
            >
              שכחת סיסמה?
            </button>
          </div>
        </form>

        {/* Visual divider between password login and OAuth providers. */}
        <div style={{
          margin: "1rem 0",
          textAlign: "center",
          color: "var(--text-muted)",
          position: "relative"
        }}>
          <div style={{
            position: "absolute",
            top: "50%",
            left: 0,
            right: 0,
            borderTop: "1px solid var(--border-subtle)",
            zIndex: 0
          }}></div>
          <span style={{
            background: "var(--card)",
            padding: "0 1rem",
            position: "relative",
            zIndex: 1
          }}>או</span>
        </div>

        <button
          type="button"
          onClick={handleGoogleLogin}
          className="btn ghost full-width"
          style={{
            display: "flex",
            alignItems: "center",
            justifyContent: "center",
            gap: "0.5rem",
            border: "1px solid var(--border-subtle)"
          }}
        >
          {/* Inline Google "G" logo. */}
          <svg width="18" height="18" viewBox="0 0 18 18">
            <path fill="#4285F4" d="M17.64 9.2c0-.637-.057-1.251-.164-1.84H9v3.481h4.844c-.209 1.125-.843 2.078-1.796 2.717v2.258h2.908c1.702-1.567 2.684-3.874 2.684-6.615z"/>
            <path fill="#34A853" d="M9 18c2.43 0 4.467-.806 5.956-2.184l-2.908-2.258c-.806.54-1.837.86-3.048.86-2.344 0-4.328-1.584-5.036-3.711H.957v2.332C2.438 15.983 5.482 18 9 18z"/>
            <path fill="#FBBC05" d="M3.964 10.707c-.18-.54-.282-1.117-.282-1.707 0-.593.102-1.17.282-1.709V4.958H.957C.347 6.173 0 7.548 0 9c0 1.452.348 2.827.957 4.042l3.007-2.335z"/>
            <path fill="#EA4335" d="M9 3.58c1.321 0 2.508.454 3.44 1.345l2.582-2.58C13.463.891 11.426 0 9 0 5.482 0 2.438 2.017.957 4.958L3.964 7.29C4.672 5.163 6.656 3.58 9 3.58z"/>
          </svg>
          המשך עם Google
        </button>

        {/* <button
          type="button"
          onClick={handleAzureLogin}
          className="btn ghost full-width"
          style={{
            display: "flex",
            alignItems: "center",
            justifyContent: "center",
            gap: "0.5rem",
            border: "1px solid var(--border-subtle)",
            marginTop: "0.5rem"
          }}
        >
          <svg width="18" height="18" viewBox="0 0 23 23">
            <path fill="#f25022" d="M1 1h10v10H1z"/>
            <path fill="#00a4ef" d="M12 1h10v10H12z"/>
            <path fill="#7fba00" d="M1 12h10v10H1z"/>
            <path fill="#ffb900" d="M12 12h10v10H12z"/>
          </svg>
          המשך עם Microsoft
        </button> */}

        <div className="auth-footer">
          <p>
            עדיין אין לך חשבון?{" "}
            <button className="link-btn" onClick={onSwitchToRegister}>
              הירשם עכשיו
            </button>
          </p>
        </div>
      </div>
    </div>
  );
}

export default Login;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user