diff --git a/.gitignore b/.gitignore
index 6769e21..299e09a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+auth
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/README.md b/README.md
index 6335ccd..ddef416 100644
--- a/README.md
+++ b/README.md
@@ -80,3 +80,28 @@ The easiest way to run ChatGPT Clone is by using docker
 docker-compose up
 ```
 
+### Kubernetes
+
+1. Run `kubectl apply -f kubernetes/`. This creates a Deployment and a ClusterIP Service in the namespace "chatgpt-clone".
+2. Create a secret containing your OpenAI API key with: `kubectl create secret generic openai-api-key --from-literal=openai-api-key=[YOUR_OPENAI_TOKEN] -n chatgpt-clone`
+3. Either change the Service type to LoadBalancer, expose it through an Ingress (don't forget to password-protect it!), or port-forward it to your local machine: `kubectl port-forward deployment/chatgpt-clone 1338:1338 -n chatgpt-clone`
+
+#### Kubernetes (optional): Hosting at a domain (Ingress)
+
+If you want to host the app at a domain, follow these steps:
+
+1. Install the NGINX ingress controller, if you haven't already:
+```
+helm upgrade --install ingress-nginx ingress-nginx/ingress-nginx \
+  --namespace ingress-nginx \
+  --create-namespace
+```
+2. Create a basic-auth secret, which will password-protect the app:
+   - `htpasswd -c auth user` (you might need to `apt-get install apache2-utils` first)
+   - Enter your password
+   - This creates a file named "auth"
+3. Submit the credentials to Kubernetes as a secret with:
+   - `kubectl create secret generic basic-auth --from-file=auth -n chatgpt-clone`
+4. Insert your domain in the `kubernetes/nginxingress/chatgpt-clone-ingress.yaml` file.
+5. Point your domain to your Kubernetes cluster's IP.
+6. Run `kubectl apply -f kubernetes/nginxingress/`
\ No newline at end of file
diff --git a/config.json b/config.json
index 87580ad..b8a4574 100644
--- a/config.json
+++ b/config.json
@@ -8,7 +8,7 @@
   "openai_api_base": "https://api.openai.com",
 
   "proxy": {
-    "enable": true,
+    "enable": false,
     "http": "127.0.0.1:7890",
     "https": "127.0.0.1:7890"
   }
diff --git a/docker-compose.yml b/docker-compose.yml
index 6d818eb..1987cee 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,10 +2,13 @@ version: '3.5'
 
 services:
   chatgpt-clone:
-    image: chatgpt-clone
+    image: guestros/chatgpt-clone:latest
     container_name: chatgpt-clone
     build:
       context: .
       dockerfile: Dockerfile
     ports:
       - "1338:1338"
+    environment:
+      - OPENAI_API_KEY=akjshdjkhasd
+      - OPENAI_API_BASE=https://api.openai.com
diff --git a/kubernetes/chatgpt-clone-deployment.yaml b/kubernetes/chatgpt-clone-deployment.yaml
new file mode 100644
index 0000000..7279e42
--- /dev/null
+++ b/kubernetes/chatgpt-clone-deployment.yaml
@@ -0,0 +1,58 @@
+apiVersion: v1
+kind: Namespace
+metadata:
+  name: chatgpt-clone
+---
+apiVersion: v1
+kind: Service
+metadata:
+  labels:
+    app: chatgpt-clone
+  name: chatgpt-clone-service
+  namespace: chatgpt-clone
+spec:
+  ports:
+    - name: "1338"
+      port: 1338
+      targetPort: 1338
+  selector:
+    app: chatgpt-clone
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  labels:
+    app: chatgpt-clone
+  name: chatgpt-clone
+  namespace: chatgpt-clone
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: chatgpt-clone
+  template:
+    metadata:
+      labels:
+        app: chatgpt-clone
+    spec:
+      containers:
+        - image: guestros/chatgpt-clone:latest
+          name: chatgpt-clone
+          ports:
+            - containerPort: 1338
+          resources:
+            requests:
+              cpu: 10m
+              memory: 50Mi
+            limits:
+              cpu: "500m"
+              memory: 500Mi
+          env:
+            - name: OPENAI_API_BASE
+              value: "https://api.openai.com"
+            - name: OPENAI_API_KEY
+              valueFrom:
+                secretKeyRef:
+                  name: openai-api-key
+                  key: openai-api-key
+      restartPolicy: Always
\ No newline at end of file
diff --git a/kubernetes/nginxingress/chatgpt-clone-ingress.yaml b/kubernetes/nginxingress/chatgpt-clone-ingress.yaml
new file mode 100644
index 0000000..874674a
--- /dev/null
+++ b/kubernetes/nginxingress/chatgpt-clone-ingress.yaml
@@ -0,0 +1,36 @@
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+  name: ingress-chatgpt-clone
+  namespace: chatgpt-clone
+  annotations:
+    nginx.ingress.kubernetes.io/ssl-redirect: "true"
+    nginx.ingress.kubernetes.io/force-ssl-redirect: "true"
+    # nginx.ingress.kubernetes.io/enable-cors: "true"
+    # nginx.ingress.kubernetes.io/cors-allow-origin: "*"
+    # nginx.ingress.kubernetes.io/configuration-snippet: |
+    #   more_set_headers "X-Robots-Tag: noindex, nofollow";
+    cert-manager.io/cluster-issuer: "letsencrypt-prod"
+    # type of authentication
+    nginx.ingress.kubernetes.io/auth-type: basic
+    # name of the secret that contains the user/password definitions
+    nginx.ingress.kubernetes.io/auth-secret: basic-auth
+    # message to display with an appropriate context why the authentication is required
+    nginx.ingress.kubernetes.io/auth-realm: 'Authentication Required'
+spec:
+  ingressClassName: nginx
+  tls:
+    - hosts:
+        - chatgpt.datafortress.cloud
+      secretName: tls-secret
+  rules:
+    - host: chatgpt.datafortress.cloud
+      http:
+        paths:
+          - path: /
+            pathType: Prefix
+            backend:
+              service:
+                name: chatgpt-clone-service
+                port:
+                  number: 1338
\ No newline at end of file
diff --git a/server/backend.py b/server/backend.py
index 73bba12..a69c0da 100644
--- a/server/backend.py
+++ b/server/backend.py
@@ -1,12 +1,11 @@
-from json import dumps
+import os
+from datetime import datetime
+from hashlib import sha256
+from json import dumps, loads
 from time import time
+
 from flask import request
-from hashlib import sha256
-from datetime import datetime
-from requests import get
-from requests import post
-from json import loads
-import os
+from requests import get, post
 
 from server.config import special_instructions
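
The `helm upgrade --install` step added to the README above assumes the `ingress-nginx` chart repository is already known to Helm, and the Deployment manifest expects the `openai-api-key` secret to exist before the pod can start. A minimal sketch of that prerequisite plus a quick health check, assuming the official ingress-nginx chart URL and the "chatgpt-clone" namespace created by the manifests:

```
# Register the ingress-nginx chart repository (skip if it is already added)
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo update

# Confirm the objects created by kubernetes/ came up as expected
kubectl get pods -n chatgpt-clone
kubectl get svc chatgpt-clone-service -n chatgpt-clone
kubectl get ingress -n chatgpt-clone
```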