diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 8ac81ae9..c4310eee 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -22,7 +22,6 @@ jobs: needs: tag-pr secrets: API_OVH_TOKEN: ${{ secrets.API_OVH_TOKEN }} - OS_PASSWORD: ${{ secrets.OS_PASSWORD }} - OS_PROJECT_NAME: ${{ secrets.OS_PROJECT_NAME }} - OS_USERNAME: ${{ secrets.OS_USERNAME }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} X_OVH_TOKEN: ${{ secrets.PREPROD_OVH_TOKEN }} diff --git a/.github/workflows/preprod.yml b/.github/workflows/preprod.yml index 1cd2991f..de1da6fa 100644 --- a/.github/workflows/preprod.yml +++ b/.github/workflows/preprod.yml @@ -50,12 +50,11 @@ jobs: domain: preprod.basegun.fr secrets: API_OVH_TOKEN: ${{ secrets.API_OVH_TOKEN }} - OS_PASSWORD: ${{ secrets.OS_PASSWORD }} - OS_PROJECT_NAME: ${{ secrets.OS_PROJECT_NAME }} - OS_USERNAME: ${{ secrets.OS_USERNAME }} - X_OVH_TOKEN: ${{ secrets.PREPROD_OVH_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} JOB_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} KUBECONFIG: ${{ secrets.PREPROD_K8_CONFIG }} + X_OVH_TOKEN: ${{ secrets.PREPROD_OVH_TOKEN }} test: runs-on: ubuntu-latest diff --git a/.github/workflows/prod.yml b/.github/workflows/prod.yml index 13b497b1..51521ceb 100644 --- a/.github/workflows/prod.yml +++ b/.github/workflows/prod.yml @@ -69,12 +69,11 @@ jobs: domain: basegun.fr secrets: API_OVH_TOKEN: ${{ secrets.API_OVH_TOKEN }} - OS_PASSWORD: ${{ secrets.OS_PASSWORD }} - OS_PROJECT_NAME: ${{ secrets.OS_PROJECT_NAME }} - OS_USERNAME: ${{ secrets.OS_USERNAME }} - X_OVH_TOKEN: ${{ secrets.PROD_OVH_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} JOB_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - KUBECONFIG: ${{ secrets.PROD_K8_CONFIG }} + KUBECONFIG: ${{ secrets.PROD_K8_CONFIG }} + 
X_OVH_TOKEN: ${{ secrets.PROD_OVH_TOKEN }} test: runs-on: ubuntu-latest diff --git a/.github/workflows/test-on-kube.yml b/.github/workflows/test-on-kube.yml index 0c5dd123..eaa350f2 100644 --- a/.github/workflows/test-on-kube.yml +++ b/.github/workflows/test-on-kube.yml @@ -5,11 +5,9 @@ on: secrets: API_OVH_TOKEN: required: true - OS_PASSWORD: + AWS_ACCESS_KEY_ID: required: true - OS_PROJECT_NAME: - required: true - OS_USERNAME: + AWS_SECRET_ACCESS_KEY: required: true X_OVH_TOKEN: required: true @@ -64,9 +62,8 @@ jobs: --set frontend.image.repository="basegun-frontend" \ --set frontend.image.tag="$(make get-current-tag)-prod" \ --set backend.secret.create="true" \ - --set-string backend.secret.values.OS_USERNAME="${{ secrets.OS_USERNAME }}" \ - --set-string backend.secret.values.OS_PASSWORD="${{ secrets.OS_PASSWORD }}" \ - --set-string backend.secret.values.OS_PROJECT_NAME="${{ secrets.OS_PROJECT_NAME }}" \ + --set-string backend.secret.values.AWS_ACCESS_KEY_ID="${{ secrets.AWS_ACCESS_KEY_ID }}" \ + --set-string backend.secret.values.AWS_SECRET_ACCESS_KEY="${{ secrets.AWS_SECRET_ACCESS_KEY }}" \ --set-string backend.secret.values.X_OVH_TOKEN="${{ secrets.X_OVH_TOKEN }}" \ --set-string backend.secret.values.API_OVH_TOKEN="${{ secrets.API_OVH_TOKEN }}" for i in $(kubectl get deploy -o name); do kubectl rollout status $i -w --timeout=130s; done diff --git a/backend/requirements.txt b/backend/requirements.txt index ed67a7bd..f012d335 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -6,7 +6,6 @@ gelf-formatter==0.2.1 pyyaml>=5.4.1 user-agents==2.2.0 ua-parser==0.10.0 -python-openstackclient==5.8.0 -python-swiftclient==4.0.0 +boto3==1.28.39 torch==1.13.0 torchvision==0.14.0 \ No newline at end of file diff --git a/backend/src/main.py b/backend/src/main.py index 8648906b..6c5ddfd3 100644 --- a/backend/src/main.py +++ b/backend/src/main.py @@ -1,47 +1,43 @@ import os import logging -from logging.handlers import TimedRotatingFileHandler -from 
datetime import datetime import time import json +from logging.handlers import TimedRotatingFileHandler +from datetime import datetime from uuid import uuid4 from typing import Union + +import boto3 +from botocore.client import ClientError from fastapi import BackgroundTasks, Cookie, FastAPI, File, Form, HTTPException, Request, Response, UploadFile from fastapi.responses import PlainTextResponse from fastapi.middleware.cors import CORSMiddleware from gelfformatter import GelfFormatter from user_agents import parse -import swiftclient from src.model import load_model_inference, predict_image CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) -WORKSPACE = os.environ.get("WORKSPACE") -CLOUD_PATH = f'https://storage.gra.cloud.ovh.net/v1/' + \ - 'AUTH_df731a99a3264215b973b3dee70a57af/basegun-public/' + \ - f'uploaded-images/{os.environ["WORKSPACE"]}/' - -def init_variable(var_name: str, path: str) -> str: +def init_variable(key: str, value: str) -> str: """Inits global variable for folder path Args: - var_name (str): variable name in environ - path (str): folder path + key (str): variable key in environ + value (str): value to give if variable does not exist yet Returns: str: final variable value """ - if var_name in os.environ: - VAR = os.environ[var_name] + if key in os.environ: + VAR = os.environ[key] else: - VAR = os.path.abspath(os.path.join( - CURRENT_DIR, - path)) - print("WARNING: The variable "+var_name+" is not set. Using", VAR) - os.makedirs(VAR, exist_ok = True) + VAR = value + print("WARNING: The variable "+key+" is not set. 
Using", VAR) + if os.path.isabs(VAR): + os.makedirs(VAR, exist_ok = True) return VAR @@ -116,48 +112,25 @@ def get_base_logs(user_agent, user_id: str) -> dict: return extras_logging -def upload_image_ovh(content: bytes, img_name: str): - """Uploads an image to basegun ovh swift container + +def upload_image(content: bytes, image_key: str): + """Uploads an image to s3 bucket path uploaded-images/WORKSPACE/img_name where WORKSPACE is dev, preprod or prod Args: content (bytes): file content - img_name (str): name we want to give on ovh + image_key (str): path we want to have """ - num_tries = 0 - LIMIT_TRIES = 5 - image_path = os.path.join(CLOUD_PATH, img_name) start = time.time() - - if not conn: - logger.exception("Variables not set for using OVH swift.", extra={ - "bg_error_type": "NameError" - }) - return - - while num_tries <= LIMIT_TRIES: - num_tries += 1 - extras_logging = { - "bg_date": datetime.now().isoformat(), - "bg_upload_time": time.time()-start, - "bg_image_url": image_path - } - try: - conn.put_object("basegun-public", - f'uploaded-images/{os.environ["WORKSPACE"]}/{img_name}', - contents=content) - # if success, get out of the loop - logger.info("Upload to OVH successful", extra=extras_logging) - break - except Exception as e: - if (num_tries <= LIMIT_TRIES and e.__class__.__name__ == "ClientException"): - # we try uploading another time - time.sleep(30) - continue - else: - extras_logging["bg_error_type"] = e.__class__.__name__ - logger.exception(e, extra=extras_logging) + object = s3.Object(S3_BUCKET_NAME, image_key) + object.put(Body=content) + extras_logging = { + "bg_date": datetime.now().isoformat(), + "bg_upload_time": time.time()-start, + "bg_image_url": image_key + } + logger.info("Upload successful", extra=extras_logging) #################### @@ -184,7 +157,8 @@ def upload_image_ovh(content: bytes, img_name: str): ) # Logs -PATH_LOGS = init_variable("PATH_LOGS", "../logs") +PATH_LOGS = init_variable("PATH_LOGS", + 
os.path.abspath(os.path.join(CURRENT_DIR,"/tmp/logs"))) logger = setup_logs(PATH_LOGS) # Load model @@ -197,6 +171,18 @@ def upload_image_ovh(content: bytes, img_name: str): if not model: raise RuntimeError("Model not found") +# Object storage +S3_URL_ENDPOINT = init_variable("S3_URL_ENDPOINT", "https://s3.gra.io.cloud.ovh.net/") +S3_BUCKET_NAME = "basegun-s3" +S3_PREFIX = os.path.join("uploaded-images/", os.environ['WORKSPACE']) +s3 = boto3.resource("s3", endpoint_url=S3_URL_ENDPOINT) +""" TODO : check if connection successful +try: + s3.meta.client.head_bucket(Bucket=S3_BUCKET_NAME) +except ClientError: + logger.exception("Cannot find s3 bucket ! Are you sure your credentials are correct ?") +""" + # Versions if "versions.json" in os.listdir(os.path.dirname(CURRENT_DIR)): with open("versions.json", "r") as f: @@ -209,27 +195,6 @@ def upload_image_ovh(content: bytes, img_name: str): MODEL_VERSION = "-1" -conn = None -if all(var in os.environ for var in ["OS_USERNAME", "OS_PASSWORD", "OS_PROJECT_NAME"]) : - try: - # Connection to OVH cloud - conn = swiftclient.Connection( - authurl="https://auth.cloud.ovh.net/v3", - user=os.environ["OS_USERNAME"], - key=os.environ["OS_PASSWORD"], - os_options={ - "project_name": os.environ["OS_PROJECT_NAME"], - "region_name": "GRA" - }, - auth_version='3' - ) - conn.get_account() - except Exception as e: - logger.exception(e) -else: - logger.warn('Variables necessary for OVH connection not set !') - - #################### # ROUTES # #################### @@ -272,13 +237,13 @@ async def imageupload( extras_logging["bg_upload_time"] = round(time.time() - date, 2) try: - img_name = str(uuid4()) + os.path.splitext(image.filename)[1] + img_key = os.path.join(S3_PREFIX, + str(uuid4()) + os.path.splitext(image.filename)[1].lower()) img_bytes = image.file.read() # upload image to OVH Cloud - background_tasks.add_task(upload_image_ovh, img_bytes, img_name) - image_path = os.path.join(CLOUD_PATH, img_name) - extras_logging["bg_image_url"] = 
image_path + background_tasks.add_task(upload_image, img_bytes, img_key) + extras_logging["bg_image_url"] = img_key # set user id if not user_id: @@ -302,7 +267,7 @@ async def imageupload( logger.info("Identification request", extra=extras_logging) return { - "path": image_path, + "path": img_key, "label": label, "confidence": confidence, "confidence_level": extras_logging["bg_confidence_level"] diff --git a/backend/tests/test_api.py b/backend/tests/test_api.py index d84ec6b1..a27ff7ff 100644 --- a/backend/tests/test_api.py +++ b/backend/tests/test_api.py @@ -1,26 +1,52 @@ import unittest import os import time +import boto3 +import json + from io import BytesIO import requests from PIL import Image, ImageChops +from src.main import app, S3_BUCKET_NAME, S3_URL_ENDPOINT +from fastapi import FastAPI +from fastapi.testclient import TestClient -class TestModel(unittest.TestCase): - def __init__(self, *args, **kwargs): - super(TestModel, self).__init__(*args, **kwargs) - self.url = "http://localhost:5000" + +client = TestClient(app) + +BUCKET_POLICY = { + 'Version': '2012-10-17', + 'Statement': [{ + 'Sid': 'AddPerm', + 'Effect': 'Allow', + 'Principal': '*', + 'Action': ['s3:GetObject'], + 'Resource': f"arn:aws:s3:::{S3_BUCKET_NAME}/*" + }] +} + + +def create_bucket(): + s3 = boto3.resource("s3", endpoint_url=S3_URL_ENDPOINT) + bucket = s3.Bucket(S3_BUCKET_NAME) + if bucket.creation_date is None: + bucket.create() + bucket.Policy().put(Policy=json.dumps(BUCKET_POLICY)) + + +class TestModel(unittest.TestCase): def test_home(self): """Checks that the route / is alive""" - r = requests.get(self.url) - self.assertEqual(r.text, "Basegun backend") + response = client.get("/") + self.assertEqual(response.text, "Basegun backend") def test_version(self): """Checks that the route /version sends a version""" - r = requests.get(self.url + '/version') - self.assertNotEqual(r.text, "-1") - self.assertEqual(len(r.text.split('.')), 2) # checks version has format X.Y + response = 
client.get("/version") + self.assertNotEqual(response.text, "-1") + self.assertEqual(len(response.text.split('.')), 2) # checks version has format X.Y def check_log_base(self, log): self.assertTrue( @@ -31,16 +57,17 @@ def check_log_base(self, log): self.assertEqual(log["level"], 6) self.assertTrue(log["_bg_model"].startswith("EffB")) - def test_upload_and_logs(self): + def test_upload(self): """Checks that the file upload works properly""" + if os.environ["WORKSPACE"]=="dev": + create_bucket() path = os.path.join( os.path.dirname(os.path.abspath(__file__)), "revolver.jpg") geoloc = "12.666,7.666" - self.assertTrue("OS_USERNAME" in os.environ) with open(path, 'rb') as f: - r = requests.post(self.url + "/upload", + r = client.post("/upload", files={"image": f}, data={"date": time.time(), "geolocation": geoloc}) self.assertEqual(r.status_code, 200) @@ -50,21 +77,12 @@ def test_upload_and_logs(self): self.assertEqual(res["label"], "revolver") self.assertAlmostEqual(res["confidence"], 98.43, places=1) self.assertTrue(res["confidence_level"], "high") - self.assertTrue("ovh" in res["path"]) - # checks that written file is exactly the same as input file - time.sleep(10) - response = requests.get(res["path"]) - with Image.open(path) as image_one: - with Image.open(BytesIO(response.content)) as image_two: - self.assertEqual(image_one.size, image_two.size) - diff = ImageChops.difference(image_one, image_two) - self.assertFalse(diff.getbbox()) # checks that the result is written in logs - r = requests.get(self.url + "/logs") + r = client.get("/logs") self.assertEqual(r.status_code, 200) - # checks the latest log "Upload to OVH" + # checks the latest log with validates upload to object storage self.assertEqual(r.json()[0]["_bg_image_url"], r.json()[1]["_bg_image_url"]) - self.assertEqual(r.json()[0]["short_message"], "Upload to OVH successful") + self.assertEqual(r.json()[0]["short_message"], "Upload successful") # checks the previous log "Identification request" log = 
r.json()[1] self.check_log_base(log) @@ -81,11 +99,11 @@ def test_feedback_and_logs(self): label = "revolver" confidence_level = "high" image_url = "https://storage.gra.cloud.ovh.net/v1/test" - r = requests.post(self.url + "/identification-feedback", + r = client.post("/identification-feedback", json={"image_url": image_url, "feedback": True, "confidence": confidence, "label": label, "confidence_level": confidence_level}) self.assertEqual(r.status_code, 200) - r = requests.get(self.url + "/logs") + r = client.get("/logs") self.assertEqual(r.status_code, 200) log = r.json()[0] self.check_log_base(log) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 0a42b30c..2237f26b 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -12,10 +12,9 @@ services: target: ${BUILD_TARGET:-dev} container_name: basegun-backend environment: - - PATH_LOGS=/tmp/logs - - OS_USERNAME - - OS_PASSWORD - - OS_PROJECT_NAME + - S3_URL_ENDPOINT=${S3_URL_ENDPOINT:-http://minio:9000} + - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-minioadmin} + - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-minioadmin} - http_proxy - https_proxy - UVICORN_LOG_LEVEL=${UVICORN_LOG_LEVEL} @@ -46,7 +45,13 @@ services: ports: - 8080:80 # if BUILD_TARGET = prod - 3000:5173 - # - 4173:4173 volumes: - $PWD/frontend/src:/app/src - /app/node_modules + + minio: + image: minio/minio + command: server /data --console-address ":9001" + ports: + - 9000:9000 + - 9001:9001 diff --git a/docker-compose-prod.yml b/docker-compose-prod.yml index dad8464e..afd2f375 100644 --- a/docker-compose-prod.yml +++ b/docker-compose-prod.yml @@ -11,9 +11,8 @@ services: container_name: basegun-backend environment: - PATH_LOGS=/tmp/logs - - OS_USERNAME - - OS_PASSWORD - - OS_PROJECT_NAME + - AWS_ACCESS_KEY_ID + - AWS_SECRET_ACCESS_KEY - WORKSPACE=${WORKSPACE:-prod} image: basegun-backend:${TAG}-prod ports: diff --git a/infra/kube/helm/templates/sops.enc.yaml b/infra/kube/helm/templates/sops.enc.yaml index 
29b2ea56..7c8c04f1 100644 --- a/infra/kube/helm/templates/sops.enc.yaml +++ b/infra/kube/helm/templates/sops.enc.yaml @@ -8,17 +8,15 @@ spec: - name: ENC[AES256_GCM,data:C7eCSGXEdWtuVa+WYplXamERpCuF2A==,iv:oDDv384GFK0ynFybO0GJXKPjXPUe/++jxUh6oPgSROI=,tag:7auyaf2Pk1gUNCKI+lQjxA==,type:str] stringData: API_OVH_TOKEN: ENC[AES256_GCM,data:9fDrMsKCWW4qU5EFsaWhQdA6TIWNueA5sSknmUydichzF1zczSj3nrPtfF7O+dwuWqXUxg==,iv:E8vw8EdDzAigbonjNa57RfTfVpGG9K/Xil+yIAAxPSE=,tag:4qqNEc5RJb1w/WL2dIvt+w==,type:str] - OS_PASSWORD: ENC[AES256_GCM,data:vGHEXzNVjviNsyOam48tvdLbvM+XGBwo204jiH6AruY=,iv:4QXRGhyRjQYyovR68tJzbhzzBiOPHsyNBvruCtk8pl0=,tag:snlHGsj+i5nEIV4aeFz2nQ==,type:str] - OS_PROJECT_NAME: ENC[AES256_GCM,data:Oe4oIqDnNMxjBA1xAHDuSQ==,iv:3pfX8fZ/3hy5LAP0Z0C+joleY33WnXAHUKa377rObto=,tag:CGqewlAyweMW5BSB80qVrg==,type:str] - OS_USERNAME: ENC[AES256_GCM,data:T1BTSS3/nRKMTS7Nk5ZCYi4=,iv:BtWpyd/zxiQPogucbpSzrR6Nn6oIHdbCCpkNhXYzxo8=,tag:mnd+6VxolKMO0vUR/acy4Q==,type:str] X_OVH_TOKEN: ENC[AES256_GCM,data:Qz1uggOKElNvNBS9qxDfybUMBYEIOfuppySaoXEBx00jWv0u,iv:+cklaR+WWjjJLnD1gmZ38atrqCPNrje0BWofWJstIWA=,tag:czA3G3fU4VC7njajF7xaRw==,type:str] + AWS_ACCESS_KEY_ID: ENC[AES256_GCM,data:v/11MAFHVjkdMst9ehcniM/8rPelArIoA7gbCuoj/8w=,iv:XNJV0ULPpOTrqtbSKx0nsB6wqdCSrzuXEjJfqa/+mbc=,tag:gakaj3onimHoywKl36BjaQ==,type:str] + AWS_SECRET_ACCESS_KEY: ENC[AES256_GCM,data:HFoCNvfwGovBX5dqPTo2Oq4XvL6rnZh7CuX6rOsSaOA=,iv:Oha7tkWiS3catm7xrzWl0qJUOfS6N8EHyW0VDSB6RLA=,tag:pIDqH6Y2E1qIyjnlyYypOw==,type:str] - name: ENC[AES256_GCM,data:N8b/GxqS/MdpK/ZH1cFzYyppfw==,iv:HIKkI1y6FIVP323NhZMjrf1Ulp7N29jQ0zlMIv3Y7gg=,tag:ZVG+z7ncvoNsi47lofXjZw==,type:str] stringData: API_OVH_TOKEN: ENC[AES256_GCM,data:T9TY8BUSKH2fJfhcSX71mD+kpB7Ac9WVNyYOIV1FQpumc5XNsVFad015f3MizRn+rJiHkQ==,iv:bZ74ywut3HGCMbb+9US8n9VWQt5YJmPY1hN1+PefoJY=,tag:0cRa6vMOyWOx5Dd1sqigtg==,type:str] - OS_PASSWORD: ENC[AES256_GCM,data:uT2J6nJyIZEpXwN9L4lvpoMDv/hZXkIbfyZQK5qVRaM=,iv:ZwgDZOaS7Pt4+/1XBZ4sOshuyuSMIkvSPeadZMk2OSQ=,tag:n3djDDeafNArb+p+nF1pGg==,type:str] 
- OS_PROJECT_NAME: ENC[AES256_GCM,data:TXud2R//KeDgYY1NUH8NnQ==,iv:MydfYwEV58wNKpSn9Mj7tP40RDdOhini4zbByNdvf00=,tag:wI0c8ZCjTQiqmvDRj3p9/A==,type:str] - OS_USERNAME: ENC[AES256_GCM,data:Qule8RjaVy5+zfAtdhxYEQM=,iv:8qPLyyjn1Vr+TgM5Vp9lXLsI8MGExXXecScWaRXeSE0=,tag:j2dcIrxE8fecfr31Rtq3SQ==,type:str] X_OVH_TOKEN: ENC[AES256_GCM,data:5Zsze+3JSqxle08ePuvHyHDfTelvnrQ2/INbbbwcvOHvPu/9,iv:xlcyVOkwGl0QGAFlWUT+/2LR4lGLcAGDzswkuq6cDUU=,tag:UviczjiBLQ61jEbpbE9YXA==,type:str] + AWS_ACCESS_KEY_ID: ENC[AES256_GCM,data:v/11MAFHVjkdMst9ehcniM/8rPelArIoA7gbCuoj/8w=,iv:XNJV0ULPpOTrqtbSKx0nsB6wqdCSrzuXEjJfqa/+mbc=,tag:gakaj3onimHoywKl36BjaQ==,type:str] + AWS_SECRET_ACCESS_KEY: ENC[AES256_GCM,data:HFoCNvfwGovBX5dqPTo2Oq4XvL6rnZh7CuX6rOsSaOA=,iv:Oha7tkWiS3catm7xrzWl0qJUOfS6N8EHyW0VDSB6RLA=,tag:pIDqH6Y2E1qIyjnlyYypOw==,type:str] sops: kms: [] gcp_kms: []