Skip to content
Snippets Groups Projects
Unverified Commit b3187620 authored by Martin Weise's avatar Martin Weise
Browse files

Implemented parts of the sidecar

parent 2fcd8230
No related branches found
No related tags found
4 merge requests!231CI: Remove build for log-service,!228Better error message handling in the frontend,!223Release of version 1.4.0,!212Resolve "Modify storage solutions in regards to cloud readiness"
FROM python:3.10-alpine
RUN apk add bash curl jq && adduser -D alpine
RUN apk add bash curl jq
WORKDIR /home/alpine
WORKDIR /app
COPY Pipfile Pipfile.lock ./
......@@ -10,13 +10,17 @@ RUN pip install pipenv && \
pipenv install gunicorn && \
pipenv install --system --deploy
USER 1000
COPY ./clients ./clients
COPY ./ds-yml ./ds-yml
COPY ./app.py ./app.py
ENV UPLOAD_ENDPOINT="http://upload-service:1080/api/upload"
ENV S3_STORAGE_ENDPOINT="http://storage-service:9000"
ENV S3_ACCESS_KEY_ID="minioadmin"
ENV S3_SECRET_ACCESS_KEY="minioadmin"
RUN chown -R alpine:alpine ./
USER alpine
RUN ls -la ./clients
EXPOSE 5000
......
......
......@@ -13,6 +13,7 @@ prometheus-flask-exporter = "*"
python-dotenv = "~=1.0"
sqlalchemy-utils = "*"
gunicorn = "*"
boto3 = "*"
[dev-packages]
......
......
This diff is collapsed.
import json
import os
import logging
from urllib.error import URLError, ContentTooShortError, HTTPError
from flasgger import LazyJSONEncoder, Swagger
from flask import Flask, request, Response
from flasgger.utils import swag_from
import urllib.request
from clients.minio_client import MinioClient
from prometheus_flask_exporter import PrometheusMetrics
logging.basicConfig(level=logging.DEBUG)
......@@ -99,33 +99,26 @@ app.json_encoder = LazyJSONEncoder
@app.route("/health", methods=["GET"], endpoint="actuator_health")
@swag_from("ds-yml/health.yml")
def health():
    """Liveness probe: report the service status as a JSON document.

    :return: 200 with body ``{"status": "UP"}``.
    """
    # json.dumps is required here: passing a raw dict to Response makes Flask
    # treat it as an iterable of body chunks, not a serialized JSON payload.
    return Response(json.dumps({"status": "UP"}), mimetype="application/json"), 200
@app.route("/sidecar/import/<string:filename>", methods=["POST"], endpoint="sidecar_import")
@swag_from("ds-yml/import.yml")
def import_csv(filename):
    """Import a .csv from the blob storage into the shared /tmp volume.

    Downloads the object with key ``filename`` from the ``dbrepo-upload``
    bucket via the S3 client.

    :param filename: Object key in the storage service (path parameter).
    :return: 202 on success, 400 when the object could not be downloaded.
    """
    logging.debug('endpoint import csv, filename=%s, body=%s', filename, request)
    minio_client = MinioClient()
    response = minio_client.download_file(filename)
    if response is False:
        return Response(), 400
    # NOTE(review): response is a bool here, so the body is the JSON literal
    # "true" — confirm callers do not expect a richer payload.
    return Response(json.dumps(response)), 202
@app.route("/sidecar/export/<string:filename>", methods=["POST"], endpoint="sidecar_export")
@swag_from("ds-yml/export.yml")
def export_csv(filename):
    """Export a .csv from the shared /tmp volume to the blob storage.

    Uploads ``/tmp/<filename>`` to the ``dbrepo-download`` bucket via the
    S3 client. Renamed from ``import_csv``: the previous name collided with
    the import view function defined above and shadowed it at module level
    (the Flask routes were unaffected only because endpoints are explicit).

    :param filename: Object key to create in the storage service (path parameter).
    :return: 202 on success, 400 when the upload failed.
    """
    logging.debug('endpoint export csv, filename=%s, body=%s', filename, request)
    minio_client = MinioClient()
    response = minio_client.upload_file(filename)
    if response is False:
        return Response(), 400
    return Response(), 202
import os
import boto3
import logging
import sys
from botocore.exceptions import ClientError
class MinioClient:
    """Thin wrapper around a boto3 S3 client for the MinIO blob storage.

    Connection parameters are read from the environment (with local-dev
    defaults); on construction the client verifies that the two expected
    buckets exist and terminates the process otherwise.
    """

    def __init__(self):
        endpoint_url = os.getenv('S3_STORAGE_ENDPOINT', 'http://localhost:9000')
        aws_access_key_id = os.getenv('S3_ACCESS_KEY_ID', 'minioadmin')
        aws_secret_access_key = os.getenv('S3_SECRET_ACCESS_KEY', 'minioadmin')
        logging.info("retrieve file from S3, endpoint_url=%s, aws_access_key_id=%s, aws_secret_access_key=(hidden)",
                     endpoint_url, aws_access_key_id)
        self.client = boto3.client(service_name='s3', endpoint_url=endpoint_url,
                                   aws_access_key_id=aws_access_key_id,
                                   aws_secret_access_key=aws_secret_access_key)
        # Fail fast at startup when the expected buckets are missing.
        self.bucket_exists_or_exit("dbrepo-upload")
        self.bucket_exists_or_exit("dbrepo-download")

    def upload_file(self, filename) -> bool:
        """
        Uploads a file from /tmp to the ``dbrepo-download`` bucket.

        Follows the official API https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-uploading-files.html.

        :param filename: The filename (used as the object key).
        :return: True if the file was uploaded, False on any S3 client error.
        """
        filepath = os.path.join("/tmp/", filename)
        try:
            self.client.upload_file(filepath, "dbrepo-download", filename)
            logging.info("Uploaded .csv %s with key %s", filepath, filename)
            return True
        except ClientError as e:
            logging.error(e)
            return False

    def download_file(self, filename) -> bool:
        """
        Downloads a file from the ``dbrepo-upload`` bucket into /tmp.

        Follows the official API https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-example-download-file.html

        :param filename: The filename (object key).
        :return: True if the file was downloaded and saved, False on any
                 S3 client error (including a missing key).
        """
        filepath = os.path.join("/tmp/", filename)
        try:
            # file_exists re-raises ClientError for a missing key; it must be
            # inside the try so a 404 yields False (-> HTTP 400 in the route)
            # instead of propagating and producing an unhandled 500.
            self.file_exists("dbrepo-upload", filename)
            self.client.download_file("dbrepo-upload", filename, filepath)
            logging.info("Downloaded .csv with key %s into %s", filename, filepath)
            return True
        except ClientError as e:
            logging.error(e)
            return False

    def file_exists(self, bucket, filename):
        """
        Checks that an object exists via a HEAD request.

        :param bucket: The bucket name.
        :param filename: The object key.
        :raises ClientError: When the key is missing (404) or the HEAD
                             request fails for any other reason.
        """
        try:
            self.client.head_object(Bucket=bucket, Key=filename)
        except ClientError as e:
            if e.response["Error"]["Code"] == "404":
                logging.error("Failed to find key %s in bucket %s", filename, bucket)
            else:
                logging.error("Unexpected error when finding key %s in bucket %s: %s", filename, bucket,
                              e.response["Error"]["Code"])
            raise e

    def bucket_exists_or_exit(self, bucket):
        """
        Checks that a bucket exists; terminates the process when it does not.

        :param bucket: The bucket name.
        """
        try:
            self.client.head_bucket(Bucket=bucket)
        except ClientError as e:
            if e.response["Error"]["Code"] == "404":
                logging.error("Failed to find bucket %s", bucket)
            else:
                logging.error("Unexpected error when finding bucket %s: %s", bucket,
                              e.response["Error"]["Code"])
            # Any failure here is fatal: the sidecar cannot operate without
            # its buckets.
            sys.exit(1)
summary: Exports a .csv to the Storage Service
description: |
Exports a specific .csv file to the Storage Service via S3
consumes:
- application/json
produces:
- application/json
parameters:
- in: path
name: filename
description: Name of the object file to export to the Storage Service
required: true
responses:
202:
description: Exported the .csv
content: { }
400:
description: The Storage Service could not be contacted or .csv was not found.
tags:
- sidecar
summary: Imports a .csv from the Storage Service
description: |
  Imports a specific .csv file from the Storage Service via S3
consumes:
- application/json
produces:
- application/json
parameters:
- in: path
  name: filename
  description: Name of the object file to import from the Storage Service
  required: true
responses:
202:
description: Imported the .csv
content: { }
400:
  description: The Storage Service could not be contacted or .csv was not found.
tags:
- sidecar
components:
schemas:
Import:
type: "object"
properties:
filepath:
type: "string"
example: "sample.csv"
\ No newline at end of file
......@@ -26,10 +26,6 @@ upstream ui {
server ui:3000;
}
upstream upload {
server upload-service:1080;
}
upstream search-db-dashboard {
server search-db-dashboard:5601;
}
......@@ -95,15 +91,6 @@ server {
proxy_read_timeout 90;
}
location /api/upload {
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_pass http://upload;
proxy_read_timeout 90;
}
location /broker {
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
......
......
......@@ -264,7 +264,9 @@ services:
- "3600:5000"
environment:
FLASK_DEBUG: ${SEARCH_DEBUG_MODE:-true}
UPLOAD_ENDPOINT: "${UPLOAD_ENDPOINT:-http://upload-service:1080/api/upload/files}"
S3_STORAGE_ENDPOINT: "${STORAGE_ENDPOINT:-http://storage-service:9000}"
S3_ACCESS_KEY_ID: "${STORAGE_USERNAME:-minioadmin}"
S3_SECRET_ACCESS_KEY: ${STORAGE_PASSWORD:-minioadmin}
volumes:
- "${SHARED_FILESYSTEM:-/tmp}:/tmp"
healthcheck:
......@@ -377,9 +379,6 @@ services:
MINIO_BROWSER_REDIRECT_URL: "${STORAGE_BASE_URL:-http://localhost/admin/storage/}"
ports:
- 9000:9000
depends_on:
dbrepo-authentication-service:
condition: service_healthy
healthcheck:
test: [ "CMD", "mc", "ready", "local" ]
interval: 5s
......
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please to comment