Refactor Docker setup and migrate to PostgreSQL

Streamlined Dockerfiles with clearer ENV variables and build args. Switched backend database from MySQL to PostgreSQL, updated configurations accordingly, and added robust Docker Compose services for better orchestration, including health checks and persistent storage.
This commit is contained in:
GotthardG
2025-04-09 15:09:22 +02:00
parent 248085b3c4
commit bb6cca4f23
11 changed files with 192 additions and 95 deletions

View File

@@ -15,6 +15,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
gpg && \ gpg && \
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
# May need to install PostgreSQL and run the server within Docker
# Download and install the msodbcsql18 driver for arm64-compatible base image # Download and install the msodbcsql18 driver for arm64-compatible base image
RUN apt-get update && apt-get install -y --no-install-recommends unixodbc-dev curl apt-transport-https gnupg && \ RUN apt-get update && apt-get install -y --no-install-recommends unixodbc-dev curl apt-transport-https gnupg && \
curl -sSL https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/microsoft.asc.gpg && \ curl -sSL https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/microsoft.asc.gpg && \

View File

@@ -26,7 +26,9 @@ else: # Default is dev
db_host = os.getenv("DB_HOST", "localhost") db_host = os.getenv("DB_HOST", "localhost")
db_name = os.getenv("DB_NAME", "aare_dev_db") db_name = os.getenv("DB_NAME", "aare_dev_db")
SQLALCHEMY_DATABASE_URL = f"mysql://{db_username}:{db_password}@{db_host}/{db_name}" SQLALCHEMY_DATABASE_URL = (
f"postgresql://{db_username}:{db_password}@{db_host}/{db_name}"
)
# Create engine and session # Create engine and session
engine = create_engine(SQLALCHEMY_DATABASE_URL) engine = create_engine(SQLALCHEMY_DATABASE_URL)

View File

@@ -1,10 +1,10 @@
{ {
"ssl_cert_path": "ssl/cert.pem", "ssl_cert_path": "/app/backend/ssl/cert.pem",
"ssl_key_path": "ssl/key.pem", "ssl_key_path": "/app/backend/ssl/key.pem",
"OPENAPI_URL": "https://127.0.0.1:8000/openapi.json", "OPENAPI_URL": "https://backend:8000/openapi.json",
"SCHEMA_PATH": "./src/openapi.json", "SCHEMA_PATH": "/app/src/openapi.json",
"OUTPUT_DIRECTORY": "./openapi", "OUTPUT_DIRECTORY": "/app/openapi",
"PORT": 8000, "PORT": 8000,
"SSL_KEY_PATH": "../backend/ssl/key.pem", "SSL_KEY_PATH": "/app/backend/ssl/key.pem",
"SSL_CERT_PATH": "../backend/ssl/cert.pem" "SSL_CERT_PATH": "/app/backend/ssl/cert.pem"
} }

View File

@@ -63,7 +63,7 @@ def run_server():
print(f"[INFO] Running on port {port}") print(f"[INFO] Running on port {port}")
uvicorn.run( uvicorn.run(
app, app,
host="127.0.0.1" if environment in ["dev", "test"] else "0.0.0.0", host="0.0.0.0" if environment in ["dev", "test"] else "0.0.0.0",
port=port, port=port,
log_level="debug", log_level="debug",
ssl_keyfile=key_path, ssl_keyfile=key_path,

View File

@@ -28,7 +28,8 @@ dependencies = [
"uvicorn==0.23.1", "uvicorn==0.23.1",
"python-dateutil~=2.8.2", "python-dateutil~=2.8.2",
"tomli>=2.0.1", "tomli>=2.0.1",
"python-dotenv" "python-dotenv",
"psycopg2-binary"
] ]
[tool.pytest.ini_options] [tool.pytest.ini_options]
norecursedirs = ["backend/python-client"] norecursedirs = ["backend/python-client"]

View File

@@ -1,6 +1,7 @@
version: "3.9" version: "3.9"
services: services:
backend: backend:
container_name: backend
build: build:
context: . # Build the image from the parent directory context: . # Build the image from the parent directory
dockerfile: backend/Dockerfile dockerfile: backend/Dockerfile
@@ -11,22 +12,76 @@ services:
- ./backend:/app/backend # Map backend directory to /app/backend - ./backend:/app/backend # Map backend directory to /app/backend
- ./app:/app/app # Map app directory to /app/app - ./app:/app/app # Map app directory to /app/app
- ./config_dev.json:/app/backend/config_dev.json # Explicitly map config_dev.json - ./config_dev.json:/app/backend/config_dev.json # Explicitly map config_dev.json
- ./backend/ssl:/app/backend/ssl # mount SSL files into the container
working_dir: /app/backend # Set working directory to backend/ working_dir: /app/backend # Set working directory to backend/
command: python main.py # Command to run main.py command: python main.py # Command to run main.py
depends_on: # ⬅️ New addition: wait until postgres is started
- postgres
healthcheck:
test: [ "CMD-SHELL", "curl -k -f https://localhost:8000/openapi.json || exit 1" ]
interval: 5s
timeout: 5s
retries: 5
environment: # ⬅️ Provide DB info to your backend
ENVIRONMENT: dev
DB_USERNAME: dev_user
DB_PASSWORD: dev_password
DB_HOST: postgres
DB_NAME: aare_dev_db
postgres: # ⬅️ New service (our PostgreSQL database)
image: postgres:16
environment:
POSTGRES_USER: dev_user
POSTGRES_PASSWORD: dev_password
POSTGRES_DB: aare_dev_db
ports:
- "5432:5432"
volumes:
- pgdata:/var/lib/postgresql/data
frontend: frontend:
depends_on:
backend:
condition: service_healthy
build: build:
context: ./frontend context: ./frontend
dockerfile: Dockerfile dockerfile: Dockerfile
args:
- VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
- VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
- VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
- NODE_ENV=${NODE_ENV}
ports: ports:
- "5173:5173" # Map container port 5173 to host - "5173:5173"
volumes:
- ./frontend:/app
- /app/node_modules # ⬅️ exclusion: keeps the Docker-installed node_modules instead of the host's
- ./backend/ssl:/app/backend/ssl
- ./backend/config_dev.json:/app/backend/config_${ENVIRONMENT}.json # Dynamically maps config based on environment
command: sh -c "npm run dev & ENVIRONMENT=dev npm run watch:openapi"
logistics_frontend: logistics_frontend:
build: build:
context: ./logistics context: ./logistics
dockerfile: Dockerfile dockerfile: Dockerfile
args: # 👈 explicitly pass build args from .env
- VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
- VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
- VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
- NODE_ENV=${NODE_ENV}
ports: ports:
- "3000:3000" - "3000:3000"
depends_on: depends_on:
- frontend # Ensure OpenAPI models are available - frontend # Ensure OpenAPI models are available
volumes:
- ./logistics/src:/app/src # explicitly for active dev (hot reload)
- ./backend/ssl:/app/backend/ssl # mount SSL files into the container
environment:
- VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
volumes: # ⬅️ Persistent storage for PostgreSQL data
pgdata:

View File

@@ -3,14 +3,27 @@ FROM node:18
# Set working directory # Set working directory
WORKDIR /app WORKDIR /app
# Setup build args clearly
ARG VITE_OPENAPI_BASE_DEV
ARG VITE_SSL_KEY_PATH
ARG VITE_SSL_CERT_PATH
ARG NODE_ENV=development
ENV VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
ENV VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
ENV VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
ENV NODE_ENV=${NODE_ENV}
# Copy dependency files and install dependencies # Copy dependency files and install dependencies
COPY package*.json ./ COPY package*.json ./
RUN npm install RUN npm install --prefer-offline --no-audit --progress=false
# Copy rest of the code and build the application # Copy rest of the code and build the application
COPY . . COPY . .
RUN npm run build
# Use a simple HTTP server to serve the built static files # Use a simple HTTP server to serve the built static files
EXPOSE 5173 EXPOSE 5173
CMD ["npx", "vite", "preview", "--port", "5173"] #CMD ["npx", "vite", "preview", "--port", "5173"]
CMD ["npm", "run", "dev"]

View File

@@ -15,13 +15,16 @@ if (!process.env.ENVIRONMENT) {
} }
// Determine environment and configuration file // Determine environment and configuration file
const nodeEnv = process.env.ENVIRONMENT || 'dev';
const configFile = `config_${nodeEnv}.json`; const nodeEnv = process.env.ENVIRONMENT || 'dev'; // Dynamically set from ENV variables
const configFile = `config_${nodeEnv}.json`; // dynamically resolve the config file name
const configFilePath = path.resolve('./backend/', configFile); // Explicitly correct path resolution in Docker
// Load configuration file // Load configuration file
let config; let config;
try { try {
config = JSON.parse(fs.readFileSync(path.resolve('../', configFile), 'utf8')); config = JSON.parse(fs.readFileSync(configFilePath, 'utf8'));
} catch (error) { } catch (error) {
console.error(`❌ Failed to read configuration file '${configFile}': ${error.message}`); console.error(`❌ Failed to read configuration file '${configFile}': ${error.message}`);
process.exit(1); process.exit(1);
@@ -42,12 +45,24 @@ for (const field of requiredFields) {
} }
} }
// Resolve paths from the config const OPENAPI_BASE_URL = config.OPENAPI_URL; // or process.env.VITE_OPENAPI_BASE_DEV || config.OPENAPI_URL;
const OPENAPI_URL = config.OPENAPI_URL;
const SCHEMA_PATH = path.resolve(config.SCHEMA_PATH); const SCHEMA_PATH = config.SCHEMA_PATH; // 💡 already absolute
const OUTPUT_DIRECTORY = path.resolve(config.OUTPUT_DIRECTORY); const OUTPUT_DIRECTORY = config.OUTPUT_DIRECTORY; // 💡 already absolute
const SSL_KEY_PATH = path.resolve(config.SSL_KEY_PATH); const SSL_KEY_PATH = config.SSL_KEY_PATH;
const SSL_CERT_PATH = path.resolve(config.SSL_CERT_PATH); const SSL_CERT_PATH = config.SSL_CERT_PATH;
function assertDirExists(dirPath) {
if (!fs.existsSync(dirPath)) {
console.error(`❌ Directory does not exist inside Docker: ${dirPath}`);
process.exit(1);
}
}
// validate that the required directories exist at the container paths:
assertDirExists(path.dirname(SCHEMA_PATH));
assertDirExists(OUTPUT_DIRECTORY);
assertDirExists(path.dirname(SSL_KEY_PATH));
// Log configuration // Log configuration
console.log(`[INFO] Environment: ${nodeEnv}`); console.log(`[INFO] Environment: ${nodeEnv}`);
@@ -96,7 +111,7 @@ async function fetchAndGenerate() {
}; };
const res = await new Promise((resolve, reject) => { const res = await new Promise((resolve, reject) => {
https.get(OPENAPI_URL, options, resolve).on('error', reject); https.get(OPENAPI_BASE_URL, options, resolve).on('error', reject);
}); });
let data = ''; let data = '';
@@ -104,21 +119,18 @@ async function fetchAndGenerate() {
data += chunk; data += chunk;
}); });
res.on('end', async () => { await new Promise((resolve, reject) => {
try { res.on('end', resolve);
res.on('error', reject);
});
// Save schema file // Save schema file
fs.writeFileSync(SCHEMA_PATH, data, 'utf8'); fs.writeFileSync(SCHEMA_PATH, data, 'utf8');
console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`); console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`);
console.log("🧼 Cleaning output directory..."); console.log("🧼 Cleaning output directory...");
await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true }); await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true });
console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`); console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`);
if (!fs.existsSync(OUTPUT_DIRECTORY)) {
console.log(`✅ Confirmed removal of ${OUTPUT_DIRECTORY}`);
} else {
console.error(`❌ Failed to remove output directory: ${OUTPUT_DIRECTORY}`);
}
// Generate services // Generate services
const command = `npx openapi -i ${SCHEMA_PATH} -o ${OUTPUT_DIRECTORY}`; const command = `npx openapi -i ${SCHEMA_PATH} -o ${OUTPUT_DIRECTORY}`;
@@ -132,13 +144,12 @@ async function fetchAndGenerate() {
} }
// Copy the generated OpenAPI models to ../logistics/openapi // Copy the generated OpenAPI models to ../logistics/openapi
const targetDirectory = path.resolve('../logistics/openapi'); // Adjust as per logistics directory const targetDirectory = path.resolve('../logistics/openapi');
console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`); console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`);
await fs.promises.rm(targetDirectory, { recursive: true, force: true });
await fs.promises.mkdir(targetDirectory, { recursive: true });
await fs.promises.rm(targetDirectory, { recursive: true, force: true }); // Clean target directory // Recursive copy helper
await fs.promises.mkdir(targetDirectory, { recursive: true }); // Ensure the directory exists
// Copy files from OUTPUT_DIRECTORY to the target directory recursively
const copyRecursive = async (src, dest) => { const copyRecursive = async (src, dest) => {
const entries = await fs.promises.readdir(src, { withFileTypes: true }); const entries = await fs.promises.readdir(src, { withFileTypes: true });
for (const entry of entries) { for (const entry of entries) {
@@ -154,19 +165,15 @@ async function fetchAndGenerate() {
} }
}; };
await copyRecursive(OUTPUT_DIRECTORY, targetDirectory); await copyRecursive(OUTPUT_DIRECTORY, targetDirectory);
console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`); console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`);
} catch (error) { } catch (error) {
console.error(`❌ Error during schema processing or generation: ${error.message}`); console.error(`❌ Error during schema processing or generation: ${error.message}`);
} } finally {
isGenerating = false;
});
} catch (error) {
console.error(`❌ Failed to fetch OpenAPI schema: ${error.message}`);
isGenerating = false; isGenerating = false;
} }
} }
// Backend directory based on the environment // Backend directory based on the environment
const backendDirectory = (() => { const backendDirectory = (() => {
switch (nodeEnv) { switch (nodeEnv) {
@@ -176,7 +183,7 @@ const backendDirectory = (() => {
return path.resolve('/home/jungfrau/aaredb/backend/app'); // Test path return path.resolve('/home/jungfrau/aaredb/backend/app'); // Test path
case 'dev': case 'dev':
default: default:
return path.resolve('/Users/gotthardg/PycharmProjects/aaredb/backend/app'); // Development path return path.resolve('/app/backend'); // Development path
} }
})(); })();

View File

@@ -1,5 +1,5 @@
{ {
"name": "heidi-frontend-v2", "name": "Aare Web",
"private": true, "private": true,
"version": "0.0.0", "version": "0.0.0",
"type": "module", "type": "module",

View File

@@ -2,6 +2,17 @@ FROM node:18-alpine
WORKDIR /app WORKDIR /app
# Setup build args clearly
ARG VITE_OPENAPI_BASE_DEV
ARG VITE_SSL_KEY_PATH
ARG VITE_SSL_CERT_PATH
ARG NODE_ENV=development
ENV VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
ENV VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
ENV VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
ENV NODE_ENV=${NODE_ENV}
# Copy only the necessary package files first # Copy only the necessary package files first
COPY package*.json ./ COPY package*.json ./
RUN npm install RUN npm install
@@ -14,3 +25,8 @@ COPY . .
# Build the application # Build the application
RUN npm run build RUN npm run build
# Use a simple HTTP server to serve the built static files
EXPOSE 3000
CMD ["npm", "run", "start-dev"]

View File

@@ -16,3 +16,4 @@ mysqlclient~=2.1.1
python-multipart~=0.0.6 python-multipart~=0.0.6
uvicorn==0.23.1 uvicorn==0.23.1
python-dotenv python-dotenv
psycopg2-binary