Refactor Docker setup and migrate to PostgreSQL

Streamlined the Dockerfiles with clearer ENV variables and build args. Switched the backend database from MySQL to PostgreSQL, updated the configuration accordingly, and extended the Docker Compose services with health checks and persistent storage for more reliable orchestration.
GotthardG 2025-04-09 15:09:22 +02:00
parent 248085b3c4
commit bb6cca4f23
11 changed files with 192 additions and 95 deletions

View File

@@ -15,6 +15,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
gpg && \
rm -rf /var/lib/apt/lists/*
# May need to install PostgreSQL and run the server within the Docker container
# Download and install the msodbcsql18 driver for the arm64-compatible base image
RUN apt-get update && apt-get install -y --no-install-recommends unixodbc-dev curl apt-transport-https gnupg && \
curl -sSL https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/microsoft.asc.gpg && \

View File

@@ -26,7 +26,9 @@ else: # Default is dev
db_host = os.getenv("DB_HOST", "localhost")
db_name = os.getenv("DB_NAME", "aare_dev_db")
SQLALCHEMY_DATABASE_URL = f"mysql://{db_username}:{db_password}@{db_host}/{db_name}"
SQLALCHEMY_DATABASE_URL = (
f"postgresql://{db_username}:{db_password}@{db_host}/{db_name}"
)
# Create engine and session
engine = create_engine(SQLALCHEMY_DATABASE_URL)
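
A quick way to confirm the new driver string works end to end (a minimal sketch, not part of the commit; it assumes the DB_* variables injected by docker-compose.yml below and the psycopg2-binary dependency added in this commit):

import os
from sqlalchemy import create_engine, text

# Illustrative defaults mirroring docker-compose.yml
url = (
    f"postgresql://{os.getenv('DB_USERNAME', 'dev_user')}:"
    f"{os.getenv('DB_PASSWORD', 'dev_password')}@"
    f"{os.getenv('DB_HOST', 'postgres')}/{os.getenv('DB_NAME', 'aare_dev_db')}"
)
engine = create_engine(url)
with engine.connect() as conn:
    # SELECT version() proves we reached PostgreSQL, not a leftover MySQL instance
    print(conn.execute(text("SELECT version()")).scalar())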

View File

@@ -1,10 +1,10 @@
{
"ssl_cert_path": "ssl/cert.pem",
"ssl_key_path": "ssl/key.pem",
"OPENAPI_URL": "https://127.0.0.1:8000/openapi.json",
"SCHEMA_PATH": "./src/openapi.json",
"OUTPUT_DIRECTORY": "./openapi",
"ssl_cert_path": "/app/backend/ssl/cert.pem",
"ssl_key_path": "/app/backend/ssl/key.pem",
"OPENAPI_URL": "https://backend:8000/openapi.json",
"SCHEMA_PATH": "/app/src/openapi.json",
"OUTPUT_DIRECTORY": "/app/openapi",
"PORT": 8000,
"SSL_KEY_PATH": "../backend/ssl/key.pem",
"SSL_CERT_PATH": "../backend/ssl/cert.pem"
"SSL_KEY_PATH": "/app/backend/ssl/key.pem",
"SSL_CERT_PATH": "/app/backend/ssl/cert.pem"
}

View File

@@ -63,7 +63,7 @@ def run_server():
print(f"[INFO] Running on port {port}")
uvicorn.run(
app,
host="127.0.0.1" if environment in ["dev", "test"] else "0.0.0.0",
host="0.0.0.0" if environment in ["dev", "test"] else "0.0.0.0",
port=port,
log_level="debug",
ssl_keyfile=key_path,
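
The dev server now binds 0.0.0.0 so other containers and the host can reach it; the compose healthcheck below probes it over HTTPS. The same probe as a Python sketch (self-signed certificates, hence verify=False; assumes the requests package is available, which this commit does not add):

import requests

# Equivalent of the compose healthcheck: curl -k -f https://localhost:8000/openapi.json
r = requests.get("https://localhost:8000/openapi.json", verify=False, timeout=5)
r.raise_for_status()
print("backend healthy:", r.status_code)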

View File

@@ -28,7 +28,8 @@ dependencies = [
"uvicorn==0.23.1",
"python-dateutil~=2.8.2",
"tomli>=2.0.1",
"python-dotenv"
"python-dotenv",
"psycopg2-binary"
]
[tool.pytest.ini_options]
norecursedirs = ["backend/python-client"]

View File

@@ -1,8 +1,9 @@
version: "3.9"
services:
backend:
container_name: backend
build:
context: . # Build the image from the parent directory
context: . # Build the image from the parent directory
dockerfile: backend/Dockerfile
ports:
@@ -11,22 +12,76 @@ services:
- ./backend:/app/backend # Map backend directory to /app/backend
- ./app:/app/app # Map app directory to /app/app
- ./config_dev.json:/app/backend/config_dev.json # Explicitly map config_dev.json
- ./backend/ssl:/app/backend/ssl # Mount the SSL certificates into the container
working_dir: /app/backend # Set working directory to backend/
command: python main.py # Command to run main.py
depends_on: # ⬅️ New: start after the postgres container (start order only; see the readiness sketch below)
- postgres
healthcheck:
test: [ "CMD-SHELL", "curl -k -f https://localhost:8000/openapi.json || exit 1" ]
interval: 5s
timeout: 5s
retries: 5
environment: # ⬅️ Provide DB connection settings to the backend
ENVIRONMENT: dev
DB_USERNAME: dev_user
DB_PASSWORD: dev_password
DB_HOST: postgres
DB_NAME: aare_dev_db
postgres: # ⬅️ New service (our PostgreSQL database)
image: postgres:16
environment:
POSTGRES_USER: dev_user
POSTGRES_PASSWORD: dev_password
POSTGRES_DB: aare_dev_db
ports:
- "5432:5432"
volumes:
- pgdata:/var/lib/postgresql/data
frontend:
depends_on:
backend:
condition: service_healthy
build:
context: ./frontend
dockerfile: Dockerfile
args:
- VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
- VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
- VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
- NODE_ENV=${NODE_ENV}
ports:
- "5173:5173" # Map container port 5173 to host
- "5173:5173"
volumes:
- ./frontend:/app
- /app/node_modules # ⬅️ Anonymous volume so the bind mount does not shadow the container's installed node_modules
- ./backend/ssl:/app/backend/ssl
- ./backend/config_dev.json:/app/backend/config_${ENVIRONMENT}.json # Dynamically maps config based on environment
command: sh -c "npm run dev & ENVIRONMENT=dev npm run watch:openapi"
logistics_frontend:
build:
context: ./logistics
dockerfile: Dockerfile
args: # 👈 Pass build args through from .env
- VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
- VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
- VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
- NODE_ENV=${NODE_ENV}
ports:
- "3000:3000"
depends_on:
- frontend # Ensure OpenAPI models are available
- frontend # Ensure OpenAPI models are available
volumes:
- ./logistics/src:/app/src # For active development (hot reload)
- ./backend/ssl:/app/backend/ssl # Mount the SSL certificates into the container
environment:
- VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
volumes: # ⬅️ Persistent storage for PostgreSQL data
pgdata:
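
Note that plain depends_on only orders container startup; it does not wait for PostgreSQL to accept connections, and the postgres service above defines no healthcheck that a service_healthy condition could consume. A hypothetical wait loop the backend could run at startup (a sketch, assuming psycopg2 and the same DB_* environment variables; not part of this commit):

import os
import time
import psycopg2

def wait_for_postgres(retries: int = 10, delay: float = 2.0) -> None:
    # Hypothetical helper: poll until PostgreSQL accepts connections
    dsn = (
        f"dbname={os.getenv('DB_NAME', 'aare_dev_db')} "
        f"user={os.getenv('DB_USERNAME', 'dev_user')} "
        f"password={os.getenv('DB_PASSWORD', 'dev_password')} "
        f"host={os.getenv('DB_HOST', 'postgres')}"
    )
    for _ in range(retries):
        try:
            psycopg2.connect(dsn).close()
            return
        except psycopg2.OperationalError:
            time.sleep(delay)  # Postgres not ready yet; retry
    raise RuntimeError("PostgreSQL did not become ready in time")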

View File

@@ -1,16 +1,29 @@
FROM node:18
# Set working directory
WORKDIR /app
# Set working directory
WORKDIR /app
# Copy dependency files and install dependencies
COPY package*.json ./
RUN npm install
# Set up build args
ARG VITE_OPENAPI_BASE_DEV
ARG VITE_SSL_KEY_PATH
ARG VITE_SSL_CERT_PATH
ARG NODE_ENV=development
# Copy rest of the code and build the application
COPY . .
RUN npm run build
ENV VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
ENV VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
ENV VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
ENV NODE_ENV=${NODE_ENV}
# Use a simple HTTP server to serve the built static files
EXPOSE 5173
CMD ["npx", "vite", "preview", "--port", "5173"]
# Copy dependency files and install dependencies
COPY package*.json ./
RUN npm install --prefer-offline --no-audit --progress=false
# Copy rest of the code and build the application
COPY . .
# Use a simple HTTP server to serve the built static files
EXPOSE 5173
#CMD ["npx", "vite", "preview", "--port", "5173"]
CMD ["npm", "run", "dev"]

View File

@@ -15,13 +15,16 @@ if (!process.env.ENVIRONMENT) {
}
// Determine environment and configuration file
const nodeEnv = process.env.ENVIRONMENT || 'dev';
const configFile = `config_${nodeEnv}.json`;
const nodeEnv = process.env.ENVIRONMENT || 'dev'; // Dynamically set from ENV variables
const configFile = `config_${nodeEnv}.json`; // Dynamically resolve the config file name
const configFilePath = path.resolve('./backend/', configFile); // Resolve the config path inside the Docker container
// Load configuration file
let config;
try {
config = JSON.parse(fs.readFileSync(path.resolve('../', configFile), 'utf8'));
config = JSON.parse(fs.readFileSync(configFilePath, 'utf8'));
} catch (error) {
console.error(`❌ Failed to read configuration file '${configFile}': ${error.message}`);
process.exit(1);
@@ -42,12 +45,24 @@ for (const field of requiredFields) {
}
}
// Resolve paths from the config
const OPENAPI_URL = config.OPENAPI_URL;
const SCHEMA_PATH = path.resolve(config.SCHEMA_PATH);
const OUTPUT_DIRECTORY = path.resolve(config.OUTPUT_DIRECTORY);
const SSL_KEY_PATH = path.resolve(config.SSL_KEY_PATH);
const SSL_CERT_PATH = path.resolve(config.SSL_CERT_PATH);
const OPENAPI_BASE_URL = config.OPENAPI_URL; // or process.env.VITE_OPENAPI_BASE_DEV || config.OPENAPI_URL;
const SCHEMA_PATH = config.SCHEMA_PATH; // 💡 already absolute
const OUTPUT_DIRECTORY = config.OUTPUT_DIRECTORY; // 💡 already absolute
const SSL_KEY_PATH = config.SSL_KEY_PATH;
const SSL_CERT_PATH = config.SSL_CERT_PATH;
function assertDirExists(dirPath) {
if (!fs.existsSync(dirPath)) {
console.error(`❌ Directory does not exist inside Docker: ${dirPath}`);
process.exit(1);
}
}
// Validate that the container paths exist before proceeding:
assertDirExists(path.dirname(SCHEMA_PATH));
assertDirExists(OUTPUT_DIRECTORY);
assertDirExists(path.dirname(SSL_KEY_PATH));
// Log configuration
console.log(`[INFO] Environment: ${nodeEnv}`);
@@ -96,7 +111,7 @@ async function fetchAndGenerate() {
};
const res = await new Promise((resolve, reject) => {
https.get(OPENAPI_URL, options, resolve).on('error', reject);
https.get(OPENAPI_BASE_URL, options, resolve).on('error', reject);
});
let data = '';
@@ -104,69 +119,61 @@
data += chunk;
});
res.on('end', async () => {
try {
// Save schema file
fs.writeFileSync(SCHEMA_PATH, data, 'utf8');
console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`);
console.log("🧼 Cleaning output directory...");
await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true });
console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`);
if (!fs.existsSync(OUTPUT_DIRECTORY)) {
console.log(`✅ Confirmed removal of ${OUTPUT_DIRECTORY}`);
} else {
console.error(`❌ Failed to remove output directory: ${OUTPUT_DIRECTORY}`);
}
// Generate services
const command = `npx openapi -i ${SCHEMA_PATH} -o ${OUTPUT_DIRECTORY}`;
console.log(`🔧 Executing command: ${command}`);
const { stdout, stderr } = await execPromisified(command);
if (stderr) {
console.error(`⚠️ stderr while generating services: ${stderr}`);
} else {
console.log(`✅ Service generation completed successfully:\n${stdout}`);
}
// Copy the generated OpenAPI models to ../logistics/openapi
const targetDirectory = path.resolve('../logistics/openapi'); // Adjust as per logistics directory
console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`);
await fs.promises.rm(targetDirectory, { recursive: true, force: true }); // Clean target directory
await fs.promises.mkdir(targetDirectory, { recursive: true }); // Ensure the directory exists
// Copy files from OUTPUT_DIRECTORY to the target directory recursively
const copyRecursive = async (src, dest) => {
const entries = await fs.promises.readdir(src, { withFileTypes: true });
for (const entry of entries) {
const srcPath = path.join(src, entry.name);
const destPath = path.join(dest, entry.name);
if (entry.isDirectory()) {
await fs.promises.mkdir(destPath, { recursive: true });
await copyRecursive(srcPath, destPath);
} else {
await fs.promises.copyFile(srcPath, destPath);
}
}
};
await copyRecursive(OUTPUT_DIRECTORY, targetDirectory);
console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`);
} catch (error) {
console.error(`❌ Error during schema processing or generation: ${error.message}`);
}
isGenerating = false;
await new Promise((resolve, reject) => {
res.on('end', resolve);
res.on('error', reject);
});
// Save schema file
fs.writeFileSync(SCHEMA_PATH, data, 'utf8');
console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`);
console.log("🧼 Cleaning output directory...");
await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true });
console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`);
// Generate services
const command = `npx openapi -i ${SCHEMA_PATH} -o ${OUTPUT_DIRECTORY}`;
console.log(`🔧 Executing command: ${command}`);
const { stdout, stderr } = await execPromisified(command);
if (stderr) {
console.error(`⚠️ stderr while generating services: ${stderr}`);
} else {
console.log(`✅ Service generation completed successfully:\n${stdout}`);
}
// Copy the generated OpenAPI models to ../logistics/openapi
const targetDirectory = path.resolve('../logistics/openapi');
console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`);
await fs.promises.rm(targetDirectory, { recursive: true, force: true });
await fs.promises.mkdir(targetDirectory, { recursive: true });
// Recursive copy helper
const copyRecursive = async (src, dest) => {
const entries = await fs.promises.readdir(src, { withFileTypes: true });
for (const entry of entries) {
const srcPath = path.join(src, entry.name);
const destPath = path.join(dest, entry.name);
if (entry.isDirectory()) {
await fs.promises.mkdir(destPath, { recursive: true });
await copyRecursive(srcPath, destPath);
} else {
await fs.promises.copyFile(srcPath, destPath);
}
}
};
await copyRecursive(OUTPUT_DIRECTORY, targetDirectory);
console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`);
} catch (error) {
console.error(`❌ Failed to fetch OpenAPI schema: ${error.message}`);
console.error(`❌ Error during schema processing or generation: ${error.message}`);
} finally {
isGenerating = false;
}
}
// Backend directory based on the environment
const backendDirectory = (() => {
switch (nodeEnv) {
@@ -176,7 +183,7 @@ const backendDirectory = (() => {
return path.resolve('/home/jungfrau/aaredb/backend/app'); // Test path
case 'dev':
default:
return path.resolve('/Users/gotthardg/PycharmProjects/aaredb/backend/app'); // Development path
return path.resolve('/app/backend'); // Development path
}
})();

View File

@@ -1,5 +1,5 @@
{
"name": "heidi-frontend-v2",
"name": "Aare Web",
"private": true,
"version": "0.0.0",
"type": "module",

View File

@@ -2,6 +2,17 @@ FROM node:18-alpine
WORKDIR /app
# Set up build args
ARG VITE_OPENAPI_BASE_DEV
ARG VITE_SSL_KEY_PATH
ARG VITE_SSL_CERT_PATH
ARG NODE_ENV=development
ENV VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
ENV VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
ENV VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
ENV NODE_ENV=${NODE_ENV}
# Copy only the necessary package files first
COPY package*.json ./
RUN npm install
@@ -14,3 +25,8 @@ COPY . .
# Build the application
RUN npm run build
# Use a simple HTTP server to serve the built static files
EXPOSE 3000
CMD ["npm", "run", "start-dev"]

View File

@@ -15,4 +15,5 @@ pydantic[email]
mysqlclient~=2.1.1
python-multipart~=0.0.6
uvicorn==0.23.1
python-dotenv
python-dotenv
psycopg2-binary
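
After rebuilding the backend image, the new driver can be verified from inside the container (a trivial check; psycopg2 exposes a __version__ string):

import psycopg2
print(psycopg2.__version__)  # e.g. "2.9.x (dt dec pq3 ext lib)" when the wheel installed correctly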