Refactor Docker setup and migrate to PostgreSQL
Streamlined Dockerfiles with clearer ENV variables and build args. Switched backend database from MySQL to PostgreSQL, updated configurations accordingly, and added robust Docker Compose services for better orchestration, including health checks and persistent storage.
This commit is contained in:
@@ -1,16 +1,29 @@
|
||||
FROM node:18
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Copy dependency files and install dependencies
|
||||
COPY package*.json ./
|
||||
RUN npm install
|
||||
# Setup build args clearly
|
||||
ARG VITE_OPENAPI_BASE_DEV
|
||||
ARG VITE_SSL_KEY_PATH
|
||||
ARG VITE_SSL_CERT_PATH
|
||||
ARG NODE_ENV=development
|
||||
|
||||
# Copy rest of the code and build the application
|
||||
COPY . .
|
||||
RUN npm run build
|
||||
ENV VITE_OPENAPI_BASE_DEV=${VITE_OPENAPI_BASE_DEV}
|
||||
ENV VITE_SSL_KEY_PATH=${VITE_SSL_KEY_PATH}
|
||||
ENV VITE_SSL_CERT_PATH=${VITE_SSL_CERT_PATH}
|
||||
ENV NODE_ENV=${NODE_ENV}
|
||||
|
||||
# Use a simple HTTP server to serve the built static files
|
||||
EXPOSE 5173
|
||||
CMD ["npx", "vite", "preview", "--port", "5173"]
|
||||
|
||||
# Copy dependency files and install dependencies
|
||||
COPY package*.json ./
|
||||
RUN npm install --prefer-offline --no-audit --progress=false
|
||||
|
||||
|
||||
# Copy rest of the code and build the application
|
||||
COPY . .
|
||||
|
||||
# Use a simple HTTP server to serve the built static files
|
||||
EXPOSE 5173
|
||||
#CMD ["npx", "vite", "preview", "--port", "5173"]
|
||||
CMD ["npm", "run", "dev"]
|
@@ -15,13 +15,16 @@ if (!process.env.ENVIRONMENT) {
|
||||
}
|
||||
|
||||
// Determine environment and configuration file
|
||||
const nodeEnv = process.env.ENVIRONMENT || 'dev';
|
||||
const configFile = `config_${nodeEnv}.json`;
|
||||
|
||||
const nodeEnv = process.env.ENVIRONMENT || 'dev'; // Dynamically set from ENV variables
|
||||
const configFile = `config_${nodeEnv}.json`; // Explicitly Dynamically resolve config file name
|
||||
const configFilePath = path.resolve('./backend/', configFile); // Explicitly correct path resolution in Docker
|
||||
|
||||
|
||||
// Load configuration file
|
||||
let config;
|
||||
try {
|
||||
config = JSON.parse(fs.readFileSync(path.resolve('../', configFile), 'utf8'));
|
||||
config = JSON.parse(fs.readFileSync(configFilePath, 'utf8'));
|
||||
} catch (error) {
|
||||
console.error(`❌ Failed to read configuration file '${configFile}': ${error.message}`);
|
||||
process.exit(1);
|
||||
@@ -42,12 +45,24 @@ for (const field of requiredFields) {
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve paths from the config
|
||||
const OPENAPI_URL = config.OPENAPI_URL;
|
||||
const SCHEMA_PATH = path.resolve(config.SCHEMA_PATH);
|
||||
const OUTPUT_DIRECTORY = path.resolve(config.OUTPUT_DIRECTORY);
|
||||
const SSL_KEY_PATH = path.resolve(config.SSL_KEY_PATH);
|
||||
const SSL_CERT_PATH = path.resolve(config.SSL_CERT_PATH);
|
||||
const OPENAPI_BASE_URL = config.OPENAPI_URL; // or process.env.VITE_OPENAPI_BASE_DEV || config.OPENAPI_URL;
|
||||
|
||||
const SCHEMA_PATH = config.SCHEMA_PATH; // 💡 already absolute
|
||||
const OUTPUT_DIRECTORY = config.OUTPUT_DIRECTORY; // 💡 already absolute
|
||||
const SSL_KEY_PATH = config.SSL_KEY_PATH;
|
||||
const SSL_CERT_PATH = config.SSL_CERT_PATH;
|
||||
|
||||
/**
 * Verify that a directory exists on disk; terminate the process if it does not.
 * Fail-fast guard for required container paths before codegen starts.
 * @param {string} dirPath - Directory path to check.
 */
function assertDirExists(dirPath) {
  if (fs.existsSync(dirPath)) {
    return; // Directory is present — nothing to do.
  }
  console.error(`❌ Directory does not exist inside Docker: ${dirPath}`);
  process.exit(1);
}
|
||||
|
||||
// explicitly validate directories clearly explicitly in container paths:
|
||||
assertDirExists(path.dirname(SCHEMA_PATH));
|
||||
assertDirExists(OUTPUT_DIRECTORY);
|
||||
assertDirExists(path.dirname(SSL_KEY_PATH));
|
||||
|
||||
// Log configuration
|
||||
console.log(`[INFO] Environment: ${nodeEnv}`);
|
||||
@@ -96,7 +111,7 @@ async function fetchAndGenerate() {
|
||||
};
|
||||
|
||||
const res = await new Promise((resolve, reject) => {
|
||||
https.get(OPENAPI_URL, options, resolve).on('error', reject);
|
||||
https.get(OPENAPI_BASE_URL, options, resolve).on('error', reject);
|
||||
});
|
||||
|
||||
let data = '';
|
||||
@@ -104,69 +119,61 @@ async function fetchAndGenerate() {
|
||||
data += chunk;
|
||||
});
|
||||
|
||||
res.on('end', async () => {
|
||||
try {
|
||||
// Save schema file
|
||||
fs.writeFileSync(SCHEMA_PATH, data, 'utf8');
|
||||
console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`);
|
||||
|
||||
console.log("🧼 Cleaning output directory...");
|
||||
await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true });
|
||||
|
||||
console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`);
|
||||
if (!fs.existsSync(OUTPUT_DIRECTORY)) {
|
||||
console.log(`✅ Confirmed removal of ${OUTPUT_DIRECTORY}`);
|
||||
} else {
|
||||
console.error(`❌ Failed to remove output directory: ${OUTPUT_DIRECTORY}`);
|
||||
}
|
||||
|
||||
// Generate services
|
||||
const command = `npx openapi -i ${SCHEMA_PATH} -o ${OUTPUT_DIRECTORY}`;
|
||||
console.log(`🔧 Executing command: ${command}`);
|
||||
|
||||
const { stdout, stderr } = await execPromisified(command);
|
||||
if (stderr) {
|
||||
console.error(`⚠️ stderr while generating services: ${stderr}`);
|
||||
} else {
|
||||
console.log(`✅ Service generation completed successfully:\n${stdout}`);
|
||||
}
|
||||
|
||||
// Copy the generated OpenAPI models to ../logistics/openapi
|
||||
const targetDirectory = path.resolve('../logistics/openapi'); // Adjust as per logistics directory
|
||||
console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`);
|
||||
|
||||
await fs.promises.rm(targetDirectory, { recursive: true, force: true }); // Clean target directory
|
||||
await fs.promises.mkdir(targetDirectory, { recursive: true }); // Ensure the directory exists
|
||||
|
||||
// Recursively mirror the contents of `src` into `dest` (dest is assumed
// to already exist). Subdirectories are recreated before descending;
// regular files are copied byte-for-byte, one entry at a time.
const copyRecursive = async (src, dest) => {
  for (const entry of await fs.promises.readdir(src, { withFileTypes: true })) {
    const from = path.join(src, entry.name);
    const to = path.join(dest, entry.name);

    if (!entry.isDirectory()) {
      await fs.promises.copyFile(from, to);
      continue;
    }
    await fs.promises.mkdir(to, { recursive: true });
    await copyRecursive(from, to);
  }
};
|
||||
await copyRecursive(OUTPUT_DIRECTORY, targetDirectory);
|
||||
|
||||
console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`);
|
||||
} catch (error) {
|
||||
console.error(`❌ Error during schema processing or generation: ${error.message}`);
|
||||
}
|
||||
isGenerating = false;
|
||||
await new Promise((resolve, reject) => {
|
||||
res.on('end', resolve);
|
||||
res.on('error', reject);
|
||||
});
|
||||
|
||||
// Save schema file
|
||||
fs.writeFileSync(SCHEMA_PATH, data, 'utf8');
|
||||
console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`);
|
||||
|
||||
console.log("🧼 Cleaning output directory...");
|
||||
await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true });
|
||||
console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`);
|
||||
|
||||
// Generate services
|
||||
const command = `npx openapi -i ${SCHEMA_PATH} -o ${OUTPUT_DIRECTORY}`;
|
||||
console.log(`🔧 Executing command: ${command}`);
|
||||
|
||||
const { stdout, stderr } = await execPromisified(command);
|
||||
if (stderr) {
|
||||
console.error(`⚠️ stderr while generating services: ${stderr}`);
|
||||
} else {
|
||||
console.log(`✅ Service generation completed successfully:\n${stdout}`);
|
||||
}
|
||||
|
||||
// Copy the generated OpenAPI models to ../logistics/openapi
|
||||
const targetDirectory = path.resolve('../logistics/openapi');
|
||||
console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`);
|
||||
await fs.promises.rm(targetDirectory, { recursive: true, force: true });
|
||||
await fs.promises.mkdir(targetDirectory, { recursive: true });
|
||||
|
||||
// Recursive copy helper
/**
 * Recursively copy the contents of `src` into `dest`.
 * Delegates to the built-in `fs.promises.cp` (available since Node 16.7;
 * the image uses node:18) instead of a hand-rolled readdir/copyFile walk.
 * Creates missing destination directories and overwrites existing files,
 * matching the previous manual implementation for regular files and
 * directories while also handling symlinks correctly.
 * @param {string} src - Source directory.
 * @param {string} dest - Destination directory (created if absent).
 * @returns {Promise<void>}
 */
const copyRecursive = async (src, dest) => {
  await fs.promises.cp(src, dest, { recursive: true });
};
|
||||
await copyRecursive(OUTPUT_DIRECTORY, targetDirectory);
|
||||
console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`);
|
||||
} catch (error) {
|
||||
console.error(`❌ Failed to fetch OpenAPI schema: ${error.message}`);
|
||||
console.error(`❌ Error during schema processing or generation: ${error.message}`);
|
||||
} finally {
|
||||
isGenerating = false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Backend directory based on the environment
|
||||
const backendDirectory = (() => {
|
||||
switch (nodeEnv) {
|
||||
@@ -176,7 +183,7 @@ const backendDirectory = (() => {
|
||||
return path.resolve('/home/jungfrau/aaredb/backend/app'); // Test path
|
||||
case 'dev':
|
||||
default:
|
||||
return path.resolve('/Users/gotthardg/PycharmProjects/aaredb/backend/app'); // Development path
|
||||
return path.resolve('/app/backend'); // Development path
|
||||
}
|
||||
})();
|
||||
|
||||
|
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "heidi-frontend-v2",
|
||||
"name": "Aare Web",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
|
Reference in New Issue
Block a user