
Streamlined Dockerfiles with clearer ENV variables and build args. Switched backend database from MySQL to PostgreSQL, updated configurations accordingly, and added robust Docker Compose services for better orchestration, including health checks and persistent storage.
207 lines
6.8 KiB
JavaScript
import fs from 'fs';
import https from 'https';
import { exec } from 'child_process';
import chokidar from 'chokidar';
import path from 'path';
import util from 'util';
import dotenv from 'dotenv';

// Load environment variables from .env into process.env before anything reads them.
dotenv.config();

// Fail fast: the rest of this script is meaningless without a declared environment.
if (!process.env.ENVIRONMENT) {
  console.error("❌ ENVIRONMENT variable is missing.");
  process.exit(1);
}
// Determine environment and resolve the matching configuration file name.
const nodeEnv = process.env.ENVIRONMENT || 'dev'; // defensive fallback; the guard above already ensures ENVIRONMENT is set
const configFile = `config_${nodeEnv}.json`; // e.g. config_dev.json / config_prod.json / config_test.json
const configFilePath = path.resolve('./backend/', configFile); // resolved against the container working directory

// Load and parse the configuration file, aborting the process if it is
// unreadable or not valid JSON.
let config;
try {
  const rawConfig = fs.readFileSync(configFilePath, 'utf8');
  config = JSON.parse(rawConfig);
} catch (error) {
  console.error(`❌ Failed to read configuration file '${configFile}': ${error.message}`);
  process.exit(1);
}
// Validate required configurations — abort on the first missing field.
const requiredFields = [
  'OPENAPI_URL',
  'SCHEMA_PATH',
  'OUTPUT_DIRECTORY',
  'SSL_KEY_PATH',
  'SSL_CERT_PATH',
];
const missingField = requiredFields.find((field) => !config[field]);
if (missingField) {
  console.error(`❌ Missing required configuration: ${missingField}`);
  process.exit(1);
}

// Bind the validated configuration values to module-level constants.
const {
  OPENAPI_URL: OPENAPI_BASE_URL, // or process.env.VITE_OPENAPI_BASE_DEV || config.OPENAPI_URL;
  SCHEMA_PATH,                   // 💡 already absolute
  OUTPUT_DIRECTORY,              // 💡 already absolute
  SSL_KEY_PATH,
  SSL_CERT_PATH,
} = config;
/**
 * Abort the whole process when a required directory is missing inside the
 * container — misconfigured volume mounts should fail loudly at startup.
 * @param {string} dirPath - Absolute directory path to check.
 */
function assertDirExists(dirPath) {
  if (fs.existsSync(dirPath)) return;
  console.error(`❌ Directory does not exist inside Docker: ${dirPath}`);
  process.exit(1);
}
// Validate that every configured directory exists inside the container
// before doing any work (schema parent, output dir, SSL key parent).
const requiredDirs = [
  path.dirname(SCHEMA_PATH),
  OUTPUT_DIRECTORY,
  path.dirname(SSL_KEY_PATH),
];
for (const dir of requiredDirs) {
  assertDirExists(dir);
}
// Log the effective configuration for debugging container startups.
console.log(`[INFO] Environment: ${nodeEnv}`);
console.log(`[INFO] Using SCHEMA_PATH: ${SCHEMA_PATH}`);
console.log(`[INFO] Using OUTPUT_DIRECTORY: ${OUTPUT_DIRECTORY}`);

// Verify SSL files. Check each path once and reuse the result, so the error
// message is guaranteed to match the condition that triggered it (the original
// re-ran fs.existsSync inside the message).
const sslKeyExists = fs.existsSync(SSL_KEY_PATH);
const sslCertExists = fs.existsSync(SSL_CERT_PATH);
if (!sslKeyExists || !sslCertExists) {
  console.error(`❌ SSL files not found:
  Key Path: ${SSL_KEY_PATH} (exists: ${sslKeyExists})
  Cert Path: ${SSL_CERT_PATH} (exists: ${sslCertExists})`);
  process.exit(1);
}
const execPromisified = util.promisify(exec);

// Re-entrancy guard: true while a generation run is in flight.
let isGenerating = false;
const debounceDelay = 500; // Debounce interval in milliseconds

/**
 * Wrap `func` so that rapid repeated calls collapse into a single invocation,
 * fired only after `delay` ms have passed since the most recent call.
 * @param {Function} func - Function to invoke after the quiet period.
 * @param {number} delay - Quiet period in milliseconds.
 * @returns {Function} Debounced wrapper forwarding the latest arguments.
 */
function debounce(func, delay) {
  let timer;
  return (...args) => {
    clearTimeout(timer);
    timer = setTimeout(() => func.apply(this, args), delay);
  };
}
// Main function to fetch the OpenAPI schema and regenerate client services.
// Re-entrancy is prevented via the module-level `isGenerating` flag; all
// errors are logged and swallowed so the file watcher keeps running.
async function fetchAndGenerate() {
  if (isGenerating) {
    console.log("⚠️ Generation process is already running.");
    return;
  }
  isGenerating = true;

  try {
    // Fetch OpenAPI schema over HTTPS
    console.log("🚀 Fetching OpenAPI schema...");
    const options = {
      // NOTE(security): disables TLS certificate verification. Acceptable only
      // for self-signed certificates in local/dev containers — never in prod.
      rejectUnauthorized: false,
      key: fs.readFileSync(SSL_KEY_PATH),
      cert: fs.readFileSync(SSL_CERT_PATH),
    };

    const res = await new Promise((resolve, reject) => {
      https.get(OPENAPI_BASE_URL, options, resolve).on('error', reject);
    });

    // Fail fast on non-2xx responses instead of saving an error page as the schema.
    if (res.statusCode < 200 || res.statusCode >= 300) {
      res.resume(); // drain the socket so it can be released
      throw new Error(`Unexpected HTTP status ${res.statusCode} from ${OPENAPI_BASE_URL}`);
    }

    // Decode as UTF-8 via the stream's StringDecoder so multi-byte characters
    // split across chunk boundaries are not corrupted (naive Buffer-to-string
    // concatenation would mangle them).
    res.setEncoding('utf8');
    let data = '';
    res.on('data', (chunk) => {
      data += chunk;
    });

    await new Promise((resolve, reject) => {
      res.on('end', resolve);
      res.on('error', reject);
    });

    // Save schema file
    fs.writeFileSync(SCHEMA_PATH, data, 'utf8');
    console.log(`✅ OpenAPI schema saved to ${SCHEMA_PATH}`);

    console.log("🧼 Cleaning output directory...");
    await fs.promises.rm(OUTPUT_DIRECTORY, { recursive: true, force: true });
    console.log(`✅ Output directory cleaned: ${OUTPUT_DIRECTORY}`);

    // Generate services. Paths are quoted so spaces in configured paths do not
    // split into separate shell arguments.
    const command = `npx openapi -i "${SCHEMA_PATH}" -o "${OUTPUT_DIRECTORY}"`;
    console.log(`🔧 Executing command: ${command}`);

    const { stdout, stderr } = await execPromisified(command);
    if (stderr) {
      console.error(`⚠️ stderr while generating services: ${stderr}`);
    } else {
      console.log(`✅ Service generation completed successfully:\n${stdout}`);
    }

    // Copy the generated OpenAPI models to ../logistics/openapi using the
    // built-in recursive copy instead of a hand-rolled directory walk.
    const targetDirectory = path.resolve('../logistics/openapi');
    console.log(`🔄 Copying generated OpenAPI models to ${targetDirectory}...`);
    await fs.promises.rm(targetDirectory, { recursive: true, force: true });
    await fs.promises.cp(OUTPUT_DIRECTORY, targetDirectory, { recursive: true });
    console.log(`✅ OpenAPI models copied successfully to ${targetDirectory}`);
  } catch (error) {
    console.error(`❌ Error during schema processing or generation: ${error.message}`);
  } finally {
    isGenerating = false;
  }
}
// Backend directory to watch. The original switch returned the identical path
// for prod, test, and dev, so it collapses to a single constant; reintroduce
// per-environment branching here only if the paths ever diverge.
const backendDirectory = path.resolve('/app/backend');

// The watch target must exist (volume mount in place) before chokidar starts.
if (!fs.existsSync(backendDirectory)) {
  console.error(`❌ Backend directory does not exist: ${backendDirectory}`);
  process.exit(1);
}
console.log(`👀 Watching for changes in ${backendDirectory}`);

// Watcher for change detection. The schema file and the generated output are
// ignored so writes performed by fetchAndGenerate() itself do not retrigger it.
const watcher = chokidar.watch(backendDirectory, {
  persistent: true,
  ignored: [SCHEMA_PATH, OUTPUT_DIRECTORY],
});

// Share ONE debounced handler across all event types. The original created an
// independent debounce timer per event, so a burst mixing add/change/unlink
// (e.g. an editor's atomic-rename save) was not actually debounced and could
// trigger overlapping generation attempts.
const regenerate = debounce(fetchAndGenerate, debounceDelay);
watcher
  .on('add', regenerate)
  .on('change', regenerate)
  .on('unlink', regenerate);