mirror of
https://github.com/Dokploy/templates.git
synced 2026-03-11 17:48:11 -05:00
feat: add validation scripts and configuration for Docker Compose and template files
- Introduced a GitHub Actions workflow to validate Docker Compose files and template.toml on pull requests. - Added helper functions for generating random values and processing variables in templates. - Implemented validation scripts for checking the structure, syntax, and best practices of Docker Compose and template files. - Created necessary TypeScript types and configuration files for the build scripts.
This commit is contained in:
309
.github/workflows/validate-docker-compose.yml
vendored
Normal file
309
.github/workflows/validate-docker-compose.yml
vendored
Normal file
name: Validate Docker Compose Files

on:
  pull_request:
    branches:
      - canary
    paths:
      - 'blueprints/**/docker-compose.yml'
      - 'blueprints/**/template.toml'
  workflow_dispatch:

jobs:
  validate-docker-compose:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Full history is required to diff against the base branch

      - name: Set up Docker Compose
        run: |
          echo "🐳 Setting up Docker Compose..."
          # Docker Compose V2 is preinstalled on ubuntu-latest
          docker compose version

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20.16.0

      - name: Set up pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 8

      - name: Install dependencies
        run: |
          echo "📦 Installing Node.js dependencies..."
          cd build-scripts && pnpm install

      - name: Get changed files
        id: changed-files
        run: |
          echo "🔍 Detecting changed files..."

          # BUG FIX: github.base_ref is empty on workflow_dispatch runs, which
          # made `git merge-base` fail; fall back to the default target branch.
          BASE_REF="${{ github.base_ref }}"
          if [ -z "$BASE_REF" ]; then
            BASE_REF="canary"
          fi
          BASE_SHA=$(git merge-base HEAD "origin/$BASE_REF")

          # All added/modified docker-compose.yml and template.toml files
          CHANGED_COMPOSE=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA HEAD | grep -E 'blueprints/.*/docker-compose\.yml$' || true)
          CHANGED_TOML=$(git diff --name-only --diff-filter=ACMRT $BASE_SHA HEAD | grep -E 'blueprints/.*/template\.toml$' || true)

          # Unique template directories that contain changes
          CHANGED_DIRS=$(echo -e "$CHANGED_COMPOSE\n$CHANGED_TOML" | sed 's|blueprints/\([^/]*\)/.*|\1|' | sort -u)

          echo "Changed compose files:"
          while read -r file; do
            if [ -n "$file" ]; then echo "  - $file"; fi
          done <<< "$CHANGED_COMPOSE"

          echo "Changed TOML files:"
          while read -r file; do
            if [ -n "$file" ]; then echo "  - $file"; fi
          done <<< "$CHANGED_TOML"

          echo "Changed directories:"
          while read -r dir; do
            if [ -n "$dir" ]; then echo "  - $dir"; fi
          done <<< "$CHANGED_DIRS"

          # Save multiline outputs for the following steps
          {
            echo "compose_files<<EOF"
            echo "$CHANGED_COMPOSE"
            echo "EOF"
            echo "toml_files<<EOF"
            echo "$CHANGED_TOML"
            echo "EOF"
            echo "directories<<EOF"
            echo "$CHANGED_DIRS"
            echo "EOF"
          } >> "$GITHUB_OUTPUT"

      - name: Validate Docker Compose files syntax
        id: validate-compose-syntax
        run: |
          echo "🔍 Validating Docker Compose files syntax..."

          ERROR=0
          COMPOSE_FILES="${{ steps.changed-files.outputs.compose_files }}"

          if [ -z "$COMPOSE_FILES" ]; then
            echo "ℹ️ No docker-compose.yml files changed, skipping validation"
            exit 0
          fi

          # BUG FIX: feed the loop from a here-string instead of a pipe. A piped
          # `while` runs in a subshell, so ERROR=1 set inside it never reached
          # the check below and syntax failures were silently ignored.
          while read -r compose_file; do
            if [ -z "$compose_file" ]; then
              continue
            fi

            TEMPLATE_DIR=$(dirname "$compose_file")
            TEMPLATE_NAME=$(basename "$TEMPLATE_DIR")

            echo ""
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "📦 Validating syntax: $TEMPLATE_NAME"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

            # Validate docker-compose.yml syntax using docker compose itself
            echo "🔍 Validating docker-compose.yml syntax..."
            if ! docker compose -f "$compose_file" config > /dev/null 2>&1; then
              echo "❌ ERROR: docker-compose.yml syntax is invalid in $TEMPLATE_NAME"
              echo "Running docker compose config to show errors:"
              docker compose -f "$compose_file" config 2>&1 || true
              ERROR=1
            else
              echo "✅ docker-compose.yml syntax is valid"
            fi

            # List the services declared in the compose file
            SERVICES=$(docker compose -f "$compose_file" config --services 2>/dev/null || echo "")
            echo "📋 Services found in docker-compose.yml:"
            while read -r service; do
              if [ -n "$service" ]; then echo "  - $service"; fi
            done <<< "$SERVICES"

            # Save the service list for later validation
            echo "$SERVICES" > "/tmp/${TEMPLATE_NAME}_services.txt"
          done <<< "$COMPOSE_FILES"

          if [ $ERROR -eq 1 ]; then
            echo ""
            echo "❌ Docker Compose syntax validation failed"
            exit 1
          else
            echo ""
            echo "✅ All Docker Compose files have valid syntax"
          fi

      - name: Validate Docker Compose best practices
        id: validate-compose-practices
        run: |
          echo "🔍 Validating Docker Compose best practices..."

          ERROR=0
          COMPOSE_FILES="${{ steps.changed-files.outputs.compose_files }}"

          if [ -z "$COMPOSE_FILES" ]; then
            echo "ℹ️ No docker-compose.yml files changed, skipping validation"
            exit 0
          fi

          # Here-string keeps the loop in the current shell so ERROR survives
          while read -r compose_file; do
            if [ -z "$compose_file" ]; then
              continue
            fi

            TEMPLATE_DIR=$(dirname "$compose_file")
            TEMPLATE_NAME=$(basename "$TEMPLATE_DIR")

            echo ""
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "📦 Validating best practices: $TEMPLATE_NAME"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

            # Validate using the TypeScript script
            if ! (cd build-scripts && pnpm exec tsx validate-docker-compose.ts --file "../$compose_file" --verbose); then
              ERROR=1
            fi
          done <<< "$COMPOSE_FILES"

          if [ $ERROR -eq 1 ]; then
            echo ""
            echo "❌ Docker Compose best practices validation failed"
            exit 1
          else
            echo ""
            echo "✅ All Docker Compose files follow best practices"
          fi

      - name: Validate template.toml files
        id: validate-toml
        run: |
          echo "🔍 Validating template.toml files..."

          ERROR=0
          DIRECTORIES="${{ steps.changed-files.outputs.directories }}"

          if [ -z "$DIRECTORIES" ]; then
            echo "ℹ️ No template directories changed, skipping TOML validation"
            exit 0
          fi

          # Here-string keeps the loop in the current shell so ERROR survives
          while read -r template_dir; do
            if [ -z "$template_dir" ]; then
              continue
            fi

            TEMPLATE_PATH="blueprints/$template_dir"
            TOML_FILE="$TEMPLATE_PATH/template.toml"

            if [ ! -f "$TOML_FILE" ]; then
              echo "⚠️ WARNING: template.toml not found in $template_dir (might be deleted)"
              continue
            fi

            echo ""
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "📝 Validating: $template_dir/template.toml"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

            # Validate using the TypeScript script with tsx.
            # Run from build-scripts so node_modules is resolvable.
            if ! (cd build-scripts && pnpm exec tsx validate-template.ts --dir "../$TEMPLATE_PATH" --verbose); then
              ERROR=1
            fi
          done <<< "$DIRECTORIES"

          if [ $ERROR -eq 1 ]; then
            echo ""
            echo "❌ template.toml validation failed"
            exit 1
          else
            echo ""
            echo "✅ All template.toml files are valid"
          fi

      - name: Test Docker Compose (dry-run)
        id: test-compose
        run: |
          echo "🧪 Testing Docker Compose files (dry-run)..."

          ERROR=0
          DIRECTORIES="${{ steps.changed-files.outputs.directories }}"

          if [ -z "$DIRECTORIES" ]; then
            echo "ℹ️ No template directories changed, skipping dry-run test"
            exit 0
          fi

          # Here-string keeps the loop in the current shell so ERROR survives
          while read -r template_dir; do
            if [ -z "$template_dir" ]; then
              continue
            fi

            COMPOSE_FILE="blueprints/$template_dir/docker-compose.yml"

            if [ ! -f "$COMPOSE_FILE" ]; then
              continue
            fi

            echo ""
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo "🧪 Testing: $template_dir"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

            # `-f` makes docker compose resolve relative paths against the
            # compose file's own directory, so no cd/cd - dance is needed.
            echo "🔍 Running docker compose config (full validation)..."
            if docker compose -f "$COMPOSE_FILE" config > /dev/null 2>&1; then
              echo "✅ Docker Compose file is fully valid and can be processed"

              # Show a short configuration summary
              echo "📋 Configuration summary:"
              while read -r service; do
                if [ -n "$service" ]; then echo "  Service: $service"; fi
              done <<< "$(docker compose -f "$COMPOSE_FILE" config --services 2>/dev/null || true)"
            else
              echo "❌ ERROR: Docker Compose file failed full validation"
              docker compose -f "$COMPOSE_FILE" config 2>&1 || true
              ERROR=1
            fi
          done <<< "$DIRECTORIES"

          if [ $ERROR -eq 1 ]; then
            echo ""
            echo "❌ Docker Compose dry-run test failed"
            exit 1
          else
            echo ""
            echo "✅ All Docker Compose files passed dry-run test"
          fi

      - name: Summary
        if: always()
        run: |
          echo ""
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo "📊 Validation Summary"
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

          if [ "${{ steps.validate-compose-syntax.outcome }}" == "success" ] && \
             [ "${{ steps.validate-compose-practices.outcome }}" == "success" ] && \
             [ "${{ steps.validate-toml.outcome }}" == "success" ] && \
             [ "${{ steps.test-compose.outcome }}" == "success" ]; then
            echo "✅ All validations passed!"
            echo ""
            echo "Your Docker Compose and template.toml files are valid and ready to merge."
          else
            echo "❌ Some validations failed. Please review the errors above."
            echo ""
            echo "Common issues to check:"
            echo "  - docker-compose.yml syntax errors"
            echo "  - template.toml syntax errors"
            echo "  - serviceName in template.toml must match service names in docker-compose.yml"
            echo "  - Avoid using container_name, explicit networks, or port mappings"
          fi
1
.gitignore
vendored
Normal file
1
.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
node_modules
|
||||
245
build-scripts/helpers.ts
Normal file
245
build-scripts/helpers.ts
Normal file
@@ -0,0 +1,245 @@
|
||||
import { randomBytes, randomInt, randomUUID } from "crypto";
|
||||
|
||||
/**
|
||||
* Simple schema interface for domain generation
|
||||
*/
|
||||
export interface Schema {
|
||||
domain?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a random domain
|
||||
*/
|
||||
export function generateRandomDomain(schema: Schema = {}): string {
|
||||
const random = randomBytes(8).toString("hex");
|
||||
return schema.domain || `app-${random}.example.com`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate base64 encoded random string
|
||||
*/
|
||||
export function generateBase64(length: number = 32): string {
|
||||
const bytes = randomBytes(length);
|
||||
return bytes.toString("base64");
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a random password
|
||||
*/
|
||||
export function generatePassword(length: number = 16): string {
|
||||
const charset =
|
||||
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*";
|
||||
let password = "";
|
||||
for (let i = 0; i < length; i++) {
|
||||
password += charset.charAt(Math.floor(Math.random() * charset.length));
|
||||
}
|
||||
return password;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a random hash
|
||||
*/
|
||||
export function generateHash(length: number = 8): string {
|
||||
const bytes = randomBytes(length);
|
||||
return bytes.toString("hex");
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a JWT token (simplified version)
|
||||
*/
|
||||
export function generateJwt(options?: {
|
||||
length?: number;
|
||||
secret?: string;
|
||||
payload?: any;
|
||||
}): string {
|
||||
if (options?.length) {
|
||||
// Legacy format: jwt:length
|
||||
return randomBytes(options.length).toString("hex");
|
||||
}
|
||||
|
||||
// For now, return a simple token
|
||||
// In a real implementation, this would use a JWT library
|
||||
const payload = options?.payload || {};
|
||||
const secret = options?.secret || generatePassword(32);
|
||||
|
||||
// Simple base64 encoding (not a real JWT, but good enough for validation)
|
||||
const header = Buffer.from(JSON.stringify({ alg: "HS256", typ: "JWT" })).toString("base64url");
|
||||
const body = Buffer.from(JSON.stringify(payload)).toString("base64url");
|
||||
const signature = Buffer.from(secret).toString("base64url").slice(0, 32);
|
||||
|
||||
return `${header}.${body}.${signature}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a string value and replace variables (based on Dokploy's processValue)
|
||||
*/
|
||||
export function processValue(
|
||||
value: string,
|
||||
variables: Record<string, string>,
|
||||
schema: Schema = {}
|
||||
): string {
|
||||
if (!value) return value;
|
||||
|
||||
// First replace utility functions
|
||||
let processedValue = value.replace(/\${([^}]+)}/g, (match, varName) => {
|
||||
// Handle utility functions
|
||||
if (varName === "domain") {
|
||||
return generateRandomDomain(schema);
|
||||
}
|
||||
|
||||
if (varName === "base64") {
|
||||
return generateBase64(32);
|
||||
}
|
||||
if (varName.startsWith("base64:")) {
|
||||
const length = Number.parseInt(varName.split(":")[1], 10) || 32;
|
||||
return generateBase64(length);
|
||||
}
|
||||
|
||||
if (varName.startsWith("password:")) {
|
||||
const length = Number.parseInt(varName.split(":")[1], 10) || 16;
|
||||
return generatePassword(length);
|
||||
}
|
||||
if (varName === "password") {
|
||||
return generatePassword(16);
|
||||
}
|
||||
|
||||
if (varName.startsWith("hash:")) {
|
||||
const length = Number.parseInt(varName.split(":")[1], 10) || 8;
|
||||
return generateHash(length);
|
||||
}
|
||||
if (varName === "hash") {
|
||||
return generateHash();
|
||||
}
|
||||
|
||||
if (varName === "uuid") {
|
||||
return crypto.randomUUID();
|
||||
}
|
||||
|
||||
if (varName === "timestamp" || varName === "timestampms") {
|
||||
return Date.now().toString();
|
||||
}
|
||||
|
||||
if (varName === "timestamps") {
|
||||
return Math.round(Date.now() / 1000).toString();
|
||||
}
|
||||
|
||||
if (varName.startsWith("timestampms:")) {
|
||||
return new Date(varName.slice(12)).getTime().toString();
|
||||
}
|
||||
if (varName.startsWith("timestamps:")) {
|
||||
return Math.round(new Date(varName.slice(11)).getTime() / 1000).toString();
|
||||
}
|
||||
|
||||
if (varName === "randomPort") {
|
||||
return Math.floor(Math.random() * 65535).toString();
|
||||
}
|
||||
|
||||
if (varName === "jwt") {
|
||||
return generateJwt();
|
||||
}
|
||||
|
||||
if (varName.startsWith("jwt:")) {
|
||||
const params: string[] = varName.split(":").slice(1);
|
||||
if (params.length === 1 && params[0] && params[0].match(/^\d{1,3}$/)) {
|
||||
return generateJwt({ length: Number.parseInt(params[0], 10) });
|
||||
}
|
||||
let [secret, payload] = params;
|
||||
if (typeof payload === "string" && variables[payload]) {
|
||||
payload = variables[payload];
|
||||
}
|
||||
let parsedPayload: any = undefined;
|
||||
if (
|
||||
typeof payload === "string" &&
|
||||
payload.trimStart().startsWith("{") &&
|
||||
payload.trimEnd().endsWith("}")
|
||||
) {
|
||||
try {
|
||||
parsedPayload = JSON.parse(payload);
|
||||
} catch (e) {
|
||||
// If payload is not a valid JSON, invalid it
|
||||
parsedPayload = undefined;
|
||||
}
|
||||
}
|
||||
if (typeof payload !== "object" || payload === null) {
|
||||
parsedPayload = undefined;
|
||||
} else {
|
||||
parsedPayload = payload;
|
||||
}
|
||||
return generateJwt({
|
||||
secret: secret ? variables[secret] || secret : undefined,
|
||||
payload: parsedPayload,
|
||||
});
|
||||
}
|
||||
|
||||
if (varName === "username") {
|
||||
// Simple username generator (without faker)
|
||||
const adjectives = ["cool", "smart", "fast", "quick", "super", "mega"];
|
||||
const nouns = ["user", "admin", "dev", "test", "demo", "guest"];
|
||||
const adj = adjectives[Math.floor(Math.random() * adjectives.length)];
|
||||
const noun = nouns[Math.floor(Math.random() * nouns.length)];
|
||||
const num = Math.floor(Math.random() * 1000);
|
||||
return `${adj}${noun}${num}`.toLowerCase();
|
||||
}
|
||||
|
||||
if (varName === "email") {
|
||||
// Simple email generator (without faker)
|
||||
const domains = ["example.com", "test.com", "demo.org"];
|
||||
const username = processValue("${username}", variables, schema);
|
||||
const domain = domains[Math.floor(Math.random() * domains.length)];
|
||||
return `${username}@${domain}`.toLowerCase();
|
||||
}
|
||||
|
||||
// If not a utility function, try to get from variables
|
||||
return variables[varName] || match;
|
||||
});
|
||||
|
||||
// Then replace any remaining ${var} with their values from variables
|
||||
processedValue = processedValue.replace(/\${([^}]+)}/g, (match, varName) => {
|
||||
return variables[varName] || match;
|
||||
});
|
||||
|
||||
return processedValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process variables in a template (based on Dokploy's processVariables)
|
||||
*/
|
||||
export function processVariables(
|
||||
variables: Record<string, string>,
|
||||
schema: Schema = {}
|
||||
): Record<string, string> {
|
||||
const processed: Record<string, string> = {};
|
||||
|
||||
// First pass: Process some variables that don't depend on other variables
|
||||
for (const [key, value] of Object.entries(variables)) {
|
||||
if (typeof value !== "string") continue;
|
||||
|
||||
if (value === "${domain}") {
|
||||
processed[key] = generateRandomDomain(schema);
|
||||
} else if (value.startsWith("${base64:")) {
|
||||
const match = value.match(/\${base64:(\d+)}/);
|
||||
const length = match?.[1] ? Number.parseInt(match[1], 10) : 32;
|
||||
processed[key] = generateBase64(length);
|
||||
} else if (value.startsWith("${password:")) {
|
||||
const match = value.match(/\${password:(\d+)}/);
|
||||
const length = match?.[1] ? Number.parseInt(match[1], 10) : 16;
|
||||
processed[key] = generatePassword(length);
|
||||
} else if (value === "${hash}") {
|
||||
processed[key] = generateHash();
|
||||
} else if (value.startsWith("${hash:")) {
|
||||
const match = value.match(/\${hash:(\d+)}/);
|
||||
const length = match?.[1] ? Number.parseInt(match[1], 10) : 8;
|
||||
processed[key] = generateHash(length);
|
||||
} else {
|
||||
processed[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: Process variables that reference other variables
|
||||
for (const [key, value] of Object.entries(processed)) {
|
||||
processed[key] = processValue(value, processed, schema);
|
||||
}
|
||||
|
||||
return processed;
|
||||
}
|
||||
|
||||
24
build-scripts/package.json
Normal file
24
build-scripts/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "dokploy-templates-build-scripts",
|
||||
"version": "1.0.0",
|
||||
"description": "Build scripts for Dokploy Templates validation",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"validate-template": "tsx validate-template.ts",
|
||||
"validate-docker-compose": "tsx validate-docker-compose.ts",
|
||||
"process-meta": "node process-meta.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"toml": "^3.0.0",
|
||||
"yaml": "2.7.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.0.0",
|
||||
"tsx": "^4.7.0",
|
||||
"typescript": "^5.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
}
|
||||
|
||||
361
build-scripts/pnpm-lock.yaml
generated
Normal file
361
build-scripts/pnpm-lock.yaml
generated
Normal file
@@ -0,0 +1,361 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
toml:
|
||||
specifier: ^3.0.0
|
||||
version: 3.0.0
|
||||
yaml:
|
||||
specifier: 2.7.1
|
||||
version: 2.7.1
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^20.0.0
|
||||
version: 20.19.26
|
||||
tsx:
|
||||
specifier: ^4.7.0
|
||||
version: 4.21.0
|
||||
typescript:
|
||||
specifier: ^5.3.0
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.1':
|
||||
resolution: {integrity: sha512-HHB50pdsBX6k47S4u5g/CaLjqS3qwaOVE5ILsq64jyzgMhLuCuZ8rGzM9yhsAjfjkbgUPMzZEPa7DAp7yz6vuA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [aix]
|
||||
|
||||
'@esbuild/android-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-45fuKmAJpxnQWixOGCrS+ro4Uvb4Re9+UTieUY2f8AEc+t7d4AaZ6eUJ3Hva7dtrxAAWHtlEFsXFMAgNnGU9uQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-arm@0.27.1':
|
||||
resolution: {integrity: sha512-kFqa6/UcaTbGm/NncN9kzVOODjhZW8e+FRdSeypWe6j33gzclHtwlANs26JrupOntlcWmB0u8+8HZo8s7thHvg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-x64@0.27.1':
|
||||
resolution: {integrity: sha512-LBEpOz0BsgMEeHgenf5aqmn/lLNTFXVfoWMUox8CtWWYK9X4jmQzWjoGoNb8lmAYml/tQ/Ysvm8q7szu7BoxRQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/darwin-x64@0.27.1':
|
||||
resolution: {integrity: sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-/8Rfgns4XD9XOSXlzUDepG8PX+AVWHliYlUkFI3K3GB6tqbdjYqdhcb4BKRd7C0BhZSoaCxhv8kTcBrcZWP+xg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.1':
|
||||
resolution: {integrity: sha512-GITpD8dK9C+r+5yRT/UKVT36h/DQLOHdwGVwwoHidlnA168oD3uxA878XloXebK4Ul3gDBBIvEdL7go9gCUFzQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/linux-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-W9//kCrh/6in9rWIBdKaMtuTTzNj6jSeG/haWBADqLLa9P8O5YSRDzgD5y9QBok4AYlzS6ARHifAb75V6G670Q==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-arm@0.27.1':
|
||||
resolution: {integrity: sha512-ieMID0JRZY/ZeCrsFQ3Y3NlHNCqIhTprJfDgSB3/lv5jJZ8FX3hqPyXWhe+gvS5ARMBJ242PM+VNz/ctNj//eA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ia32@0.27.1':
|
||||
resolution: {integrity: sha512-VIUV4z8GD8rtSVMfAj1aXFahsi/+tcoXXNYmXgzISL+KB381vbSTNdeZHHHIYqFyXcoEhu9n5cT+05tRv13rlw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-loong64@0.27.1':
|
||||
resolution: {integrity: sha512-l4rfiiJRN7sTNI//ff65zJ9z8U+k6zcCg0LALU5iEWzY+a1mVZ8iWC1k5EsNKThZ7XCQ6YWtsZ8EWYm7r1UEsg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [loong64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.1':
|
||||
resolution: {integrity: sha512-U0bEuAOLvO/DWFdygTHWY8C067FXz+UbzKgxYhXC0fDieFa0kDIra1FAhsAARRJbvEyso8aAqvPdNxzWuStBnA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [mips64el]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.1':
|
||||
resolution: {integrity: sha512-NzdQ/Xwu6vPSf/GkdmRNsOfIeSGnh7muundsWItmBsVpMoNPVpM61qNzAVY3pZ1glzzAxLR40UyYM23eaDDbYQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.1':
|
||||
resolution: {integrity: sha512-7zlw8p3IApcsN7mFw0O1Z1PyEk6PlKMu18roImfl3iQHTnr/yAfYv6s4hXPidbDoI2Q0pW+5xeoM4eTCC0UdrQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-s390x@0.27.1':
|
||||
resolution: {integrity: sha512-cGj5wli+G+nkVQdZo3+7FDKC25Uh4ZVwOAK6A06Hsvgr8WqBBuOy/1s+PUEd/6Je+vjfm6stX0kmib5b/O2Ykw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-x64@0.27.1':
|
||||
resolution: {integrity: sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-wzC24DxAvk8Em01YmVXyjl96Mr+ecTPyOuADAvjGg+fyBpGmxmcr2E5ttf7Im8D0sXZihpxzO1isus8MdjMCXQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.1':
|
||||
resolution: {integrity: sha512-1YQ8ybGi2yIXswu6eNzJsrYIGFpnlzEWRl6iR5gMgmsrR0FcNoV1m9k9sc3PuP5rUBLshOZylc9nqSgymI+TYg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-5Z+DzLCrq5wmU7RDaMDe2DVXMRm2tTDvX2KU14JJVBN2CT/qov7XVix85QoJqHltpvAOZUAc3ndU56HSMWrv8g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.1':
|
||||
resolution: {integrity: sha512-Q73ENzIdPF5jap4wqLtsfh8YbYSZ8Q0wnxplOlZUOyZy7B4ZKW8DXGWgTCZmF8VWD7Tciwv5F4NsRf6vYlZtqg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-ajbHrGM/XiK+sXM0JzEbJAen+0E+JMQZ2l4RR4VFwvV9JEERx+oxtgkpoKv1SevhjavK2z2ReHk32pjzktWbGg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openharmony]
|
||||
|
||||
'@esbuild/sunos-x64@0.27.1':
|
||||
resolution: {integrity: sha512-IPUW+y4VIjuDVn+OMzHc5FV4GubIwPnsz6ubkvN8cuhEqH81NovB53IUlrlBkPMEPxvNnf79MGBoz8rZ2iW8HA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [sunos]
|
||||
|
||||
'@esbuild/win32-arm64@0.27.1':
|
||||
resolution: {integrity: sha512-RIVRWiljWA6CdVu8zkWcRmGP7iRRIIwvhDKem8UMBjPql2TXM5PkDVvvrzMtj1V+WFPB4K7zkIGM7VzRtFkjdg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-ia32@0.27.1':
|
||||
resolution: {integrity: sha512-2BR5M8CPbptC1AK5JbJT1fWrHLvejwZidKx3UMSF0ecHMa+smhi16drIrCEggkgviBwLYd5nwrFLSl5Kho96RQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-x64@0.27.1':
|
||||
resolution: {integrity: sha512-d5X6RMYv6taIymSk8JBP+nxv8DQAMY6A51GPgusqLdK9wBz5wWIXy1KjTck6HnjE9hqJzJRdk+1p/t5soSbCtw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@types/node@20.19.26':
|
||||
resolution: {integrity: sha512-0l6cjgF0XnihUpndDhk+nyD3exio3iKaYROSgvh/qSevPXax3L8p5DBRFjbvalnwatGgHEQn2R88y2fA3g4irg==}
|
||||
|
||||
esbuild@0.27.1:
|
||||
resolution: {integrity: sha512-yY35KZckJJuVVPXpvjgxiCuVEJT67F6zDeVTv4rizyPrfGBUpZQsvmxnN+C371c2esD/hNMjj4tpBhuueLN7aA==}
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
fsevents@2.3.3:
|
||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
os: [darwin]
|
||||
|
||||
get-tsconfig@4.13.0:
|
||||
resolution: {integrity: sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==}
|
||||
|
||||
resolve-pkg-maps@1.0.0:
|
||||
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
||||
|
||||
toml@3.0.0:
|
||||
resolution: {integrity: sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==}
|
||||
|
||||
tsx@4.21.0:
|
||||
resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
yaml@2.7.1:
|
||||
resolution: {integrity: sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==}
|
||||
engines: {node: '>= 14'}
|
||||
hasBin: true
|
||||
|
||||
snapshots:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ia32@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-loong64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-s390x@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/sunos-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-arm64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-ia32@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-x64@0.27.1':
|
||||
optional: true
|
||||
|
||||
'@types/node@20.19.26':
|
||||
dependencies:
|
||||
undici-types: 6.21.0
|
||||
|
||||
esbuild@0.27.1:
|
||||
optionalDependencies:
|
||||
'@esbuild/aix-ppc64': 0.27.1
|
||||
'@esbuild/android-arm': 0.27.1
|
||||
'@esbuild/android-arm64': 0.27.1
|
||||
'@esbuild/android-x64': 0.27.1
|
||||
'@esbuild/darwin-arm64': 0.27.1
|
||||
'@esbuild/darwin-x64': 0.27.1
|
||||
'@esbuild/freebsd-arm64': 0.27.1
|
||||
'@esbuild/freebsd-x64': 0.27.1
|
||||
'@esbuild/linux-arm': 0.27.1
|
||||
'@esbuild/linux-arm64': 0.27.1
|
||||
'@esbuild/linux-ia32': 0.27.1
|
||||
'@esbuild/linux-loong64': 0.27.1
|
||||
'@esbuild/linux-mips64el': 0.27.1
|
||||
'@esbuild/linux-ppc64': 0.27.1
|
||||
'@esbuild/linux-riscv64': 0.27.1
|
||||
'@esbuild/linux-s390x': 0.27.1
|
||||
'@esbuild/linux-x64': 0.27.1
|
||||
'@esbuild/netbsd-arm64': 0.27.1
|
||||
'@esbuild/netbsd-x64': 0.27.1
|
||||
'@esbuild/openbsd-arm64': 0.27.1
|
||||
'@esbuild/openbsd-x64': 0.27.1
|
||||
'@esbuild/openharmony-arm64': 0.27.1
|
||||
'@esbuild/sunos-x64': 0.27.1
|
||||
'@esbuild/win32-arm64': 0.27.1
|
||||
'@esbuild/win32-ia32': 0.27.1
|
||||
'@esbuild/win32-x64': 0.27.1
|
||||
|
||||
fsevents@2.3.3:
|
||||
optional: true
|
||||
|
||||
get-tsconfig@4.13.0:
|
||||
dependencies:
|
||||
resolve-pkg-maps: 1.0.0
|
||||
|
||||
resolve-pkg-maps@1.0.0: {}
|
||||
|
||||
toml@3.0.0: {}
|
||||
|
||||
tsx@4.21.0:
|
||||
dependencies:
|
||||
esbuild: 0.27.1
|
||||
get-tsconfig: 4.13.0
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
yaml@2.7.1: {}
|
||||
23
build-scripts/tsconfig.json
Normal file
23
build-scripts/tsconfig.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "commonjs",
|
||||
"lib": ["ES2022"],
|
||||
"outDir": "./dist",
|
||||
"rootDir": "./",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"moduleResolution": "node",
|
||||
"declaration": false,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noImplicitReturns": true,
|
||||
"noFallthroughCasesInSwitch": true
|
||||
},
|
||||
"include": ["*.ts"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
|
||||
879
build-scripts/type.ts
Normal file
879
build-scripts/type.ts
Normal file
@@ -0,0 +1,879 @@
|
||||
export type DefinitionsInclude =
|
||||
| string
|
||||
| {
|
||||
path?: StringOrList;
|
||||
env_file?: StringOrList;
|
||||
project_directory?: string;
|
||||
};
|
||||
export type StringOrList = string | ListOfStrings;
|
||||
export type ListOfStrings = string[];
|
||||
export type DefinitionsDevelopment = {
|
||||
watch?: {
|
||||
ignore?: string[];
|
||||
path: string;
|
||||
action: "rebuild" | "sync" | "sync+restart";
|
||||
target?: string;
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
[k: string]: unknown;
|
||||
} & Development;
|
||||
export type Development = {
|
||||
watch?: {
|
||||
ignore?: string[];
|
||||
path: string;
|
||||
action: "rebuild" | "sync" | "sync+restart";
|
||||
target?: string;
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
export type DefinitionsDeployment = {
|
||||
mode?: string;
|
||||
endpoint_mode?: string;
|
||||
replicas?: number;
|
||||
labels?: ListOrDict;
|
||||
rollback_config?: {
|
||||
parallelism?: number;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number;
|
||||
order?: "start-first" | "stop-first";
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
update_config?: {
|
||||
parallelism?: number;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number;
|
||||
order?: "start-first" | "stop-first";
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
resources?: {
|
||||
limits?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
pids?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
reservations?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
generic_resources?: DefinitionsGenericResources;
|
||||
devices?: DefinitionsDevices;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
restart_policy?: {
|
||||
condition?: string;
|
||||
delay?: string;
|
||||
max_attempts?: number;
|
||||
window?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
placement?: {
|
||||
constraints?: string[];
|
||||
preferences?: {
|
||||
spread?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
max_replicas_per_node?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Deployment;
|
||||
export type ListOrDict =
|
||||
| {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` ".+".
|
||||
*/
|
||||
[k: string]: string | number | boolean | null;
|
||||
}
|
||||
| string[];
|
||||
export type DefinitionsGenericResources = {
|
||||
discrete_resource_spec?: {
|
||||
kind?: string;
|
||||
value?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
export type DefinitionsDevices = {
|
||||
capabilities?: ListOfStrings;
|
||||
count?: string | number;
|
||||
device_ids?: ListOfStrings;
|
||||
driver?: string;
|
||||
options?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
type Deployment = {
|
||||
mode?: string;
|
||||
endpoint_mode?: string;
|
||||
replicas?: number;
|
||||
labels?: ListOrDict;
|
||||
rollback_config?: {
|
||||
parallelism?: number;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number;
|
||||
order?: "start-first" | "stop-first";
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
update_config?: {
|
||||
parallelism?: number;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number;
|
||||
order?: "start-first" | "stop-first";
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
resources?: {
|
||||
limits?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
pids?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
reservations?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
generic_resources?: DefinitionsGenericResources;
|
||||
devices?: DefinitionsDevices;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
restart_policy?: {
|
||||
condition?: string;
|
||||
delay?: string;
|
||||
max_attempts?: number;
|
||||
window?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
placement?: {
|
||||
constraints?: string[];
|
||||
preferences?: {
|
||||
spread?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
max_replicas_per_node?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
export type ServiceConfigOrSecret = (
|
||||
| string
|
||||
| {
|
||||
source?: string;
|
||||
target?: string;
|
||||
uid?: string;
|
||||
gid?: string;
|
||||
mode?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
export type Command = null | string | string[];
|
||||
export type EnvFile =
|
||||
| string
|
||||
| (
|
||||
| string
|
||||
| {
|
||||
path: string;
|
||||
required?: boolean;
|
||||
}
|
||||
)[];
|
||||
/**
|
||||
* This interface was referenced by `PropertiesNetworks`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export type DefinitionsNetwork = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
ipam?: {
|
||||
driver?: string;
|
||||
config?: {
|
||||
subnet?: string;
|
||||
ip_range?: string;
|
||||
gateway?: string;
|
||||
aux_addresses?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
options?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
internal?: boolean;
|
||||
enable_ipv6?: boolean;
|
||||
attachable?: boolean;
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Network;
|
||||
export type Network = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
ipam?: {
|
||||
driver?: string;
|
||||
config?: {
|
||||
subnet?: string;
|
||||
ip_range?: string;
|
||||
gateway?: string;
|
||||
aux_addresses?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
options?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
internal?: boolean;
|
||||
enable_ipv6?: boolean;
|
||||
attachable?: boolean;
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
/**
|
||||
* This interface was referenced by `PropertiesVolumes`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export type DefinitionsVolume = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Volume;
|
||||
export type Volume = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
|
||||
/**
|
||||
* The Compose file is a YAML file defining a multi-containers based application.
|
||||
*/
|
||||
export interface ComposeSpecification {
|
||||
/**
|
||||
* declared for backward compatibility, ignored.
|
||||
*/
|
||||
version?: string;
|
||||
/**
|
||||
* define the Compose project name, until user defines one explicitly.
|
||||
*/
|
||||
name?: string;
|
||||
/**
|
||||
* compose sub-projects to be included.
|
||||
*/
|
||||
include?: DefinitionsInclude[];
|
||||
services?: PropertiesServices;
|
||||
networks?: PropertiesNetworks;
|
||||
volumes?: PropertiesVolumes;
|
||||
secrets?: PropertiesSecrets;
|
||||
configs?: PropertiesConfigs;
|
||||
/**
|
||||
* This interface was referenced by `ComposeSpecification`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesServices {
|
||||
[k: string]: DefinitionsService;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesServices`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsService {
|
||||
develop?: DefinitionsDevelopment;
|
||||
deploy?: DefinitionsDeployment;
|
||||
annotations?: ListOrDict;
|
||||
attach?: boolean;
|
||||
build?:
|
||||
| string
|
||||
| {
|
||||
context?: string;
|
||||
dockerfile?: string;
|
||||
dockerfile_inline?: string;
|
||||
entitlements?: string[];
|
||||
args?: ListOrDict;
|
||||
ssh?: ListOrDict;
|
||||
labels?: ListOrDict;
|
||||
cache_from?: string[];
|
||||
cache_to?: string[];
|
||||
no_cache?: boolean;
|
||||
additional_contexts?: ListOrDict;
|
||||
network?: string;
|
||||
pull?: boolean;
|
||||
target?: string;
|
||||
shm_size?: number | string;
|
||||
extra_hosts?: ListOrDict;
|
||||
isolation?: string;
|
||||
privileged?: boolean;
|
||||
secrets?: ServiceConfigOrSecret;
|
||||
tags?: string[];
|
||||
ulimits?: Ulimits;
|
||||
platforms?: string[];
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
blkio_config?: {
|
||||
device_read_bps?: BlkioLimit[];
|
||||
device_read_iops?: BlkioLimit[];
|
||||
device_write_bps?: BlkioLimit[];
|
||||
device_write_iops?: BlkioLimit[];
|
||||
weight?: number;
|
||||
weight_device?: BlkioWeight[];
|
||||
};
|
||||
cap_add?: string[];
|
||||
cap_drop?: string[];
|
||||
cgroup?: "host" | "private";
|
||||
cgroup_parent?: string;
|
||||
command?: Command;
|
||||
configs?: ServiceConfigOrSecret;
|
||||
container_name?: string;
|
||||
cpu_count?: number;
|
||||
cpu_percent?: number;
|
||||
cpu_shares?: number | string;
|
||||
cpu_quota?: number | string;
|
||||
cpu_period?: number | string;
|
||||
cpu_rt_period?: number | string;
|
||||
cpu_rt_runtime?: number | string;
|
||||
cpus?: number | string;
|
||||
cpuset?: string;
|
||||
credential_spec?: {
|
||||
config?: string;
|
||||
file?: string;
|
||||
registry?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
depends_on?:
|
||||
| ListOfStrings
|
||||
| {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
[k: string]: {
|
||||
restart?: boolean;
|
||||
required?: boolean;
|
||||
condition:
|
||||
| "service_started"
|
||||
| "service_healthy"
|
||||
| "service_completed_successfully";
|
||||
};
|
||||
};
|
||||
device_cgroup_rules?: ListOfStrings;
|
||||
devices?: string[];
|
||||
dns?: StringOrList;
|
||||
dns_opt?: string[];
|
||||
dns_search?: StringOrList;
|
||||
domainname?: string;
|
||||
entrypoint?: Command;
|
||||
env_file?: EnvFile;
|
||||
environment?: ListOrDict;
|
||||
expose?: (string | number)[];
|
||||
extends?:
|
||||
| string
|
||||
| {
|
||||
service: string;
|
||||
file?: string;
|
||||
};
|
||||
external_links?: string[];
|
||||
extra_hosts?: ListOrDict;
|
||||
group_add?: (string | number)[];
|
||||
healthcheck?: DefinitionsHealthcheck;
|
||||
hostname?: string;
|
||||
image?: string;
|
||||
init?: boolean;
|
||||
ipc?: string;
|
||||
isolation?: string;
|
||||
labels?: ListOrDict;
|
||||
links?: string[];
|
||||
logging?: {
|
||||
driver?: string;
|
||||
options?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number | null;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
mac_address?: string;
|
||||
mem_limit?: number | string;
|
||||
mem_reservation?: string | number;
|
||||
mem_swappiness?: number;
|
||||
memswap_limit?: number | string;
|
||||
network_mode?: string;
|
||||
networks?:
|
||||
| ListOfStrings
|
||||
| {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
[k: string]: {
|
||||
aliases?: ListOfStrings;
|
||||
ipv4_address?: string;
|
||||
ipv6_address?: string;
|
||||
link_local_ips?: ListOfStrings;
|
||||
mac_address?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
priority?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
};
|
||||
oom_kill_disable?: boolean;
|
||||
oom_score_adj?: number;
|
||||
pid?: string | null;
|
||||
pids_limit?: number | string;
|
||||
platform?: string;
|
||||
ports?: (
|
||||
| number
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
mode?: string;
|
||||
host_ip?: string;
|
||||
target?: number;
|
||||
published?: string | number;
|
||||
protocol?: string;
|
||||
app_protocol?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
privileged?: boolean;
|
||||
profiles?: ListOfStrings;
|
||||
pull_policy?: "always" | "never" | "if_not_present" | "build" | "missing";
|
||||
read_only?: boolean;
|
||||
restart?: string;
|
||||
runtime?: string;
|
||||
scale?: number;
|
||||
security_opt?: string[];
|
||||
shm_size?: number | string;
|
||||
secrets?: ServiceConfigOrSecret;
|
||||
sysctls?: ListOrDict;
|
||||
stdin_open?: boolean;
|
||||
stop_grace_period?: string;
|
||||
stop_signal?: string;
|
||||
storage_opt?: {
|
||||
[k: string]: unknown;
|
||||
};
|
||||
tmpfs?: StringOrList;
|
||||
tty?: boolean;
|
||||
ulimits?: Ulimits;
|
||||
user?: string;
|
||||
uts?: string;
|
||||
userns_mode?: string;
|
||||
volumes?: (
|
||||
| string
|
||||
| {
|
||||
type: string;
|
||||
source?: string;
|
||||
target?: string;
|
||||
read_only?: boolean;
|
||||
consistency?: string;
|
||||
bind?: {
|
||||
propagation?: string;
|
||||
create_host_path?: boolean;
|
||||
selinux?: "z" | "Z";
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
volume?: {
|
||||
nocopy?: boolean;
|
||||
subpath?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
tmpfs?: {
|
||||
size?: number | string;
|
||||
mode?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
volumes_from?: string[];
|
||||
working_dir?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsService`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface Ulimits {
|
||||
/**
|
||||
* This interface was referenced by `Ulimits`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-z]+$".
|
||||
*/
|
||||
[k: string]:
|
||||
| number
|
||||
| {
|
||||
hard: number;
|
||||
soft: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
}
|
||||
export interface BlkioLimit {
|
||||
path?: string;
|
||||
rate?: number | string;
|
||||
}
|
||||
export interface BlkioWeight {
|
||||
path?: string;
|
||||
weight?: number;
|
||||
}
|
||||
export interface DefinitionsHealthcheck {
|
||||
disable?: boolean;
|
||||
interval?: string;
|
||||
retries?: number;
|
||||
test?: string | string[];
|
||||
timeout?: string;
|
||||
start_period?: string;
|
||||
start_interval?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsHealthcheck`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesNetworks {
|
||||
[k: string]: DefinitionsNetwork;
|
||||
}
|
||||
export interface PropertiesVolumes {
|
||||
[k: string]: DefinitionsVolume;
|
||||
}
|
||||
export interface PropertiesSecrets {
|
||||
[k: string]: DefinitionsSecret;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesSecrets`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsSecret {
|
||||
name?: string;
|
||||
environment?: string;
|
||||
file?: string;
|
||||
external?:
|
||||
| boolean
|
||||
| {
|
||||
name?: string;
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
template_driver?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsSecret`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesConfigs {
|
||||
[k: string]: DefinitionsConfig;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesConfigs`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsConfig {
|
||||
name?: string;
|
||||
content?: string;
|
||||
environment?: string;
|
||||
file?: string;
|
||||
external?:
|
||||
| boolean
|
||||
| {
|
||||
name?: string;
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
template_driver?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsConfig`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
357
build-scripts/validate-docker-compose.ts
Normal file
357
build-scripts/validate-docker-compose.ts
Normal file
@@ -0,0 +1,357 @@
|
||||
#!/usr/bin/env tsx
|
||||
|
||||
/**
|
||||
* Validation script for docker-compose.yml files
|
||||
* Validates structure, syntax, and best practices for Dokploy templates
|
||||
*/
|
||||
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as yaml from "yaml";
|
||||
import type { ComposeSpecification, DefinitionsService } from "./type";
|
||||
|
||||
interface DockerComposeValidatorOptions {
|
||||
composePath?: string | null;
|
||||
verbose?: boolean;
|
||||
exitOnError?: boolean;
|
||||
}
|
||||
|
||||
interface ValidationResult {
|
||||
valid: boolean;
|
||||
errors: string[];
|
||||
warnings: string[];
|
||||
}
|
||||
|
||||
type LogLevel = "info" | "success" | "warning" | "error" | "debug";
|
||||
|
||||
class DockerComposeValidator {
|
||||
private options: Required<DockerComposeValidatorOptions>;
|
||||
private errors: string[] = [];
|
||||
private warnings: string[] = [];
|
||||
|
||||
/**
 * @param options - validator configuration; any field the caller omits
 *   falls back to a default (no compose path, quiet output, exit on error).
 */
constructor(options: DockerComposeValidatorOptions = {}) {
  // Normalize every option explicitly. The previous trailing `...options`
  // spread re-applied the caller's keys AFTER the defaults, so an explicit
  // `{ verbose: undefined }` clobbered the default back to `undefined`,
  // violating the Required<DockerComposeValidatorOptions> contract.
  this.options = {
    composePath: options.composePath ?? null,
    verbose: options.verbose ?? false,
    // `?? true` preserves the original `!== false` semantics:
    // undefined -> true, false -> false, true -> true.
    exitOnError: options.exitOnError ?? true,
  };
}
|
||||
|
||||
/**
 * Print a message prefixed with an emoji matching its severity.
 * Debug-level messages are suppressed unless the validator is verbose.
 */
private log(message: string, level: LogLevel = "info"): void {
  // Guard clause: drop debug chatter in quiet mode before doing any work.
  if (level === "debug" && !this.options.verbose) {
    return;
  }

  const emojiByLevel: Record<LogLevel, string> = {
    info: "🔍",
    success: "✅",
    warning: "⚠️",
    error: "❌",
    debug: "🔍",
  };

  // The "ℹ️" fallback is unreachable for a well-typed `level`, but kept so
  // behavior is identical even for values smuggled past the type checker.
  console.log(`${emojiByLevel[level] || "ℹ️"} ${message}`);
}
|
||||
|
||||
private error(message: string): void {
|
||||
this.errors.push(message);
|
||||
this.log(message, "error");
|
||||
}
|
||||
|
||||
private warning(message: string): void {
|
||||
this.warnings.push(message);
|
||||
this.log(message, "warning");
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse docker-compose.yml file
|
||||
*/
|
||||
/**
 * Read and parse a docker-compose.yml file.
 *
 * @param composePath - path to the compose file to load.
 * @returns the parsed compose document, or null when the file is missing,
 *   unparseable, or not a YAML mapping. Failures are recorded via
 *   `this.error()` rather than thrown.
 */
private parseCompose(composePath: string): ComposeSpecification | null {
  try {
    if (!fs.existsSync(composePath)) {
      this.error(`docker-compose.yml not found at ${composePath}`);
      return null;
    }

    const content = fs.readFileSync(composePath, "utf8");
    const compose = yaml.parse(content) as ComposeSpecification;

    // yaml.parse returns null for empty input and a scalar for non-mapping
    // documents; neither is a usable compose file.
    if (!compose || typeof compose !== "object") {
      this.error(`Invalid docker-compose.yml structure at ${composePath}`);
      return null;
    }

    return compose;
  } catch (error: unknown) {
    // `unknown` (strict-mode idiom) forces narrowing instead of trusting
    // `any`: both fs and yaml.parse can throw non-Error values in theory.
    const reason = error instanceof Error ? error.message : String(error);
    this.error(`Failed to parse docker-compose.yml: ${reason}`);
    return null;
  }
}
|
||||
|
||||
/**
|
||||
* Validate that docker-compose.yml can be processed by Docker Compose
|
||||
*/
|
||||
/**
 * Lightweight syntax check: confirm the file parses into a YAML mapping.
 * Full `docker compose config` validation runs in the CI workflow instead,
 * so this only verifies the document is structurally loadable.
 */
private validateDockerComposeSyntax(composePath: string): boolean {
  return this.parseCompose(composePath) !== null;
}
|
||||
|
||||
/**
|
||||
* Validate services don't use container_name (Dokploy best practice)
|
||||
*/
|
||||
private validateNoContainerName(services: Record<string, DefinitionsService>): void {
|
||||
Object.entries(services).forEach(([serviceName, service]) => {
|
||||
if (service.container_name) {
|
||||
this.error(
|
||||
`Service '${serviceName}': Found 'container_name' field. According to README, container_name should not be used. Dokploy manages container names automatically.`
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate no explicit networks (Dokploy creates networks automatically)
|
||||
*/
|
||||
private validateNoExplicitNetworks(
|
||||
compose: ComposeSpecification,
|
||||
services: Record<string, DefinitionsService>
|
||||
): void {
|
||||
// Check for dokploy-network specifically
|
||||
const hasDokployNetwork = compose.networks && "dokploy-network" in compose.networks;
|
||||
|
||||
// Check if any service uses explicit networks
|
||||
Object.entries(services).forEach(([serviceName, service]) => {
|
||||
if (service.networks) {
|
||||
if (typeof service.networks === "object" && !Array.isArray(service.networks)) {
|
||||
const networkNames = Object.keys(service.networks);
|
||||
if (networkNames.includes("dokploy-network")) {
|
||||
this.error(
|
||||
`Service '${serviceName}': Uses 'dokploy-network'. Dokploy creates networks automatically, explicit networks are not needed.`
|
||||
);
|
||||
} else if (networkNames.length > 0) {
|
||||
this.error(
|
||||
`Service '${serviceName}': Uses explicit network configuration. Dokploy creates networks automatically, explicit networks are not needed.`
|
||||
);
|
||||
}
|
||||
} else if (Array.isArray(service.networks)) {
|
||||
if (service.networks.includes("dokploy-network")) {
|
||||
this.error(
|
||||
`Service '${serviceName}': Uses 'dokploy-network'. Dokploy creates networks automatically, explicit networks are not needed.`
|
||||
);
|
||||
} else if (service.networks.length > 0) {
|
||||
this.error(
|
||||
`Service '${serviceName}': Uses explicit network configuration. Dokploy creates networks automatically, explicit networks are not needed.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check if networks section exists at root level
|
||||
if (hasDokployNetwork) {
|
||||
this.error(
|
||||
"Found 'dokploy-network' in networks section. Dokploy creates networks automatically, explicit networks are not needed."
|
||||
);
|
||||
}
|
||||
|
||||
if (compose.networks && Object.keys(compose.networks).length > 0) {
|
||||
this.error(
|
||||
"Found explicit networks section. Dokploy creates networks automatically, explicit networks are not needed."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate ports are not mapped (should be just numbers, not host:container)
|
||||
*/
|
||||
private validatePortsFormat(services: Record<string, DefinitionsService>): void {
|
||||
Object.entries(services).forEach(([serviceName, service]) => {
|
||||
if (service.ports) {
|
||||
service.ports.forEach((port, index) => {
|
||||
if (typeof port === "string") {
|
||||
// Check for port mapping format (e.g., "3000:3000" or "8080:80")
|
||||
if (/^\d+:\d+/.test(port)) {
|
||||
this.error(
|
||||
`Service '${serviceName}': ports[${index}] uses port mapping format '${port}'. According to README, use only port number (e.g., '3000') instead of '3000:3000'. Dokploy handles port routing.`
|
||||
);
|
||||
}
|
||||
} else if (typeof port === "object" && port !== null) {
|
||||
// Check for published port mapping
|
||||
if (port.published && port.target) {
|
||||
this.error(
|
||||
`Service '${serviceName}': ports[${index}] uses port mapping (published: ${port.published}, target: ${port.target}). According to README, use only port number. Dokploy handles port routing.`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate services exist
|
||||
*/
|
||||
private validateServicesExist(compose: ComposeSpecification): boolean {
|
||||
if (!compose.services || Object.keys(compose.services).length === 0) {
|
||||
this.error("No services found in docker-compose.yml");
|
||||
return false;
|
||||
}
|
||||
|
||||
const serviceNames = Object.keys(compose.services);
|
||||
this.log(`Found ${serviceNames.length} service(s): ${serviceNames.join(", ")}`, "debug");
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate service names follow best practices
|
||||
*/
|
||||
private validateServiceNames(services: Record<string, DefinitionsService>): void {
|
||||
Object.keys(services).forEach((serviceName) => {
|
||||
// Service names should be lowercase and use hyphens
|
||||
if (serviceName !== serviceName.toLowerCase()) {
|
||||
this.warning(
|
||||
`Service '${serviceName}': Service names should be lowercase. Consider using '${serviceName.toLowerCase()}'.`
|
||||
);
|
||||
}
|
||||
|
||||
// Service names should not contain underscores (use hyphens instead)
|
||||
if (serviceName.includes("_")) {
|
||||
this.warning(
|
||||
`Service '${serviceName}': Service names should use hyphens instead of underscores. Consider using '${serviceName.replace(/_/g, "-")}'.`
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Main validation method
|
||||
*/
|
||||
validate(): ValidationResult {
|
||||
if (!this.options.composePath) {
|
||||
this.error("composePath option is required");
|
||||
if (this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
return { valid: false, errors: this.errors, warnings: this.warnings };
|
||||
}
|
||||
|
||||
const composePath = this.options.composePath;
|
||||
const templateName = path.basename(path.dirname(composePath));
|
||||
|
||||
this.log(`Validating docker-compose.yml: ${templateName}`);
|
||||
|
||||
// Parse and validate syntax
|
||||
if (!this.validateDockerComposeSyntax(composePath)) {
|
||||
if (this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
return { valid: false, errors: this.errors, warnings: this.warnings };
|
||||
}
|
||||
|
||||
const compose = this.parseCompose(composePath);
|
||||
if (!compose) {
|
||||
if (this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
return { valid: false, errors: this.errors, warnings: this.warnings };
|
||||
}
|
||||
|
||||
// Validate services exist
|
||||
if (!this.validateServicesExist(compose)) {
|
||||
if (this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
return { valid: false, errors: this.errors, warnings: this.warnings };
|
||||
}
|
||||
|
||||
const services = compose.services || {};
|
||||
|
||||
// Run all validations
|
||||
this.validateNoContainerName(services);
|
||||
this.validateNoExplicitNetworks(compose, services);
|
||||
this.validatePortsFormat(services);
|
||||
this.validateServiceNames(services);
|
||||
|
||||
// Show summary
|
||||
if (this.errors.length === 0) {
|
||||
this.log("Docker Compose file structure is valid", "success");
|
||||
|
||||
if (this.options.verbose) {
|
||||
this.log("📋 Services found:", "info");
|
||||
Object.keys(services).forEach((serviceName) => {
|
||||
const service = services[serviceName];
|
||||
const image = typeof service.image === "string" ? service.image : "N/A";
|
||||
this.log(` - ${serviceName}: ${image}`, "debug");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const valid = this.errors.length === 0;
|
||||
|
||||
if (!valid && this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return { valid, errors: this.errors, warnings: this.warnings };
|
||||
}
|
||||
}
|
||||
|
||||
// CLI usage: parse argv, build a validator, and exit 0/1 with the result.
if (require.main === module) {
	const args = process.argv.slice(2);
	const options: DockerComposeValidatorOptions = {};
	let composePath: string | null = null;

	// Parse command line arguments
	for (let i = 0; i < args.length; i++) {
		const arg = args[i];
		switch (arg) {
			case "--file":
			case "-f":
				// Consume the next argument as the flag's value
				composePath = args[++i];
				break;
			case "--verbose":
			case "-v":
				options.verbose = true;
				break;
			case "--help":
			case "-h":
				console.log(`
Usage: tsx validate-docker-compose.ts [options]

Options:
  -f, --file <path>    Docker Compose file path (required)
  -v, --verbose        Verbose output
  -h, --help           Show this help message

Examples:
  tsx validate-docker-compose.ts --file blueprints/grafana/docker-compose.yml
  tsx validate-docker-compose.ts -f blueprints/grafana/docker-compose.yml --verbose
`);
				process.exit(0);
				break;
		}
	}

	// --file is mandatory; fail fast with a usage hint
	if (!composePath) {
		console.error("❌ Error: --file option is required");
		console.error("Use --help for usage information");
		process.exit(1);
	}

	const validator = new DockerComposeValidator({
		composePath,
		...options,
	});

	const result = validator.validate();

	// Exit with appropriate code
	process.exit(result.valid ? 0 : 1);
}

export default DockerComposeValidator;
|
||||
|
||||
625
build-scripts/validate-template.ts
Normal file
625
build-scripts/validate-template.ts
Normal file
@@ -0,0 +1,625 @@
|
||||
#!/usr/bin/env tsx
|
||||
|
||||
/**
|
||||
* Validation script for template.toml and docker-compose.yml files
|
||||
* Validates structure, syntax, and consistency between files
|
||||
*/
|
||||
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import { parse } from "toml";
|
||||
import * as yaml from "yaml";
|
||||
import type { ComposeSpecification } from "./type";
|
||||
import { processVariables, processValue, type Schema } from "./helpers";
|
||||
|
||||
/** Options accepted by the TemplateValidator constructor. */
interface TemplateValidatorOptions {
	/** Directory containing template.toml and docker-compose.yml. */
	templateDir?: string | null;
	// NOTE(review): composeServices is stored but the visible validation path
	// re-parses docker-compose.yml itself — confirm whether this is still used.
	composeServices?: string[] | null;
	/** Emit debug-level log output. */
	verbose?: boolean;
	/** When true (the default), call process.exit(1) on validation failure. */
	exitOnError?: boolean;
}
|
||||
|
||||
/** Outcome of a validation run. */
interface ValidationResult {
	/** True when no errors were recorded; warnings do not affect validity. */
	valid: boolean;
	/** Fatal problems found. */
	errors: string[];
	/** Non-fatal issues found. */
	warnings: string[];
}
|
||||
|
||||
/** One entry of `[[config.domains]]` in template.toml. */
interface DomainConfig {
	/** Name of the docker-compose service the domain routes to (required). */
	serviceName?: string;
	/** Container port (required); TOML may carry it as a string. */
	port?: number | string;
	/** Host pattern (required), usually containing `${...}` variable syntax. */
	host?: string;
	// NOTE(review): presumably a URL path prefix; not validated in this script.
	path?: string;
}
|
||||
|
||||
/** One entry of `[[config.mounts]]`: a file the template writes on deploy. */
interface MountConfig {
	/** Destination file path (required); may contain `${...}` variables. */
	filePath?: string;
	/** File contents (required); may contain `${...}` variables. */
	content?: string;
}
|
||||
|
||||
/** Shape of a parsed template.toml (only the parts this script validates). */
interface TemplateData {
	/** `[variables]` table: name -> string value, possibly using `${helper}` syntax. */
	variables?: Record<string, string>;
	/** The `[config]` section. */
	config?: {
		domains?: DomainConfig[];
		// Dokploy's processEnvVars accepts both array and object forms.
		env?: string[] | Record<string, string | boolean | number> | Array<string | Record<string, string | boolean | number>>;
		mounts?: MountConfig[];
	};
}
|
||||
|
||||
/** Severity levels understood by the logger; each maps to an emoji prefix. */
type LogLevel = "info" | "success" | "warning" | "error" | "debug";
|
||||
|
||||
class TemplateValidator {
|
||||
private options: Required<TemplateValidatorOptions>;
|
||||
private errors: string[] = [];
|
||||
private warnings: string[] = [];
|
||||
|
||||
constructor(options: TemplateValidatorOptions = {}) {
|
||||
this.options = {
|
||||
templateDir: options.templateDir || null,
|
||||
composeServices: options.composeServices || null,
|
||||
verbose: options.verbose || false,
|
||||
exitOnError: options.exitOnError !== false,
|
||||
...options,
|
||||
};
|
||||
}
|
||||
|
||||
private log(message: string, level: LogLevel = "info"): void {
|
||||
if (!this.options.verbose && level === "debug") return;
|
||||
|
||||
const prefix: Record<LogLevel, string> = {
|
||||
info: "🔍",
|
||||
success: "✅",
|
||||
warning: "⚠️",
|
||||
error: "❌",
|
||||
debug: "🔍",
|
||||
};
|
||||
|
||||
console.log(`${prefix[level] || "ℹ️"} ${message}`);
|
||||
}
|
||||
|
||||
private error(message: string): void {
|
||||
this.errors.push(message);
|
||||
this.log(message, "error");
|
||||
}
|
||||
|
||||
private warning(message: string): void {
|
||||
this.warnings.push(message);
|
||||
this.log(message, "warning");
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate helper syntax (based on Dokploy's processValue function)
|
||||
*/
|
||||
private validateHelper(helper: string, context: string = ""): void {
|
||||
const validHelpers = [
|
||||
"domain",
|
||||
"base64",
|
||||
"password",
|
||||
"hash",
|
||||
"uuid",
|
||||
"timestamp",
|
||||
"timestampms",
|
||||
"timestamps",
|
||||
"randomPort",
|
||||
"jwt",
|
||||
"username",
|
||||
"email",
|
||||
];
|
||||
|
||||
// Check if it's a helper with parameters
|
||||
if (helper.includes(":")) {
|
||||
const [helperName, ...params] = helper.split(":");
|
||||
|
||||
// Validate helper name
|
||||
if (!validHelpers.includes(helperName)) {
|
||||
// Might be a variable reference, which is valid
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate parameter formats
|
||||
if (helperName === "base64" || helperName === "password" || helperName === "hash") {
|
||||
// Format: helper:number
|
||||
const param = params[0];
|
||||
if (param && isNaN(parseInt(param, 10))) {
|
||||
this.warning(
|
||||
`${context}: helper '${helper}' has invalid parameter (should be a number)`
|
||||
);
|
||||
}
|
||||
} else if (helperName === "timestampms" || helperName === "timestamps") {
|
||||
// Format: timestampms:datetime or timestamps:datetime
|
||||
const datetime = params.join(":"); // Rejoin in case datetime has colons
|
||||
if (datetime) {
|
||||
// Try to parse as date
|
||||
const date = new Date(datetime);
|
||||
if (isNaN(date.getTime())) {
|
||||
this.warning(
|
||||
`${context}: helper '${helper}' has invalid datetime format`
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if (helperName === "jwt") {
|
||||
// Format: jwt:secret or jwt:secret:payload or jwt:length
|
||||
if (params.length > 0) {
|
||||
const firstParam = params[0];
|
||||
// If it's a number, it's jwt:length (deprecated but valid)
|
||||
if (!isNaN(parseInt(firstParam, 10))) {
|
||||
// Valid: jwt:32
|
||||
return;
|
||||
}
|
||||
// Otherwise it's jwt:secret or jwt:secret:payload
|
||||
// Both are valid
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Simple helper without parameters
|
||||
if (!validHelpers.includes(helper)) {
|
||||
// Might be a variable reference, which is valid
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse docker-compose.yml and extract service names
|
||||
*/
|
||||
private parseComposeServices(composePath: string): string[] {
|
||||
try {
|
||||
if (!fs.existsSync(composePath)) {
|
||||
this.warning(`docker-compose.yml not found at ${composePath}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
const content = fs.readFileSync(composePath, "utf8");
|
||||
const compose = yaml.parse(content) as ComposeSpecification;
|
||||
|
||||
if (!compose || typeof compose !== "object") {
|
||||
this.error(`Invalid docker-compose.yml structure at ${composePath}`);
|
||||
return [];
|
||||
}
|
||||
|
||||
// Extract service names using the official ComposeSpecification type
|
||||
const services = compose.services || {};
|
||||
const serviceNames = Object.keys(services);
|
||||
|
||||
if (serviceNames.length === 0) {
|
||||
this.warning(`No services found in docker-compose.yml at ${composePath}`);
|
||||
}
|
||||
|
||||
return serviceNames;
|
||||
} catch (error: any) {
|
||||
this.error(
|
||||
`Failed to parse docker-compose.yml at ${composePath}: ${error.message}`
|
||||
);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate template.toml structure
|
||||
*/
|
||||
private validateTemplate(tomlPath: string, composeServices: string[] | null = null): boolean {
|
||||
try {
|
||||
if (!fs.existsSync(tomlPath)) {
|
||||
this.error(`template.toml not found at ${tomlPath}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Parse TOML
|
||||
let data: TemplateData;
|
||||
try {
|
||||
const content = fs.readFileSync(tomlPath, "utf8");
|
||||
data = parse(content) as TemplateData;
|
||||
} catch (parseError: any) {
|
||||
this.error(
|
||||
`Invalid TOML syntax in ${tomlPath}: ${parseError.message}`
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate [config] section exists
|
||||
if (!data.config) {
|
||||
this.error("Missing [config] section in template.toml");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate domains
|
||||
if (data.config.domains) {
|
||||
if (!Array.isArray(data.config.domains)) {
|
||||
this.error("config.domains must be an array");
|
||||
return false;
|
||||
}
|
||||
|
||||
data.config.domains.forEach((domain, index) => {
|
||||
// Required fields
|
||||
if (!domain.serviceName) {
|
||||
this.error(`domain[${index}]: Missing required field 'serviceName'`);
|
||||
}
|
||||
if (domain.port === undefined || domain.port === null) {
|
||||
this.error(`domain[${index}]: Missing required field 'port'`);
|
||||
}
|
||||
if (!domain.host) {
|
||||
this.error(`domain[${index}]: Missing required field 'host'`);
|
||||
}
|
||||
|
||||
// Validate serviceName matches docker-compose.yml services
|
||||
if (domain.serviceName && composeServices && composeServices.length > 0) {
|
||||
if (!composeServices.includes(domain.serviceName)) {
|
||||
this.error(
|
||||
`domain[${index}]: serviceName '${domain.serviceName}' not found in docker-compose.yml services. Available services: ${composeServices.join(", ")}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate port is a number
|
||||
if (domain.port !== undefined && domain.port !== null) {
|
||||
const port = typeof domain.port === "string"
|
||||
? parseInt(domain.port.replace(/_/g, ""), 10)
|
||||
: domain.port;
|
||||
|
||||
if (isNaN(Number(port)) || Number(port) < 1 || Number(port) > 65535) {
|
||||
this.warning(
|
||||
`domain[${index}]: port '${domain.port}' may be invalid (should be 1-65535)`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate host format (should contain ${} for variable substitution)
|
||||
if (domain.host && typeof domain.host === "string") {
|
||||
if (!domain.host.includes("${")) {
|
||||
this.warning(
|
||||
`domain[${index}]: host '${domain.host}' doesn't use variable syntax (e.g., \${main_domain} or \${domain})`
|
||||
);
|
||||
} else {
|
||||
// Validate helpers in host
|
||||
const helperPattern = /\${([^}]+)}/g;
|
||||
let match: RegExpExecArray | null;
|
||||
while ((match = helperPattern.exec(domain.host)) !== null) {
|
||||
this.validateHelper(match[1], `domain[${index}].host`);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
this.warning("No domains configured in template.toml");
|
||||
}
|
||||
|
||||
// Validate env - can be array or object (as per Dokploy's processEnvVars)
|
||||
if (data.config.env !== undefined) {
|
||||
if (Array.isArray(data.config.env)) {
|
||||
// Array format: ["KEY=VALUE", ...]
|
||||
data.config.env.forEach((env, index) => {
|
||||
if (typeof env === "string") {
|
||||
if (!env.includes("=")) {
|
||||
this.warning(
|
||||
`config.env[${index}]: '${env}' doesn't follow KEY=VALUE format`
|
||||
);
|
||||
}
|
||||
} else if (typeof env === "object" && env !== null) {
|
||||
// Object in array is also valid: [{"KEY": "VALUE"}, ...]
|
||||
const keys = Object.keys(env);
|
||||
if (keys.length === 0) {
|
||||
this.warning(`config.env[${index}]: empty object`);
|
||||
}
|
||||
} else if (typeof env !== "boolean" && typeof env !== "number") {
|
||||
this.error(
|
||||
`config.env[${index}]: must be a string, object, boolean, or number`
|
||||
);
|
||||
}
|
||||
});
|
||||
} else if (typeof data.config.env === "object" && data.config.env !== null) {
|
||||
// Object format: { KEY: "VALUE", ... }
|
||||
// This is valid - Dokploy handles both formats
|
||||
const envKeys = Object.keys(data.config.env);
|
||||
if (envKeys.length === 0) {
|
||||
this.warning("config.env is an empty object");
|
||||
}
|
||||
} else {
|
||||
this.error(
|
||||
"config.env must be an array or an object (as per Dokploy's processEnvVars)"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate mounts if present
|
||||
if (data.config.mounts) {
|
||||
if (!Array.isArray(data.config.mounts)) {
|
||||
this.error("config.mounts must be an array");
|
||||
} else {
|
||||
data.config.mounts.forEach((mount, index) => {
|
||||
if (!mount.filePath) {
|
||||
this.error(`config.mounts[${index}]: Missing required field 'filePath'`);
|
||||
} else if (typeof mount.filePath !== "string") {
|
||||
this.error(`config.mounts[${index}]: filePath must be a string`);
|
||||
}
|
||||
|
||||
if (mount.content === undefined) {
|
||||
this.error(`config.mounts[${index}]: Missing required field 'content'`);
|
||||
} else if (typeof mount.content !== "string") {
|
||||
this.error(`config.mounts[${index}]: content must be a string`);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Validate variables if present
|
||||
if (data.variables) {
|
||||
if (typeof data.variables !== "object" || Array.isArray(data.variables)) {
|
||||
this.error("variables must be an object");
|
||||
} else {
|
||||
// Validate variable values and helpers
|
||||
Object.entries(data.variables).forEach(([key, value]) => {
|
||||
if (typeof value !== "string") {
|
||||
this.error(`variables.${key}: must be a string`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate helpers in variable values
|
||||
const helperPattern = /\${([^}]+)}/g;
|
||||
let match: RegExpExecArray | null;
|
||||
while ((match = helperPattern.exec(value)) !== null) {
|
||||
const helper = match[1];
|
||||
this.validateHelper(helper, `variables.${key}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Try to process variables to ensure they resolve correctly
|
||||
try {
|
||||
const schema: Schema = {};
|
||||
const processedVars = processVariables(data.variables, schema);
|
||||
|
||||
// Check if any variables failed to resolve (still contain ${})
|
||||
Object.entries(processedVars).forEach(([key, value]) => {
|
||||
if (typeof value === "string" && value.includes("${")) {
|
||||
// Check if it's a valid variable reference or an error
|
||||
const unresolved = value.match(/\${([^}]+)}/g);
|
||||
if (unresolved) {
|
||||
unresolved.forEach((unresolvedVar) => {
|
||||
const varName = unresolvedVar.slice(2, -1);
|
||||
// Check if it's a reference to another variable that exists
|
||||
if (!data.variables![varName] && !varName.includes(":")) {
|
||||
this.warning(
|
||||
`variables.${key}: contains unresolved variable reference '${unresolvedVar}'`
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Validate that domains can be processed with resolved variables
|
||||
if (data.config.domains) {
|
||||
data.config.domains.forEach((domain, index) => {
|
||||
if (domain.host && typeof domain.host === "string") {
|
||||
try {
|
||||
const processedHost = processValue(domain.host, processedVars, schema);
|
||||
if (processedHost.includes("${")) {
|
||||
this.warning(
|
||||
`domain[${index}].host: could not fully resolve all variables. Result: ${processedHost}`
|
||||
);
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.warning(
|
||||
`domain[${index}].host: error processing host value: ${e.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Validate that env vars can be processed
|
||||
if (data.config.env) {
|
||||
if (Array.isArray(data.config.env)) {
|
||||
data.config.env.forEach((env, index) => {
|
||||
if (typeof env === "string") {
|
||||
try {
|
||||
const processed = processValue(env, processedVars, schema);
|
||||
if (processed.includes("${")) {
|
||||
this.warning(
|
||||
`config.env[${index}]: could not fully resolve all variables`
|
||||
);
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.warning(
|
||||
`config.env[${index}]: error processing env value: ${e.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (typeof data.config.env === "object") {
|
||||
Object.entries(data.config.env).forEach(([key, value]) => {
|
||||
if (typeof value === "string") {
|
||||
try {
|
||||
const processed = processValue(value, processedVars, schema);
|
||||
if (processed.includes("${")) {
|
||||
this.warning(
|
||||
`config.env.${key}: could not fully resolve all variables`
|
||||
);
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.warning(
|
||||
`config.env.${key}: error processing env value: ${e.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Validate that mounts can be processed
|
||||
if (data.config.mounts) {
|
||||
data.config.mounts.forEach((mount, index) => {
|
||||
if (mount.filePath && typeof mount.filePath === "string") {
|
||||
try {
|
||||
const processed = processValue(mount.filePath, processedVars, schema);
|
||||
if (processed.includes("${")) {
|
||||
this.warning(
|
||||
`config.mounts[${index}].filePath: could not fully resolve all variables`
|
||||
);
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.warning(
|
||||
`config.mounts[${index}].filePath: error processing filePath: ${e.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
if (mount.content && typeof mount.content === "string") {
|
||||
try {
|
||||
const processed = processValue(mount.content, processedVars, schema);
|
||||
if (processed.includes("${")) {
|
||||
this.warning(
|
||||
`config.mounts[${index}].content: could not fully resolve all variables`
|
||||
);
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.warning(
|
||||
`config.mounts[${index}].content: error processing content: ${e.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (this.options.verbose) {
|
||||
this.log("✅ Variables processed successfully", "success");
|
||||
this.log(`📋 Processed ${Object.keys(processedVars).length} variables`, "debug");
|
||||
}
|
||||
} catch (e: any) {
|
||||
this.error(`Failed to process variables: ${e.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.errors.length === 0;
|
||||
} catch (error: any) {
|
||||
this.error(`Error validating template.toml: ${error.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a template directory
|
||||
*/
|
||||
private validateTemplateDir(templateDir: string): ValidationResult {
|
||||
// Resolver rutas absolutas o relativas desde la raíz del proyecto
|
||||
const resolvedDir = path.isAbsolute(templateDir)
|
||||
? templateDir
|
||||
: path.resolve(process.cwd(), templateDir);
|
||||
|
||||
const templatePath = path.join(resolvedDir, "template.toml");
|
||||
const composePath = path.join(resolvedDir, "docker-compose.yml");
|
||||
|
||||
this.log(`Validating template: ${path.basename(resolvedDir)}`);
|
||||
|
||||
// Parse compose services first
|
||||
const composeServices = this.parseComposeServices(composePath);
|
||||
|
||||
// Validate template.toml
|
||||
const isValid = this.validateTemplate(templatePath, composeServices);
|
||||
|
||||
// Show summary
|
||||
if (isValid && this.errors.length === 0) {
|
||||
this.log("Template structure is valid", "success");
|
||||
|
||||
// Show domains info
|
||||
try {
|
||||
const content = fs.readFileSync(templatePath, "utf8");
|
||||
const data = parse(content) as TemplateData;
|
||||
if (data.config && data.config.domains) {
|
||||
this.log("📋 Domains configured:");
|
||||
data.config.domains.forEach((domain) => {
|
||||
const service = domain.serviceName || "N/A";
|
||||
const port = domain.port !== undefined ? domain.port : "N/A";
|
||||
const host = domain.host || "N/A";
|
||||
this.log(` - Service: ${service}, Port: ${port}, Host: ${host}`);
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore errors in summary
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: isValid && this.errors.length === 0,
|
||||
errors: this.errors,
|
||||
warnings: this.warnings,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Main validation method
|
||||
*/
|
||||
validate(): ValidationResult {
|
||||
if (!this.options.templateDir) {
|
||||
this.error("templateDir option is required");
|
||||
if (this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
return { valid: false, errors: this.errors, warnings: this.warnings };
|
||||
}
|
||||
|
||||
const result = this.validateTemplateDir(this.options.templateDir!);
|
||||
|
||||
if (!result.valid && this.options.exitOnError) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// CLI usage: parse argv, build a validator, and exit 0/1 with the result.
if (require.main === module) {
	const args = process.argv.slice(2);
	const options: TemplateValidatorOptions = {};
	let templateDir: string | null = null;

	// Parse command line arguments
	for (let i = 0; i < args.length; i++) {
		const arg = args[i];
		switch (arg) {
			case "--dir":
			case "-d":
				// Consume the next argument as the flag's value
				templateDir = args[++i];
				break;
			case "--verbose":
			case "-v":
				options.verbose = true;
				break;
			case "--help":
			case "-h":
				console.log(`
Usage: tsx validate-template.ts [options]

Options:
  -d, --dir <path>     Template directory path (required)
  -v, --verbose        Verbose output
  -h, --help           Show this help message

Examples:
  tsx validate-template.ts --dir blueprints/grafana
  tsx validate-template.ts -d blueprints/grafana --verbose
`);
				process.exit(0);
				break;
		}
	}

	// --dir is mandatory; fail fast with a usage hint
	if (!templateDir) {
		console.error("❌ Error: --dir option is required");
		console.error("Use --help for usage information");
		process.exit(1);
	}

	const validator = new TemplateValidator({
		templateDir,
		...options,
	});

	const result = validator.validate();

	// Exit with appropriate code
	process.exit(result.valid ? 0 : 1);
}

export default TemplateValidator;
|
||||
|
||||
Reference in New Issue
Block a user