Compare commits

...

4 Commits

Author             SHA1        Message                                                                  Date
Leendert de Borst  e881f9486a  Add parallel support to db-export command (#1415)                       2025-11-30 15:12:55 +00:00
Leendert de Borst  645fd605e6  Update PasswordGenerator.test.ts (#1413)                                 2025-11-30 12:08:22 +00:00
Leendert de Borst  254f0a1212  Improve browser extension autofill suggestion performance (#1413)       2025-11-30 12:08:22 +00:00
Leendert de Borst  64d29ebcd4  Update admin users list to show correct amount of email claims (#1411)  2025-11-30 11:17:16 +00:00
13 changed files with 254 additions and 92 deletions

View File

@@ -9,7 +9,7 @@ import { handleClipboardCopied, handleCancelClipboardClear, handleGetClipboardCl
import { setupContextMenus } from '@/entrypoints/background/ContextMenu';
import { handleGetWebAuthnSettings, handleWebAuthnCreate, handleWebAuthnGet, handlePasskeyPopupResponse, handleGetRequestData } from '@/entrypoints/background/PasskeyHandler';
import { handleOpenPopup, handlePopupWithCredential, handleOpenPopupCreateCredential, handleToggleContextMenu } from '@/entrypoints/background/PopupMessageHandler';
import { handleCheckAuthStatus, handleClearPersistedFormValues, handleClearVault, handleCreateIdentity, handleGetCredentials, handleGetDefaultEmailDomain, handleGetDefaultIdentitySettings, handleGetEncryptionKey, handleGetEncryptionKeyDerivationParams, handleGetPasswordSettings, handleGetPersistedFormValues, handleGetVault, handlePersistFormValues, handleStoreEncryptionKey, handleStoreEncryptionKeyDerivationParams, handleStoreVault, handleSyncVault, handleUploadVault } from '@/entrypoints/background/VaultMessageHandler';
import { handleCheckAuthStatus, handleClearPersistedFormValues, handleClearVault, handleCreateIdentity, handleGetCredentials, handleGetFilteredCredentials, handleGetSearchCredentials, handleGetDefaultEmailDomain, handleGetDefaultIdentitySettings, handleGetEncryptionKey, handleGetEncryptionKeyDerivationParams, handleGetPasswordSettings, handleGetPersistedFormValues, handleGetVault, handlePersistFormValues, handleStoreEncryptionKey, handleStoreEncryptionKeyDerivationParams, handleStoreVault, handleSyncVault, handleUploadVault } from '@/entrypoints/background/VaultMessageHandler';
import { GLOBAL_CONTEXT_MENU_ENABLED_KEY } from '@/utils/Constants';
import { EncryptionKeyDerivationParams } from "@/utils/dist/shared/models/metadata";
@@ -28,6 +28,8 @@ export default defineBackground({
onMessage('GET_ENCRYPTION_KEY_DERIVATION_PARAMS', () => handleGetEncryptionKeyDerivationParams());
onMessage('GET_VAULT', () => handleGetVault());
onMessage('GET_CREDENTIALS', () => handleGetCredentials());
onMessage('GET_FILTERED_CREDENTIALS', ({ data }) => handleGetFilteredCredentials(data as { currentUrl: string, pageTitle: string, matchingMode?: string }));
onMessage('GET_SEARCH_CREDENTIALS', ({ data }) => handleGetSearchCredentials(data as { searchTerm: string }));
onMessage('GET_DEFAULT_EMAIL_DOMAIN', () => handleGetDefaultEmailDomain());
onMessage('GET_DEFAULT_IDENTITY_SETTINGS', () => handleGetDefaultIdentitySettings());

View File

@@ -4,12 +4,12 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { handleGetEncryptionKey } from '@/entrypoints/background/VaultMessageHandler';
import { extractDomain, extractRootDomain } from '@/entrypoints/contentScript/CredentialMatcher';
import {
PASSKEY_PROVIDER_ENABLED_KEY,
PASSKEY_DISABLED_SITES_KEY
} from '@/utils/Constants';
import { extractDomain, extractRootDomain } from '@/utils/credentialMatcher/CredentialMatcher';
import { EncryptionUtility } from '@/utils/EncryptionUtility';
import { PasskeyHelper } from '@/utils/passkey/PasskeyHelper';
import type {

View File

@@ -264,6 +264,101 @@ export async function handleGetCredentials(
}
}
/**
* Get credentials filtered by URL and page title for autofill performance optimization.
* Filters credentials in the background script before sending to reduce message payload size.
* Critical for large vaults (1000+ credentials) to avoid multi-second delays.
*
* @param message - Filtering parameters: currentUrl, pageTitle, matchingMode
*/
export async function handleGetFilteredCredentials(
message: { currentUrl: string, pageTitle: string, matchingMode?: string }
) : Promise<messageCredentialsResponse> {
const encryptionKey = await handleGetEncryptionKey();
if (!encryptionKey) {
return { success: false, error: await t('common.errors.vaultIsLocked') };
}
try {
const sqliteClient = await createVaultSqliteClient();
const allCredentials = sqliteClient.getAllCredentials();
// Import filtering logic
const { filterCredentials, AutofillMatchingMode } = await import('@/utils/credentialMatcher/CredentialMatcher');
// Parse matching mode from string
let matchingMode = AutofillMatchingMode.DEFAULT;
if (message.matchingMode) {
matchingMode = message.matchingMode as typeof AutofillMatchingMode[keyof typeof AutofillMatchingMode];
}
// Filter credentials in background to reduce payload size (~95% reduction)
const filteredCredentials = filterCredentials(
allCredentials,
message.currentUrl,
message.pageTitle,
matchingMode
);
return { success: true, credentials: filteredCredentials };
} catch (error) {
console.error('Error getting filtered credentials:', error);
return { success: false, error: await t('common.errors.unknownError') };
}
}
/**
* Get credentials filtered by text search query.
* Searches across entire vault (service name, username, email, URL) and returns matches.
*
* @param message - Search parameters: searchTerm
*/
export async function handleGetSearchCredentials(
message: { searchTerm: string }
) : Promise<messageCredentialsResponse> {
const encryptionKey = await handleGetEncryptionKey();
if (!encryptionKey) {
return { success: false, error: await t('common.errors.vaultIsLocked') };
}
try {
const sqliteClient = await createVaultSqliteClient();
const allCredentials = sqliteClient.getAllCredentials();
// If search term is empty, return empty array
if (!message.searchTerm || message.searchTerm.trim() === '') {
return { success: true, credentials: [] };
}
const searchTerm = message.searchTerm.toLowerCase().trim();
// Filter credentials by search term across multiple fields
const searchResults = allCredentials.filter(cred => {
const searchableFields = [
cred.ServiceName?.toLowerCase(),
cred.Username?.toLowerCase(),
cred.Alias?.Email?.toLowerCase(),
cred.ServiceUrl?.toLowerCase()
];
return searchableFields.some(field => field?.includes(searchTerm));
}).sort((a, b) => {
// Sort by service name, then username
const serviceNameComparison = (a.ServiceName ?? '').localeCompare(b.ServiceName ?? '');
if (serviceNameComparison !== 0) {
return serviceNameComparison;
}
return (a.Username ?? '').localeCompare(b.Username ?? '');
});
return { success: true, credentials: searchResults };
} catch (error) {
console.error('Error searching credentials:', error);
return { success: false, error: await t('common.errors.unknownError') };
}
}
/**
* Create an identity.
*/
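
For reference, a minimal sketch of how a content script could consume the two new handlers above via webext-bridge, assuming the message names and payload shapes shown in this diff; the helper function names and the CredentialsResponse shape are illustrative, not part of the change:

import { sendMessage } from 'webext-bridge/content-script';
import { AutofillMatchingMode } from '@/utils/credentialMatcher/CredentialMatcher';
import type { Credential } from '@/utils/dist/shared/models/vault';

// Illustrative response shape, inferred from the handlers above.
type CredentialsResponse = {
  success: boolean;
  credentials?: Credential[];
  error?: string;
};

// Ask the background script for credentials already filtered against the current page,
// so only the matching subset crosses the message channel.
async function fetchFilteredCredentials(matchingMode: AutofillMatchingMode): Promise<Credential[]> {
  const response = await sendMessage('GET_FILTERED_CREDENTIALS', {
    currentUrl: window.location.href,
    pageTitle: document.title,
    matchingMode: matchingMode
  }, 'background') as CredentialsResponse;
  return response.success ? response.credentials ?? [] : [];
}

// Ask the background script to search the full vault for a free-text term.
async function searchVaultCredentials(searchTerm: string): Promise<Credential[]> {
  const response = await sendMessage('GET_SEARCH_CREDENTIALS', {
    searchTerm: searchTerm
  }, 'background') as CredentialsResponse;
  return response.success ? response.credentials ?? [] : [];
}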

View File

@@ -1,9 +1,9 @@
import { sendMessage } from 'webext-bridge/content-script';
import { filterCredentials, AutofillMatchingMode } from '@/entrypoints/contentScript/CredentialMatcher';
import { fillCredential } from '@/entrypoints/contentScript/Form';
import { DISABLED_SITES_KEY, TEMPORARY_DISABLED_SITES_KEY, GLOBAL_AUTOFILL_POPUP_ENABLED_KEY, VAULT_LOCKED_DISMISS_UNTIL_KEY, AUTOFILL_MATCHING_MODE_KEY, CUSTOM_EMAIL_HISTORY_KEY, CUSTOM_USERNAME_HISTORY_KEY } from '@/utils/Constants';
import { AutofillMatchingMode } from '@/utils/credentialMatcher/CredentialMatcher';
import { CreateIdentityGenerator } from '@/utils/dist/shared/identity-generator';
import type { Credential } from '@/utils/dist/shared/models/vault';
import { CreatePasswordGenerator, PasswordGenerator, PasswordSettings } from '@/utils/dist/shared/password-generator';
@@ -49,7 +49,14 @@ export function openAutofillPopup(input: HTMLInputElement, container: HTMLElemen
document.addEventListener('keydown', handleEnterKey);
(async () : Promise<void> => {
const response = await sendMessage('GET_CREDENTIALS', { }, 'background') as CredentialsResponse;
// Load autofill matching mode setting to send to background for filtering
const matchingMode = await storage.getItem(AUTOFILL_MATCHING_MODE_KEY) as AutofillMatchingMode ?? AutofillMatchingMode.DEFAULT;
const response = await sendMessage('GET_FILTERED_CREDENTIALS', {
currentUrl: window.location.href,
pageTitle: document.title,
matchingMode: matchingMode
}, 'background') as CredentialsResponse;
if (response.success) {
await createAutofillPopup(input, response.credentials, container);
@@ -182,22 +189,12 @@ export async function createAutofillPopup(input: HTMLInputElement, credentials:
credentialList.className = 'av-credential-list';
popup.appendChild(credentialList);
// Add initial credentials
// Add initial credentials (already filtered by background script for performance)
if (!credentials) {
credentials = [];
}
// Load autofill matching mode setting
const matchingMode = await storage.getItem(AUTOFILL_MATCHING_MODE_KEY) as AutofillMatchingMode ?? AutofillMatchingMode.DEFAULT;
const filteredCredentials = filterCredentials(
credentials,
window.location.href,
document.title,
matchingMode
);
updatePopupContent(filteredCredentials, credentialList, input, rootContainer, noMatchesText);
updatePopupContent(credentials, credentialList, input, rootContainer, noMatchesText);
// Add divider
const divider = document.createElement('div');
@@ -549,62 +546,41 @@ export async function createVaultLockedPopup(input: HTMLInputElement, rootContai
}
/**
* Handle popup search input by filtering credentials based on the search term.
* Handle popup search input - searches entire vault when user types.
* When empty, shows the initially URL-filtered credentials.
* When user types, searches ALL credentials in vault (not just the pre-filtered set).
*
* @param searchInput - The search input element
* @param initialCredentials - The initially URL-filtered credentials to show when search is empty
* @param rootContainer - The root container element
* @param searchTimeout - Timeout for debouncing search
* @param credentialList - The credential list element to update
* @param input - The input field that triggered the popup
* @param noMatchesText - Text to show when no matches found
*/
async function handleSearchInput(searchInput: HTMLInputElement, credentials: Credential[], rootContainer: HTMLElement, searchTimeout: NodeJS.Timeout | null, credentialList: HTMLElement | null, input: HTMLInputElement, noMatchesText?: string) : Promise<void> {
async function handleSearchInput(searchInput: HTMLInputElement, initialCredentials: Credential[], rootContainer: HTMLElement, searchTimeout: NodeJS.Timeout | null, credentialList: HTMLElement | null, input: HTMLInputElement, noMatchesText?: string) : Promise<void> {
if (searchTimeout) {
clearTimeout(searchTimeout);
}
const searchTerm = searchInput.value.toLowerCase();
// Ensure we have unique credentials
const uniqueCredentials = Array.from(new Map(credentials.map(cred => [cred.Id, cred])).values());
let filteredCredentials;
const searchTerm = searchInput.value.trim();
if (searchTerm === '') {
// Load autofill matching mode setting
const matchingMode = await storage.getItem(AUTOFILL_MATCHING_MODE_KEY) as AutofillMatchingMode ?? AutofillMatchingMode.DEFAULT;
// If search is empty, use original URL-based filtering
filteredCredentials = filterCredentials(
uniqueCredentials,
window.location.href,
document.title,
matchingMode
).sort((a, b) => {
// First compare by service name
const serviceNameComparison = (a.ServiceName ?? '').localeCompare(b.ServiceName ?? '');
if (serviceNameComparison !== 0) {
return serviceNameComparison;
}
// If service names are equal, compare by username/nickname
return (a.Username ?? '').localeCompare(b.Username ?? '');
});
// If search is empty, show the initially URL-filtered credentials
updatePopupContent(initialCredentials, credentialList, input, rootContainer, noMatchesText);
} else {
// Otherwise filter based on search term
filteredCredentials = uniqueCredentials.filter(cred => {
const searchableFields = [
cred.ServiceName?.toLowerCase(),
cred.Username?.toLowerCase(),
cred.Alias?.Email?.toLowerCase(),
cred.ServiceUrl?.toLowerCase()
];
return searchableFields.some(field => field?.includes(searchTerm));
}).sort((a, b) => {
// First compare by service name
const serviceNameComparison = (a.ServiceName ?? '').localeCompare(b.ServiceName ?? '');
if (serviceNameComparison !== 0) {
return serviceNameComparison;
}
// Search in full vault with search term
const response = await sendMessage('GET_SEARCH_CREDENTIALS', {
searchTerm: searchTerm
}, 'background') as CredentialsResponse;
// If service names are equal, compare by username/nickname
return (a.Username ?? '').localeCompare(b.Username ?? '');
});
if (response.success && response.credentials) {
updatePopupContent(response.credentials, credentialList, input, rootContainer, noMatchesText);
} else {
// On error, fallback to showing initial filtered credentials
updatePopupContent(initialCredentials, credentialList, input, rootContainer, noMatchesText);
}
}
// Update popup content with filtered results
updatePopupContent(filteredCredentials, credentialList, input, rootContainer, noMatchesText);
}
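
The handler above clears a pending timeout before hitting the background script; a hypothetical sketch of that debounce pattern around the full-vault search round-trip (the function name and the 150 ms delay are assumptions, not taken from Autofill.ts):

import { sendMessage } from 'webext-bridge/content-script';
import type { Credential } from '@/utils/dist/shared/models/vault';

// Hypothetical wiring: debounce keystrokes so the GET_SEARCH_CREDENTIALS round-trip
// only fires once the user pauses typing.
function attachDebouncedSearch(
  searchInput: HTMLInputElement,
  onResults: (credentials: Credential[]) => void,
  delayMs = 150
): void {
  let timeout: ReturnType<typeof setTimeout> | null = null;

  searchInput.addEventListener('input', () => {
    if (timeout) {
      clearTimeout(timeout);
    }
    timeout = setTimeout(async () => {
      const searchTerm = searchInput.value.trim();
      if (searchTerm === '') {
        // Caller decides what to show for an empty term
        // (handleSearchInput above shows the initial URL-filtered set).
        onResults([]);
        return;
      }
      const response = await sendMessage('GET_SEARCH_CREDENTIALS', { searchTerm }, 'background') as {
        success: boolean; credentials?: Credential[];
      };
      onResults(response.success ? response.credentials ?? [] : []);
    }, delayMs);
  });
}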
/**

View File

@@ -3,7 +3,6 @@ import { useTranslation } from 'react-i18next';
import { useLocation } from 'react-router-dom';
import { sendMessage } from 'webext-bridge/popup';
import { extractDomain, extractRootDomain } from '@/entrypoints/contentScript/CredentialMatcher';
import Button from '@/entrypoints/popup/components/Button';
import PasskeyBypassDialog from '@/entrypoints/popup/components/Dialogs/PasskeyBypassDialog';
import LoadingSpinner from '@/entrypoints/popup/components/LoadingSpinner';
@@ -12,6 +11,7 @@ import { useLoading } from '@/entrypoints/popup/context/LoadingContext';
import { useVaultLockRedirect } from '@/entrypoints/popup/hooks/useVaultLockRedirect';
import { PASSKEY_DISABLED_SITES_KEY } from '@/utils/Constants';
import { extractDomain, extractRootDomain } from '@/utils/credentialMatcher/CredentialMatcher';
import { PasskeyAuthenticator } from '@/utils/passkey/PasskeyAuthenticator';
import { PasskeyHelper } from '@/utils/passkey/PasskeyHelper';
import type { GetRequest, PasskeyGetCredentialResponse, PendingPasskeyGetRequest, StoredPasskeyRecord } from '@/utils/passkey/types';

View File

@@ -3,7 +3,6 @@ import { useTranslation } from 'react-i18next';
import { useLocation } from 'react-router-dom';
import { sendMessage } from 'webext-bridge/popup';
import { extractDomain, extractRootDomain } from '@/entrypoints/contentScript/CredentialMatcher';
import Alert from '@/entrypoints/popup/components/Alert';
import Button from '@/entrypoints/popup/components/Button';
import PasskeyBypassDialog from '@/entrypoints/popup/components/Dialogs/PasskeyBypassDialog';
@@ -16,6 +15,7 @@ import { useVaultLockRedirect } from '@/entrypoints/popup/hooks/useVaultLockRedi
import { useVaultMutate } from '@/entrypoints/popup/hooks/useVaultMutate';
import { PASSKEY_DISABLED_SITES_KEY } from '@/utils/Constants';
import { extractDomain, extractRootDomain } from '@/utils/credentialMatcher/CredentialMatcher';
import type { Passkey } from '@/utils/dist/shared/models/vault';
import { PasskeyAuthenticator } from '@/utils/passkey/PasskeyAuthenticator';
import { PasskeyHelper } from '@/utils/passkey/PasskeyHelper';

View File

@@ -1,7 +1,6 @@
import React, { useEffect, useState, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { AutofillMatchingMode } from '@/entrypoints/contentScript/CredentialMatcher';
import { useLoading } from '@/entrypoints/popup/context/LoadingContext';
import {
@@ -10,6 +9,7 @@ import {
TEMPORARY_DISABLED_SITES_KEY,
AUTOFILL_MATCHING_MODE_KEY
} from '@/utils/Constants';
import { AutofillMatchingMode } from '@/utils/credentialMatcher/CredentialMatcher';
import { storage, browser } from "#imports";

View File

@@ -1,13 +1,13 @@
import React, { useEffect, useState, useCallback } from 'react';
import { useTranslation } from 'react-i18next';
import { extractDomain, extractRootDomain } from '@/entrypoints/contentScript/CredentialMatcher';
import { useLoading } from '@/entrypoints/popup/context/LoadingContext';
import {
PASSKEY_PROVIDER_ENABLED_KEY,
PASSKEY_DISABLED_SITES_KEY
} from '@/utils/Constants';
import { extractDomain, extractRootDomain } from '@/utils/credentialMatcher/CredentialMatcher';
import { storage, browser } from "#imports";

View File

@@ -1,9 +1,8 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { filterCredentials } from '@/utils/credentialMatcher/CredentialMatcher';
import type { Credential } from '@/utils/dist/shared/models/vault';
import { filterCredentials } from '../CredentialMatcher';
describe('CredentialMatcher - Credential URL Matching', () => {
let testCredentials: Credential[];

View File

@@ -218,11 +218,7 @@ else
v.RevisionNumber,
CredentialCount = v.CredentialsCount,
}),
EmailClaims = u.EmailClaims.Select(ec => new
{
ec.CreatedAt,
ec.Address
}),
EmailClaimCount = u.EmailClaims.Count(),
})
.ToListAsync(cancellationToken);
@@ -247,7 +243,7 @@ else
IsInactive = isInactive,
VaultCount = user.Vaults.Count(),
CredentialCount = user.Vaults.OrderByDescending(x => x.RevisionNumber).First().CredentialCount,
EmailClaimCount = user.EmailClaims.Count(),
EmailClaimCount = user.EmailClaimCount,
VaultStorageInKb = user.Vaults.Sum(x => x.FileSize),
};
}).ToList();

View File

@@ -72,9 +72,10 @@ show_usage() {
printf " configure-dev-db Enable/disable development database (for local development only)\n"
printf "\n"
printf "Options:\n"
printf " --verbose Show detailed output\n"
printf " -y, --yes Automatic yes to prompts\n"
printf " --dev Target development database for db import/export operations"
printf " --verbose Show detailed output\n"
printf " -y, --yes Automatic yes to prompts\n"
printf " --dev Target development database for db import/export operations\n"
printf " --parallel=N Use pigz with N threads for faster compression (default: off, max: 32)\n"
printf "\n"
}
@@ -98,6 +99,7 @@ parse_args() {
FORCE_YES=false
COMMAND_ARG=""
DEV_DB=false
PARALLEL_JOBS=0 # 0 = use standard gzip, >0 = use pigz with N threads
if [ $# -eq 0 ]; then
show_usage
@@ -228,6 +230,14 @@ parse_args() {
DEV_DB=true
shift
;;
--parallel=*)
PARALLEL_JOBS="${1#*=}"
if ! [[ "$PARALLEL_JOBS" =~ ^[0-9]+$ ]] || [ "$PARALLEL_JOBS" -lt 1 ] || [ "$PARALLEL_JOBS" -gt 32 ]; then
echo "Error: Invalid --parallel value '$PARALLEL_JOBS'. Must be a number between 1 and 32"
exit 1
fi
shift
;;
*)
echo "Unknown option: $1"
show_usage
@@ -2816,18 +2826,30 @@ handle_db_export() {
# Check if output redirection is present
if [ -t 1 ]; then
printf "Usage: ./install.sh db-export [--dev] > backup.sql.gz\n" >&2
printf "Usage: ./install.sh db-export [OPTIONS] > backup.sql.gz\n" >&2
printf "\n" >&2
printf "Options:\n" >&2
printf " --dev Export from development database\n" >&2
printf " --dev Export from development database\n" >&2
printf " --parallel=N Use pigz with N threads for parallel compression (max: 32)\n" >&2
printf "\n" >&2
printf "Examples:\n" >&2
printf " ./install.sh db-export > my_backup_$(date +%Y%m%d).sql.gz\n" >&2
printf " ./install.sh db-export --dev > my_dev_backup_$(date +%Y%m%d).sql.gz\n" >&2
printf "Compression:\n" >&2
printf " Default (no --parallel) Uses standard gzip (slowest, lowest CPU usage)\n" >&2
printf " --parallel=X Uses pigz with X threads (~2x faster, good for production)\n" >&2
printf "\n" >&2
printf "Note: Parallel compression runs at lowest priority (nice/ionice) to minimize\n" >&2
printf " impact on production.\n" >&2
printf "\n" >&2
exit 1
fi
# Create temporary file for export
temp_export_file=$(mktemp)
trap 'rm -f "$temp_export_file"' EXIT INT TERM
# Start timing
export_start_time=$(date +%s)
# Determine docker compose command based on dev/prod
if [ "$DEV_DB" = true ]; then
# Check if dev containers are running
if ! docker compose -f dockerfiles/docker-compose.dev.yml -p aliasvault-dev ps postgres-dev --quiet 2>/dev/null | grep -q .; then
@@ -2841,8 +2863,8 @@ handle_db_export() {
exit 1
fi
printf "${CYAN}> Exporting development database...${NC}\n" >&2
docker compose -f dockerfiles/docker-compose.dev.yml -p aliasvault-dev exec postgres-dev pg_dump -U aliasvault aliasvault | gzip
DOCKER_CMD="docker compose -f dockerfiles/docker-compose.dev.yml -p aliasvault-dev exec -T postgres-dev"
DB_TYPE="development"
else
# Production database export logic
if ! docker compose ps --quiet 2>/dev/null | grep -q .; then
@@ -2855,12 +2877,67 @@ handle_db_export() {
exit 1
fi
printf "${CYAN}> Exporting production database...${NC}\n" >&2
docker compose exec postgres pg_dump -U aliasvault aliasvault | gzip
DOCKER_CMD="docker compose exec -T postgres"
DB_TYPE="production"
fi
if [ $? -eq 0 ]; then
# Execute export based on parallel setting
if [ "$PARALLEL_JOBS" -gt 0 ]; then
printf "${CYAN}> Exporting ${DB_TYPE} database (with ${PARALLEL_JOBS}-thread parallel compression)...${NC}\n" >&2
# Use pigz for parallel compression
$DOCKER_CMD bash -c "
# Install pigz if not available (for parallel gzip)
if ! command -v pigz >/dev/null 2>&1; then
if command -v apk >/dev/null 2>&1; then
apk add --no-cache pigz >/dev/null 2>&1 || true
elif command -v apt-get >/dev/null 2>&1; then
apt-get update >/dev/null 2>&1 && apt-get install -y pigz >/dev/null 2>&1 || true
fi
fi
# Dump with pigz parallel compression (or fallback to gzip -1 if pigz install failed)
# Use nice (lowest CPU priority) and ionice (lowest I/O priority) to minimize impact
if command -v pigz >/dev/null 2>&1; then
ionice -c 3 nice -n 19 pg_dump -U aliasvault aliasvault | ionice -c 3 nice -n 19 pigz -1 -p ${PARALLEL_JOBS} 2>/dev/null || \
nice -n 19 pg_dump -U aliasvault aliasvault | nice -n 19 pigz -1 -p ${PARALLEL_JOBS}
else
ionice -c 3 nice -n 19 pg_dump -U aliasvault aliasvault | ionice -c 3 nice -n 19 gzip -1 2>/dev/null || \
nice -n 19 pg_dump -U aliasvault aliasvault | nice -n 19 gzip -1
fi
" > "$temp_export_file" 2>/dev/null
export_status=$?
else
# Default: standard gzip (backwards compatible)
printf "${CYAN}> Exporting ${DB_TYPE} database (standard compression)...${NC}\n" >&2
$DOCKER_CMD nice -n 19 pg_dump -U aliasvault aliasvault | gzip -1 > "$temp_export_file"
export_status=$?
fi
# End timing
export_end_time=$(date +%s)
export_duration=$((export_end_time - export_start_time))
# Get filesize
if [ -f "$temp_export_file" ]; then
export_filesize=$(wc -c < "$temp_export_file")
export_filesize_mb=$(awk "BEGIN {printf \"%.2f\", $export_filesize/1024/1024}")
fi
if [ $export_status -eq 0 ]; then
# Output the file to stdout
cat "$temp_export_file"
printf "${GREEN}> Database exported successfully.${NC}\n" >&2
printf "${CYAN}> Export format: SQL (.sql.gz)${NC}\n" >&2
if [ "$PARALLEL_JOBS" -gt 0 ]; then
printf "${CYAN}> Compression: pigz with ${PARALLEL_JOBS} threads${NC}\n" >&2
else
printf "${CYAN}> Compression: gzip (standard)${NC}\n" >&2
fi
printf "${CYAN}> Export duration: ${export_duration}s${NC}\n" >&2
if [ -n "$export_filesize_mb" ]; then
printf "${CYAN}> Export filesize: ${export_filesize_mb} MB (compressed)${NC}\n" >&2
fi
else
printf "${RED}> Failed to export database.${NC}\n" >&2
exit 1
@@ -2892,9 +2969,9 @@ handle_db_import() {
printf " --dev Import to development database\n"
printf "\n"
printf "Examples:\n"
printf " ./install.sh db-import < backup.sql.gz # Import gzipped backup\n"
printf " ./install.sh db-import < backup.sql # Import plain SQL backup\n"
printf " ./install.sh db-import --dev < backup.sql # Import to dev database\n"
printf " ./install.sh db-import < backup.sql.gz # Import gzipped SQL (standard)\n"
printf " ./install.sh db-import < backup.sql # Import plain SQL\n"
printf " ./install.sh db-import --dev < backup.sql.gz # Import to dev database\n"
exit 1
fi
@@ -2951,8 +3028,16 @@ handle_db_import() {
cat <&3 > "$temp_file" # Read from fd 3 instead of stdin
exec 3<&- # Close fd 3
# Detect if the file is gzipped or plain SQL
# Get input filesize
if [ -f "$temp_file" ]; then
import_filesize=$(wc -c < "$temp_file")
import_filesize_mb=$(awk "BEGIN {printf \"%.2f\", $import_filesize/1024/1024}")
printf "${CYAN}> Input file size: ${import_filesize_mb} MB${NC}\n"
fi
# Detect file format
is_gzipped=false
if gzip -t "$temp_file" 2>/dev/null; then
is_gzipped=true
printf "${CYAN}> Detected gzipped SQL backup${NC}\n"
@@ -2966,6 +3051,9 @@ handle_db_import() {
fi
fi
# Start timing
import_start_time=$(date +%s)
if [ "$DEV_DB" = true ]; then
if [ "$VERBOSE" = true ]; then
docker compose -f dockerfiles/docker-compose.dev.yml -p aliasvault-dev exec -T postgres-dev psql -U aliasvault postgres -c "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = 'aliasvault' AND pid <> pg_backend_pid();" && \
@@ -3009,10 +3097,16 @@ handle_db_import() {
fi
import_status=$?
# End timing
import_end_time=$(date +%s)
import_duration=$((import_end_time - import_start_time))
rm "$temp_file"
if [ $import_status -eq 0 ]; then
printf "${GREEN}> Database imported successfully.${NC}\n"
printf "${CYAN}> Import duration: ${import_duration}s${NC}\n"
if [ "$DEV_DB" != true ]; then
printf "${CYAN}> Starting services...${NC}\n"
if [ "$VERBOSE" = true ]; then

View File

@@ -131,7 +131,7 @@ describe('PasswordGenerator', () => {
expect(password).not.toMatch(/[Ss5]/);
expect(password).not.toMatch(/[Bb8]/);
expect(password).not.toMatch(/[Gg6]/);
expect(password).not.toMatch(/[[\]{}()<>]/);
expect(password).not.toMatch(/[\\[\]{}()<>]/);
expect(password).not.toMatch(/['"`]/);
expect(password).not.toMatch(/[;:,.]/);
expect(password).not.toMatch(/[_-]/);