Initial clean commit
This commit is contained in:
33
scripts/BFG.ps1
Normal file
33
scripts/BFG.ps1
Normal file
@@ -0,0 +1,33 @@
|
||||
# BFG.ps1 (run from C:\vibe\auctiora\scripts)
# Downloads the BFG repo-cleaner once, clones a bare mirror of the repo,
# strips all blobs larger than 50 MB from history, and force-pushes the
# rewritten history back to the origin.
$ErrorActionPreference = "Stop"

# 1) Download BFG jar once, next to this script
$bfgJar = Join-Path $PSScriptRoot "bfg.jar"
if (-not (Test-Path $bfgJar)) {
    Invoke-WebRequest `
        -Uri "https://repo1.maven.org/maven2/com/madgag/bfg/1.14.0/bfg-1.14.0.jar" `
        -OutFile $bfgJar
}

# BFG needs a Java runtime; fail fast with a clear message instead of a
# cryptic error from the shell when 'java' is not on PATH.
if (-not (Get-Command java -ErrorAction SilentlyContinue)) {
    throw "Java runtime not found on PATH - BFG requires Java to run."
}

# 2) Clone bare mirror next to project root: C:\vibe\auctiora\auctiora.git
$rootDir    = Join-Path $PSScriptRoot ".."
$mirrorPath = Join-Path $rootDir "auctiora.git"

if (Test-Path $mirrorPath) {
    Remove-Item $mirrorPath -Recurse -Force
}

git clone --mirror "https://git.appmodel.nl/Tour/auctiora.git" $mirrorPath

# 3) Run BFG in mirror, then expire reflogs and gc so the stripped blobs
#    are physically removed from the object store before pushing.
Push-Location $mirrorPath
try {
    java -jar $bfgJar --strip-blobs-bigger-than 50M .

    git reflog expire --expire=now --all
    git gc --prune=now --aggressive

    # 4) Force-push cleaned history
    git push --force
}
finally {
    # Always restore the caller's working directory, even if BFG or git fail.
    Pop-Location
}
|
||||
206
scripts/README.md
Normal file
206
scripts/README.md
Normal file
@@ -0,0 +1,206 @@
|
||||
# Auctiora Scripts
|
||||
|
||||
Utility scripts for managing the Auctiora auction monitoring system.
|
||||
|
||||
## 📦 Available Scripts
|
||||
|
||||
### 1. Production Data Sync
|
||||
|
||||
Sync production database and images from `athena.lan` to your local development environment.
|
||||
|
||||
#### Quick Start
|
||||
|
||||
**Linux/Mac (Bash)**:
|
||||
```bash
|
||||
# Make executable (first time only)
|
||||
chmod +x scripts/sync-production-data.sh
|
||||
|
||||
# Sync database only
|
||||
./scripts/sync-production-data.sh --db-only
|
||||
|
||||
# Sync everything
|
||||
./scripts/sync-production-data.sh --all
|
||||
|
||||
# Sync images only
|
||||
./scripts/sync-production-data.sh --images-only
|
||||
```
|
||||
|
||||
## 🔧 Prerequisites
|
||||
|
||||
### Required
|
||||
- **SSH Client**: OpenSSH or equivalent
|
||||
- Windows: Built-in on Windows 10+, or install [Git Bash](https://git-scm.com/downloads)
|
||||
- Linux/Mac: Pre-installed
|
||||
- **SCP**: Secure copy (usually comes with SSH)
|
||||
- **SSH Access**: SSH key configured for `tour@athena.lan`
|
||||
|
||||
### Optional
|
||||
- **rsync**: For efficient incremental image sync
|
||||
- Windows: Install via [WSL](https://docs.microsoft.com/en-us/windows/wsl/install) or [Cygwin](https://www.cygwin.com/)
|
||||
- Linux/Mac: Usually pre-installed
|
||||
- **sqlite3**: For showing database statistics
|
||||
- Windows: Download from [sqlite.org](https://www.sqlite.org/download.html)
|
||||
- Linux: `sudo apt install sqlite3`
|
||||
- Mac: Pre-installed
|
||||
|
||||
## 📊 What Gets Synced
|
||||
|
||||
### Database (`cache.db`)
|
||||
- **Size**: ~8.9 GB (as of Dec 2024)
|
||||
- **Contains**:
|
||||
- Auctions metadata
|
||||
- Lots (kavels) with bid information
|
||||
- Images metadata and URLs
|
||||
- HTTP cache for scraper
|
||||
- **Local Path**: `c:\mnt\okcomputer\cache.db`
|
||||
|
||||
### Images Directory
|
||||
- **Size**: Varies (can be large)
|
||||
- **Contains**:
|
||||
- Downloaded lot images
|
||||
- Organized by lot ID
|
||||
- **Local Path**: `c:\mnt\okcomputer\images\`
|
||||
|
||||
## 🚀 Usage Examples
|
||||
|
||||
## 📁 File Locations
|
||||
|
||||
### Remote (Production)
|
||||
```
|
||||
athena.lan
|
||||
├── Docker Volume: shared-auction-data
|
||||
│ ├── /data/cache.db (SQLite database)
|
||||
│ └── /data/images/ (Image files)
|
||||
└── /tmp/ (Temporary staging area)
|
||||
```
|
||||
|
||||
### Local (Development)
|
||||
```
|
||||
c:\mnt\okcomputer\
|
||||
├── cache.db (SQLite database)
|
||||
├── cache.db.backup-* (Automatic backups)
|
||||
└── images\ (Image files)
|
||||
```
|
||||
|
||||
## 🔒 Safety Features
|
||||
|
||||
### Automatic Backups
|
||||
- Existing local database is automatically backed up before sync
|
||||
- Backup format: `cache.db.backup-YYYYMMDD-HHMMSS`
|
||||
- Keep recent backups manually or clean up old ones
|
||||
|
||||
### Confirmation Prompts
|
||||
- PowerShell script prompts for confirmation (unless `-Force` is used)
|
||||
- Shows configuration before executing
|
||||
- Safe to cancel at any time
|
||||
|
||||
### Error Handling
|
||||
- Validates SSH connection before starting
|
||||
- Cleans up temporary files on remote server
|
||||
- Reports clear error messages
|
||||
|
||||
## ⚡ Performance Tips
|
||||
|
||||
### Faster Image Sync with rsync
|
||||
Install rsync for incremental image sync (only new/changed files):
|
||||
|
||||
**Windows (WSL)**:
|
||||
```powershell
|
||||
wsl --install
|
||||
wsl -d Ubuntu
|
||||
sudo apt install rsync
|
||||
```
|
||||
|
||||
**Windows (Chocolatey)**:
|
||||
```powershell
|
||||
choco install rsync
|
||||
```
|
||||
|
||||
**Benefit**: First sync downloads everything, subsequent syncs only transfer changed files.
|
||||
|
||||
Images can be synced separately when needed for image processing tests.
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### SSH Connection Issues
|
||||
```powershell
|
||||
# Test SSH connection
|
||||
ssh tour@athena.lan "echo 'Connection OK'"
|
||||
|
||||
# Check SSH key
|
||||
ssh-add -l
|
||||
```
|
||||
|
||||
### Permission Denied
|
||||
```bash
|
||||
# Add SSH key (Linux/Mac)
|
||||
chmod 600 ~/.ssh/id_rsa
|
||||
ssh-add ~/.ssh/id_rsa
|
||||
|
||||
# Windows: Use PuTTY or OpenSSH for Windows
|
||||
```
|
||||
|
||||
### Database Locked Error
|
||||
```powershell
|
||||
# Make sure no other process is using the database
|
||||
Get-Process | Where-Object {$_.Path -like "*java*"} | Stop-Process
|
||||
|
||||
# Or restart the monitor
|
||||
```
|
||||
|
||||
### Slow Image Sync
|
||||
- Use rsync instead of scp (see Performance Tips)
|
||||
- Consider syncing only database for code development
|
||||
- Images are only needed for object detection testing
|
||||
|
||||
## 📝 Script Details
|
||||
|
||||
### sync-production-data.sh (Bash)
|
||||
- **Platform**: Linux, Mac, Git Bash on Windows
|
||||
- **Best for**: Unix-like environments
|
||||
- **Features**: Color output, progress bars, statistics
|
||||
|
||||
## 🔄 Automation
|
||||
|
||||
### Linux/Mac Cron
|
||||
```bash
|
||||
# Edit crontab
|
||||
crontab -e
|
||||
|
||||
# Add daily sync at 7 AM
|
||||
0 7 * * * /path/to/auctiora/scripts/sync-production-data.sh --db-only
|
||||
```
|
||||
|
||||
## 🆘 Support
|
||||
|
||||
### Getting Help
|
||||
```bash
|
||||
# Bash
|
||||
./scripts/sync-production-data.sh --help
|
||||
```
|
||||
|
||||
### Common Commands
|
||||
```powershell
|
||||
# Check database size
|
||||
Get-Item c:\mnt\okcomputer\cache.db | Select-Object Name, Length
|
||||
|
||||
# View database contents
|
||||
sqlite3 c:\mnt\okcomputer\cache.db
|
||||
.tables
|
||||
.schema lots
|
||||
SELECT COUNT(*) FROM lots;
|
||||
.quit
|
||||
|
||||
# Check image count
|
||||
(Get-ChildItem c:\mnt\okcomputer\images -Recurse -File).Count
|
||||
```
|
||||
|
||||
## 📚 Related Documentation
|
||||
- [Database Architecture](../wiki/DATABASE_ARCHITECTURE.md)
|
||||
- [Integration Flowchart](../docs/INTEGRATION_FLOWCHART.md)
|
||||
- [Main README](../README.md)
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: December 2025
|
||||
**Maintainer**: Auctiora Development Team
|
||||
160
scripts/cleanup-database.sh
Normal file
160
scripts/cleanup-database.sh
Normal file
@@ -0,0 +1,160 @@
|
||||
#!/bin/bash
#
# Database Cleanup Utility
#
# Removes invalid/old data from the local database
#
# Usage:
#   ./scripts/cleanup-database.sh [db-path] [--dry-run]
#
# Options:
#   --dry-run    Show what would be deleted without actually deleting
#

set -e

# Configuration (the first non-flag argument overrides the default path)
LOCAL_DB_PATH="c:/mnt/okcomputer/cache.db"
DRY_RUN=false

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m'

# Parse arguments.  Flags are recognised in any position so that a bare
# "--dry-run" is never mistaken for the database path (the previous logic
# assigned $1 to LOCAL_DB_PATH unconditionally).
for arg in "$@"; do
  case "$arg" in
    --dry-run)
      DRY_RUN=true
      ;;
    --help|-h)
      # Print the header comment block; skip the shebang line so the help
      # text does not begin with a stray "!/bin/bash".
      grep '^#' "$0" | grep -v '^#!' | sed 's/^# \?//'
      exit 0
      ;;
    *)
      LOCAL_DB_PATH="$arg"
      ;;
  esac
done

echo -e "${BLUE}╔════════════════════════════════════════════════════════╗${NC}"
echo -e "${BLUE}║ Database Cleanup - Auctiora Monitor ║${NC}"
echo -e "${BLUE}╚════════════════════════════════════════════════════════╝${NC}"
echo ""

# sqlite3 is required for everything below; fail fast with a clear message.
if ! command -v sqlite3 >/dev/null 2>&1; then
  echo -e "${RED}Error: sqlite3 is not installed or not on PATH${NC}"
  exit 1
fi

if [ ! -f "${LOCAL_DB_PATH}" ]; then
  echo -e "${RED}Error: Database not found at ${LOCAL_DB_PATH}${NC}"
  exit 1
fi

# Backup database before cleanup
if [ "$DRY_RUN" = false ]; then
  BACKUP_PATH="${LOCAL_DB_PATH}.backup-before-cleanup-$(date +%Y%m%d-%H%M%S)"
  echo -e "${YELLOW}Creating backup: ${BACKUP_PATH}${NC}"
  cp "${LOCAL_DB_PATH}" "${BACKUP_PATH}"
  echo ""
fi

# Show current state
echo -e "${BLUE}Current database state:${NC}"
sqlite3 "${LOCAL_DB_PATH}" <<EOF
.mode box
SELECT
  'Total lots' as metric,
  COUNT(*) as count
FROM lots
UNION ALL
SELECT
  'Valid lots (with auction_id)',
  COUNT(*)
FROM lots
WHERE auction_id IS NOT NULL AND auction_id != ''
UNION ALL
SELECT
  'Invalid lots (missing auction_id)',
  COUNT(*)
FROM lots
WHERE auction_id IS NULL OR auction_id = '';
EOF
echo ""

# Count items to be deleted
echo -e "${YELLOW}Analyzing data to clean up...${NC}"

INVALID_LOTS=$(sqlite3 "${LOCAL_DB_PATH}" "SELECT COUNT(*) FROM lots WHERE auction_id IS NULL OR auction_id = '';")
ORPHANED_IMAGES=$(sqlite3 "${LOCAL_DB_PATH}" "SELECT COUNT(*) FROM images WHERE lot_id NOT IN (SELECT lot_id FROM lots);")

echo -e " ${RED}→ Invalid lots to delete: ${INVALID_LOTS}${NC}"
echo -e " ${YELLOW}→ Orphaned images to delete: ${ORPHANED_IMAGES}${NC}"
echo ""

if [ "$INVALID_LOTS" -eq 0 ] && [ "$ORPHANED_IMAGES" -eq 0 ]; then
  echo -e "${GREEN}✓ Database is clean! No cleanup needed.${NC}"
  exit 0
fi

if [ "$DRY_RUN" = true ]; then
  echo -e "${BLUE}DRY RUN MODE - No changes will be made${NC}"
  echo ""
  echo "Would delete:"
  echo " - $INVALID_LOTS invalid lots"
  echo " - $ORPHANED_IMAGES orphaned images"
  echo ""
  echo "Run without --dry-run to perform cleanup"
  exit 0
fi

# Confirm cleanup
echo -e "${YELLOW}This will permanently delete the above records.${NC}"
read -p "Continue? (y/N) " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
  echo "Cleanup cancelled"
  exit 0
fi

# Perform cleanup
echo ""
echo -e "${YELLOW}Cleaning up database...${NC}"

# Three steps total: delete lots, delete images, vacuum.  (Labels were
# previously inconsistent: [1/2], [2/2], then [3/3].)
if [ "$INVALID_LOTS" -gt 0 ]; then
  echo -e " ${BLUE}[1/3] Deleting invalid lots...${NC}"
  sqlite3 "${LOCAL_DB_PATH}" "DELETE FROM lots WHERE auction_id IS NULL OR auction_id = '';"
  echo -e " ${GREEN}✓ Deleted ${INVALID_LOTS} invalid lots${NC}"
fi

# Delete orphaned images
if [ "$ORPHANED_IMAGES" -gt 0 ]; then
  echo -e " ${BLUE}[2/3] Deleting orphaned images...${NC}"
  sqlite3 "${LOCAL_DB_PATH}" "DELETE FROM images WHERE lot_id NOT IN (SELECT lot_id FROM lots);"
  echo -e " ${GREEN}✓ Deleted ${ORPHANED_IMAGES} orphaned images${NC}"
fi

# Vacuum database to reclaim space
echo -e " ${BLUE}[3/3] Compacting database...${NC}"
sqlite3 "${LOCAL_DB_PATH}" "VACUUM;"
echo -e " ${GREEN}✓ Database compacted${NC}"

echo ""
echo -e "${GREEN}╔════════════════════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ Cleanup completed successfully ║${NC}"
echo -e "${GREEN}╚════════════════════════════════════════════════════════╝${NC}"
echo ""

# Show final state
echo -e "${BLUE}Final database state:${NC}"
sqlite3 "${LOCAL_DB_PATH}" <<EOF
.mode box
SELECT
  'Total lots' as metric,
  COUNT(*) as count
FROM lots
UNION ALL
SELECT
  'Total images',
  COUNT(*)
FROM images;
EOF

echo ""
DB_SIZE=$(du -h "${LOCAL_DB_PATH}" | cut -f1)
echo -e "${BLUE}Database size: ${DB_SIZE}${NC}"
echo ""
|
||||
15
scripts/smb.ps1
Normal file
15
scripts/smb.ps1
Normal file
@@ -0,0 +1,15 @@
|
||||
# PowerShell: map the remote share, copy the folder, then clean up
# Fail on the first error so a failed mount is not followed by a copy
# into a non-existent drive.
$ErrorActionPreference = 'Stop'

$remote = '\\192.168.1.159\shared-auction-data'
$local = 'C:\mnt\okcomputer\output\models'

# (1) create/verify the PSDrive (prompts for password if needed)
# Remember whether Z: already existed so cleanup only removes a mapping
# this script created, never one the user set up beforehand.
$driveExisted = [bool](Get-PSDrive -Name Z -ErrorAction SilentlyContinue)
if (-not $driveExisted) {
    $cred = Get-Credential -UserName 'tour' -Message 'SMB password for tour@192.168.1.159'
    New-PSDrive -Name Z -PSProvider FileSystem -Root $remote -Credential $cred -Persist | Out-Null
}

# (2) copy the local folder into the share
Copy-Item -Path $local -Destination 'Z:\' -Recurse -Force

# (3) optional cleanup: only unmap the drive if we mapped it above
if (-not $driveExisted) {
    Remove-PSDrive -Name Z -Force
}
|
||||
200
scripts/sync-production-data.sh
Normal file
200
scripts/sync-production-data.sh
Normal file
@@ -0,0 +1,200 @@
|
||||
#!/bin/bash
#
# Sync Production Data to Local
#
# This script copies the production SQLite database and images from the remote
# server (athena.lan) to your local development environment.
#
# Usage:
#   ./scripts/sync-production-data.sh [--db-only|--images-only|--all]
#
# Options:
#   --db-only       Only sync the database (default)
#   --images-only   Only sync the images
#   --all           Sync both database and images
#   --help          Show this help message
#

set -e # Exit on error

# Configuration
REMOTE_HOST="tour@athena.lan"
REMOTE_VOLUME="shared-auction-data"
LOCAL_DB_PATH="c:/mnt/okcomputer/output/cache.db"
LOCAL_IMAGES_PATH="c:/mnt/okcomputer/images"
REMOTE_TMP="/tmp"

# Colors for output
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Parse arguments
SYNC_MODE="db" # Default: database only

case "${1:-}" in
  --db-only)
    SYNC_MODE="db"
    ;;
  --images-only)
    SYNC_MODE="images"
    ;;
  --all)
    SYNC_MODE="all"
    ;;
  --help|-h)
    # Print the header comment block as help text; skip the shebang line so
    # the output does not begin with a stray "!/bin/bash".
    grep '^#' "$0" | grep -v '^#!' | sed 's/^# \?//'
    exit 0
    ;;
  "")
    SYNC_MODE="db"
    ;;
  *)
    echo -e "${RED}Error: Unknown option '$1'${NC}"
    echo "Use --help for usage information"
    exit 1
    ;;
esac

echo -e "${BLUE}╔════════════════════════════════════════════════════════╗${NC}"
echo -e "${BLUE}║ Production Data Sync - Auctiora Monitor ║${NC}"
echo -e "${BLUE}╚════════════════════════════════════════════════════════╝${NC}"
echo ""
|
||||
|
||||
# Function to sync database
#
# Copies cache.db out of the remote Docker volume into the remote /tmp,
# downloads it via scp (backing up any existing local copy first), cleans up
# the remote staging file, then prints table counts and a data-quality report.
# Globals read: REMOTE_HOST, REMOTE_VOLUME, REMOTE_TMP, LOCAL_DB_PATH, colors.
sync_database() {
    echo -e "${YELLOW}[1/3] Copying database from Docker volume to /tmp...${NC}"
    ssh "${REMOTE_HOST}" "docker run --rm -v ${REMOTE_VOLUME}:/data -v ${REMOTE_TMP}:${REMOTE_TMP} alpine cp /data/cache.db ${REMOTE_TMP}/cache.db"

    echo -e "${YELLOW}[2/3] Downloading database from remote server...${NC}"
    # Make sure the destination directory exists before scp writes into it
    # (sync_images does the equivalent mkdir; this path previously did not).
    mkdir -p "$(dirname "${LOCAL_DB_PATH}")"

    # Create backup and remove old local database
    if [ -f "${LOCAL_DB_PATH}" ]; then
        BACKUP_PATH="${LOCAL_DB_PATH}.backup-$(date +%Y%m%d-%H%M%S)"
        echo -e "${BLUE} Backing up existing database to: ${BACKUP_PATH}${NC}"
        cp "${LOCAL_DB_PATH}" "${BACKUP_PATH}"

        echo -e "${BLUE} Removing old local database...${NC}"
        rm -f "${LOCAL_DB_PATH}"
    fi

    # Download new database
    scp "${REMOTE_HOST}:${REMOTE_TMP}/cache.db" "${LOCAL_DB_PATH}"

    echo -e "${YELLOW}[3/3] Cleaning up remote /tmp...${NC}"
    ssh "${REMOTE_HOST}" "rm -f ${REMOTE_TMP}/cache.db"

    # Show database info
    DB_SIZE=$(du -h "${LOCAL_DB_PATH}" | cut -f1)
    echo -e "${GREEN}✓ Database synced successfully (${DB_SIZE})${NC}"

    # sqlite3 is documented as optional (scripts/README.md); skip the stats
    # instead of dying under `set -e` when it is not installed.
    if ! command -v sqlite3 >/dev/null 2>&1; then
        echo -e "${YELLOW} sqlite3 not found - skipping statistics${NC}"
        return 0
    fi

    # Show table counts
    echo -e "${BLUE} Database statistics:${NC}"
    sqlite3 "${LOCAL_DB_PATH}" <<EOF
.mode box
SELECT
  'auctions' as table_name, COUNT(*) as count FROM auctions
UNION ALL
SELECT 'lots', COUNT(*) FROM lots
UNION ALL
SELECT 'images', COUNT(*) FROM images
UNION ALL
SELECT 'cache', COUNT(*) FROM cache;
EOF

    # Show data quality report
    echo -e "${BLUE} Data quality:${NC}"
    sqlite3 "${LOCAL_DB_PATH}" <<EOF
.mode box
SELECT
  'Valid lots' as metric,
  COUNT(*) as count,
  ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM lots), 2) || '%' as percentage
FROM lots
WHERE auction_id IS NOT NULL AND auction_id != ''
UNION ALL
SELECT
  'Invalid lots (missing auction_id)',
  COUNT(*),
  ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM lots), 2) || '%'
FROM lots
WHERE auction_id IS NULL OR auction_id = ''
UNION ALL
SELECT
  'Lots with intelligence fields',
  COUNT(*),
  ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM lots), 2) || '%'
FROM lots
WHERE followers_count IS NOT NULL OR estimated_min IS NOT NULL;
EOF
}
|
||||
|
||||
# Function to sync images
#
# Stages the image tree from the remote Docker volume into remote /tmp,
# transfers it locally (rsync when available for incremental sync, scp as a
# fallback), removes the remote staging copy, then prints count/size stats.
# Globals read: REMOTE_HOST, REMOTE_VOLUME, REMOTE_TMP, LOCAL_IMAGES_PATH, colors.
sync_images() {
    echo -e "${YELLOW}[1/4] Getting image directory structure from Docker volume...${NC}"

    # Create local images directory if it doesn't exist
    mkdir -p "${LOCAL_IMAGES_PATH}"

    echo -e "${YELLOW}[2/4] Copying images from Docker volume to /tmp...${NC}"
    # Copy entire images directory from volume to /tmp; the trailing
    # `|| true` keeps an empty /data/images from aborting the whole sync.
    ssh "${REMOTE_HOST}" "docker run --rm -v ${REMOTE_VOLUME}:/data -v ${REMOTE_TMP}:${REMOTE_TMP} alpine sh -c 'mkdir -p ${REMOTE_TMP}/auction-images && cp -r /data/images/* ${REMOTE_TMP}/auction-images/ 2>/dev/null || true'"

    echo -e "${YELLOW}[3/4] Syncing images to local directory (this may take a while)...${NC}"
    # Use rsync for efficient incremental sync
    if command -v rsync &> /dev/null; then
        echo -e "${BLUE} Using rsync for efficient transfer...${NC}"
        rsync -avz --progress "${REMOTE_HOST}:${REMOTE_TMP}/auction-images/" "${LOCAL_IMAGES_PATH}/"
    else
        echo -e "${BLUE} Using scp for transfer (install rsync for faster incremental sync)...${NC}"
        # Quote the remote spec so the glob is expanded on the remote side,
        # never by the local shell.
        scp -r "${REMOTE_HOST}:${REMOTE_TMP}/auction-images/*" "${LOCAL_IMAGES_PATH}/"
    fi

    echo -e "${YELLOW}[4/4] Cleaning up remote /tmp...${NC}"
    ssh "${REMOTE_HOST}" "rm -rf ${REMOTE_TMP}/auction-images"

    # Show image stats
    IMAGE_COUNT=$(find "${LOCAL_IMAGES_PATH}" -type f 2>/dev/null | wc -l)
    IMAGE_SIZE=$(du -sh "${LOCAL_IMAGES_PATH}" 2>/dev/null | cut -f1)
    echo -e "${GREEN}✓ Images synced successfully${NC}"
    echo -e "${BLUE} Total images: ${IMAGE_COUNT}${NC}"
    echo -e "${BLUE} Total size: ${IMAGE_SIZE}${NC}"
}
|
||||
|
||||
# Execute sync based on mode
START_TIME=$(date +%s)

# Dispatch on the parsed mode; "all" runs both syncs back to back.
if [ "$SYNC_MODE" = "db" ]; then
    echo -e "${BLUE}Mode: Database only${NC}"
    echo ""
    sync_database
elif [ "$SYNC_MODE" = "images" ]; then
    echo -e "${BLUE}Mode: Images only${NC}"
    echo ""
    sync_images
elif [ "$SYNC_MODE" = "all" ]; then
    echo -e "${BLUE}Mode: Database + Images${NC}"
    echo ""
    sync_database
    echo ""
    sync_images
fi

END_TIME=$(date +%s)
DURATION=$((END_TIME - START_TIME))

# Final summary banner plus suggested follow-up commands.
echo ""
echo -e "${GREEN}╔════════════════════════════════════════════════════════╗${NC}"
echo -e "${GREEN}║ Sync completed successfully in ${DURATION} seconds ║${NC}"
echo -e "${GREEN}╚════════════════════════════════════════════════════════╝${NC}"
echo ""
echo -e "${BLUE}Next steps:${NC}"
echo -e " 1. Verify data: sqlite3 ${LOCAL_DB_PATH} 'SELECT COUNT(*) FROM lots;'"
echo -e " 2. Start monitor: mvn quarkus:dev"
echo -e " 3. Open dashboard: http://localhost:8080"
echo ""
|
||||
Reference in New Issue
Block a user