Compare commits
3 Commits
6668ae77e9
...
70dbb21c5d
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
70dbb21c5d | ||
|
|
255f1e81dd | ||
|
|
c46c0fe21e |
@@ -10,3 +10,9 @@
|
||||
dist/
|
||||
build/
|
||||
out/
|
||||
.idea
|
||||
node_modules/
|
||||
.vscode/
|
||||
.git
|
||||
.github
|
||||
scripts
|
||||
|
||||
@@ -4,6 +4,7 @@ models/
|
||||
|
||||
# Exclude build artifacts
|
||||
target/
|
||||
build/
|
||||
*.class
|
||||
*.jar
|
||||
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -31,4 +31,5 @@ bin/
|
||||
|
||||
NUL
|
||||
target/
|
||||
build/
|
||||
.idea/
|
||||
@@ -1,6 +0,0 @@
|
||||
--add-opens=java.base/java.lang=ALL-UNNAMED
|
||||
--add-opens=java.base/java.util=ALL-UNNAMED
|
||||
--add-opens=java.base/java.util.concurrent=ALL-UNNAMED
|
||||
--add-opens=java.base/java.net=ALL-UNNAMED
|
||||
--add-opens=java.base/java.io=ALL-UNNAMED
|
||||
--enable-native-access=ALL-UNNAMED
|
||||
117
.mvn/wrapper/MavenWrapperDownloader.java
vendored
117
.mvn/wrapper/MavenWrapperDownloader.java
vendored
@@ -1,117 +0,0 @@
|
||||
/*
|
||||
* Copyright 2007-present the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
import java.net.*;
|
||||
import java.io.*;
|
||||
import java.nio.channels.*;
|
||||
import java.util.Properties;
|
||||
|
||||
public class MavenWrapperDownloader {
|
||||
|
||||
private static final String WRAPPER_VERSION = "0.5.6";
|
||||
/**
|
||||
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
|
||||
*/
|
||||
private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
|
||||
+ WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
|
||||
|
||||
/**
|
||||
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
|
||||
* use instead of the default one.
|
||||
*/
|
||||
private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
|
||||
".mvn/wrapper/maven-wrapper.properties";
|
||||
|
||||
/**
|
||||
* Path where the maven-wrapper.jar will be saved to.
|
||||
*/
|
||||
private static final String MAVEN_WRAPPER_JAR_PATH =
|
||||
".mvn/wrapper/maven-wrapper.jar";
|
||||
|
||||
/**
|
||||
* Name of the property which should be used to override the default download url for the wrapper.
|
||||
*/
|
||||
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
|
||||
|
||||
public static void main(String args[]) {
|
||||
System.out.println("- Downloader started");
|
||||
File baseDirectory = new File(args[0]);
|
||||
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
|
||||
|
||||
// If the maven-wrapper.properties exists, read it and check if it contains a custom
|
||||
// wrapperUrl parameter.
|
||||
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
|
||||
String url = DEFAULT_DOWNLOAD_URL;
|
||||
if(mavenWrapperPropertyFile.exists()) {
|
||||
FileInputStream mavenWrapperPropertyFileInputStream = null;
|
||||
try {
|
||||
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
|
||||
Properties mavenWrapperProperties = new Properties();
|
||||
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
|
||||
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
|
||||
} catch (IOException e) {
|
||||
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
|
||||
} finally {
|
||||
try {
|
||||
if(mavenWrapperPropertyFileInputStream != null) {
|
||||
mavenWrapperPropertyFileInputStream.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// Ignore ...
|
||||
}
|
||||
}
|
||||
}
|
||||
System.out.println("- Downloading from: " + url);
|
||||
|
||||
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
|
||||
if(!outputFile.getParentFile().exists()) {
|
||||
if(!outputFile.getParentFile().mkdirs()) {
|
||||
System.out.println(
|
||||
"- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
|
||||
}
|
||||
}
|
||||
System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
|
||||
try {
|
||||
downloadFileFromURL(url, outputFile);
|
||||
System.out.println("Done");
|
||||
System.exit(0);
|
||||
} catch (Throwable e) {
|
||||
System.out.println("- Error downloading");
|
||||
e.printStackTrace();
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
|
||||
if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
|
||||
String username = System.getenv("MVNW_USERNAME");
|
||||
char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
|
||||
Authenticator.setDefault(new Authenticator() {
|
||||
@Override
|
||||
protected PasswordAuthentication getPasswordAuthentication() {
|
||||
return new PasswordAuthentication(username, password);
|
||||
}
|
||||
});
|
||||
}
|
||||
URL website = new URL(urlString);
|
||||
ReadableByteChannel rbc;
|
||||
rbc = Channels.newChannel(website.openStream());
|
||||
FileOutputStream fos = new FileOutputStream(destination);
|
||||
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
|
||||
fos.close();
|
||||
rbc.close();
|
||||
}
|
||||
|
||||
}
|
||||
1
.mvn/wrapper/maven-wrapper.config
vendored
1
.mvn/wrapper/maven-wrapper.config
vendored
@@ -1 +0,0 @@
|
||||
jvmArguments=-Djava.util.logging.manager=org.jboss.logmanager.LogManager --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED
|
||||
BIN
.mvn/wrapper/maven-wrapper.jar
vendored
BIN
.mvn/wrapper/maven-wrapper.jar
vendored
Binary file not shown.
2
.mvn/wrapper/maven-wrapper.properties
vendored
2
.mvn/wrapper/maven-wrapper.properties
vendored
@@ -1,2 +0,0 @@
|
||||
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.11/apache-maven-3.9.11-bin.zip
|
||||
wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.4/maven-wrapper-3.3.4.jar
|
||||
BIN
.mvn/wrapper/maven-wrapper_old.jar
vendored
BIN
.mvn/wrapper/maven-wrapper_old.jar
vendored
Binary file not shown.
@@ -119,8 +119,8 @@ mvn clean package
|
||||
```
|
||||
|
||||
This creates:
|
||||
- `target/troostwijk-scraper-1.0-SNAPSHOT.jar` - Regular JAR
|
||||
- `target/troostwijk-scraper-1.0-SNAPSHOT-jar-with-dependencies.jar` - Executable JAR with all dependencies
|
||||
- `../build/auctiora/auctiora-1.0-SNAPSHOT.jar` - Regular JAR
|
||||
- `../build/auctiora/auctiora-1.0-SNAPSHOT-jar-with-dependencies.jar` - Executable JAR with all dependencies
|
||||
|
||||
## Running
|
||||
|
||||
@@ -128,7 +128,7 @@ This creates:
|
||||
|
||||
```bash
|
||||
java -Djava.library.path="/path/to/opencv/lib" \
|
||||
-jar target/troostwijk-scraper-1.0-SNAPSHOT-jar-with-dependencies.jar
|
||||
-jar ../build/auctiora/troostwijk-scraper-1.0-SNAPSHOT-jar-with-dependencies.jar
|
||||
```
|
||||
|
||||
### With Email Notifications
|
||||
@@ -137,7 +137,7 @@ java -Djava.library.path="/path/to/opencv/lib" \
|
||||
export NOTIFICATION_CONFIG="smtp:your@gmail.com:app_password:your@gmail.com"
|
||||
|
||||
java -Djava.library.path="/path/to/opencv/lib" \
|
||||
-jar target/troostwijk-scraper-1.0-SNAPSHOT-jar-with-dependencies.jar
|
||||
-jar ../build/auctiora/troostwijk-scraper-1.0-SNAPSHOT-jar-with-dependencies.jar
|
||||
```
|
||||
|
||||
### Using Maven
|
||||
|
||||
@@ -36,11 +36,11 @@ mvn quarkus:dev
|
||||
mvn clean package
|
||||
|
||||
# Run
|
||||
java -jar target/quarkus-app/quarkus-run.jar
|
||||
java -jar ../build/auctiora/quarkus-app/quarkus-run.jar
|
||||
|
||||
# Or use fast-jar (recommended for production)
|
||||
mvn clean package -Dquarkus.package.jar.type=fast-jar
|
||||
java -jar target/quarkus-app/quarkus-run.jar
|
||||
java -jar ../build/auctiora/quarkus-app/quarkus-run.jar
|
||||
```
|
||||
|
||||
### Option 3: Docker
|
||||
@@ -545,7 +545,7 @@ quarkus.log.category."com.auction".level=DEBUG
|
||||
mvn package -Pnative
|
||||
|
||||
# Run native executable
|
||||
./target/troostwijk-scraper-1.0-SNAPSHOT-runner
|
||||
../build/auctiora/troostwijk-scraper-1.0-SNAPSHOT-runner
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
310
mvnw
vendored
310
mvnw
vendored
@@ -1,310 +0,0 @@
|
||||
#!/bin/sh
|
||||
# ----------------------------------------------------------------------------
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Maven Start Up Batch script
|
||||
#
|
||||
# Required ENV vars:
|
||||
# ------------------
|
||||
# JAVA_HOME - location of a JDK home dir
|
||||
#
|
||||
# Optional ENV vars
|
||||
# -----------------
|
||||
# M2_HOME - location of maven2's installed home dir
|
||||
# MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
# e.g. to debug Maven itself, use
|
||||
# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
# MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if [ -z "$MAVEN_SKIP_RC" ] ; then
|
||||
|
||||
if [ -f /etc/mavenrc ] ; then
|
||||
. /etc/mavenrc
|
||||
fi
|
||||
|
||||
if [ -f "$HOME/.mavenrc" ] ; then
|
||||
. "$HOME/.mavenrc"
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# OS specific support. $var _must_ be set to either true or false.
|
||||
cygwin=false;
|
||||
darwin=false;
|
||||
mingw=false
|
||||
case "`uname`" in
|
||||
CYGWIN*) cygwin=true ;;
|
||||
MINGW*) mingw=true;;
|
||||
Darwin*) darwin=true
|
||||
# Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
|
||||
# See https://developer.apple.com/library/mac/qa/qa1170/_index.html
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
if [ -x "/usr/libexec/java_home" ]; then
|
||||
export JAVA_HOME="`/usr/libexec/java_home`"
|
||||
else
|
||||
export JAVA_HOME="/Library/Java/Home"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
if [ -r /etc/gentoo-release ] ; then
|
||||
JAVA_HOME=`java-config --jre-home`
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$M2_HOME" ] ; then
|
||||
## resolve links - $0 may be a link to maven's home
|
||||
PRG="$0"
|
||||
|
||||
# need this for relative symlinks
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG="`dirname "$PRG"`/$link"
|
||||
fi
|
||||
done
|
||||
|
||||
saveddir=`pwd`
|
||||
|
||||
M2_HOME=`dirname "$PRG"`/..
|
||||
|
||||
# make it fully qualified
|
||||
M2_HOME=`cd "$M2_HOME" && pwd`
|
||||
|
||||
cd "$saveddir"
|
||||
# echo Using m2 at $M2_HOME
|
||||
fi
|
||||
|
||||
# For Cygwin, ensure paths are in UNIX format before anything is touched
|
||||
if $cygwin ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --unix "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
|
||||
fi
|
||||
|
||||
# For Mingw, ensure paths are in UNIX format before anything is touched
|
||||
if $mingw ; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME="`(cd "$M2_HOME"; pwd)`"
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ]; then
|
||||
javaExecutable="`which javac`"
|
||||
if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
|
||||
# readlink(1) is not available as standard on Solaris 10.
|
||||
readLink=`which readlink`
|
||||
if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
|
||||
if $darwin ; then
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
|
||||
else
|
||||
javaExecutable="`readlink -f \"$javaExecutable\"`"
|
||||
fi
|
||||
javaHome="`dirname \"$javaExecutable\"`"
|
||||
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
|
||||
JAVA_HOME="$javaHome"
|
||||
export JAVA_HOME
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$JAVACMD" ] ; then
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
else
|
||||
JAVACMD="`which java`"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
echo "Error: JAVA_HOME is not defined correctly." >&2
|
||||
echo " We cannot execute $JAVACMD" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$JAVA_HOME" ] ; then
|
||||
echo "Warning: JAVA_HOME environment variable is not set."
|
||||
fi
|
||||
|
||||
CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
|
||||
|
||||
# traverses directory structure from process work directory to filesystem root
|
||||
# first directory with .mvn subdirectory is considered project base directory
|
||||
find_maven_basedir() {
|
||||
|
||||
if [ -z "$1" ]
|
||||
then
|
||||
echo "Path not specified to find_maven_basedir"
|
||||
return 1
|
||||
fi
|
||||
|
||||
basedir="$1"
|
||||
wdir="$1"
|
||||
while [ "$wdir" != '/' ] ; do
|
||||
if [ -d "$wdir"/.mvn ] ; then
|
||||
basedir=$wdir
|
||||
break
|
||||
fi
|
||||
# workaround for JBEAP-8937 (on Solaris 10/Sparc)
|
||||
if [ -d "${wdir}" ]; then
|
||||
wdir=`cd "$wdir/.."; pwd`
|
||||
fi
|
||||
# end of workaround
|
||||
done
|
||||
echo "${basedir}"
|
||||
}
|
||||
|
||||
# concatenates all lines of a file
|
||||
concat_lines() {
|
||||
if [ -f "$1" ]; then
|
||||
echo "$(tr -s '\n' ' ' < "$1")"
|
||||
fi
|
||||
}
|
||||
|
||||
BASE_DIR=`find_maven_basedir "$(pwd)"`
|
||||
if [ -z "$BASE_DIR" ]; then
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
##########################################################################################
|
||||
# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
|
||||
# This allows using the maven wrapper in projects that prohibit checking in binary data.
|
||||
##########################################################################################
|
||||
if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found .mvn/wrapper/maven-wrapper.jar"
|
||||
fi
|
||||
else
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
|
||||
fi
|
||||
if [ -n "$MVNW_REPOURL" ]; then
|
||||
jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
|
||||
else
|
||||
jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
|
||||
fi
|
||||
while IFS="=" read key value; do
|
||||
case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
|
||||
esac
|
||||
done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Downloading from: $jarUrl"
|
||||
fi
|
||||
wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
|
||||
if $cygwin; then
|
||||
wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
|
||||
fi
|
||||
|
||||
if command -v wget > /dev/null; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found wget ... using wget"
|
||||
fi
|
||||
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
|
||||
wget "$jarUrl" -O "$wrapperJarPath"
|
||||
else
|
||||
wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
|
||||
fi
|
||||
elif command -v curl > /dev/null; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Found curl ... using curl"
|
||||
fi
|
||||
if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
|
||||
curl -o "$wrapperJarPath" "$jarUrl" -f
|
||||
else
|
||||
curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
|
||||
fi
|
||||
|
||||
else
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo "Falling back to using Java to download"
|
||||
fi
|
||||
javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
|
||||
# For Cygwin, switch paths to Windows format before running javac
|
||||
if $cygwin; then
|
||||
javaClass=`cygpath --path --windows "$javaClass"`
|
||||
fi
|
||||
if [ -e "$javaClass" ]; then
|
||||
if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo " - Compiling MavenWrapperDownloader.java ..."
|
||||
fi
|
||||
# Compiling the Java class
|
||||
("$JAVA_HOME/bin/javac" "$javaClass")
|
||||
fi
|
||||
if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
|
||||
# Running the downloader
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo " - Running MavenWrapperDownloader.java ..."
|
||||
fi
|
||||
("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
##########################################################################################
|
||||
# End of extension
|
||||
##########################################################################################
|
||||
|
||||
export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
|
||||
if [ "$MVNW_VERBOSE" = true ]; then
|
||||
echo $MAVEN_PROJECTBASEDIR
|
||||
fi
|
||||
MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
|
||||
|
||||
# For Cygwin, switch paths to Windows format before running java
|
||||
if $cygwin; then
|
||||
[ -n "$M2_HOME" ] &&
|
||||
M2_HOME=`cygpath --path --windows "$M2_HOME"`
|
||||
[ -n "$JAVA_HOME" ] &&
|
||||
JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
|
||||
[ -n "$CLASSPATH" ] &&
|
||||
CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
|
||||
[ -n "$MAVEN_PROJECTBASEDIR" ] &&
|
||||
MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
|
||||
fi
|
||||
|
||||
# Provide a "standardized" way to retrieve the CLI args that will
|
||||
# work with both Windows and non-Windows executions.
|
||||
MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
|
||||
export MAVEN_CMD_LINE_ARGS
|
||||
|
||||
WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
exec "$JAVACMD" \
|
||||
$MAVEN_OPTS \
|
||||
-classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
|
||||
"-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
|
||||
${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"
|
||||
187
mvnw.cmd
vendored
187
mvnw.cmd
vendored
@@ -1,187 +0,0 @@
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Licensed to the Apache Software Foundation (ASF) under one
|
||||
@REM or more contributor license agreements. See the NOTICE file
|
||||
@REM distributed with this work for additional information
|
||||
@REM regarding copyright ownership. The ASF licenses this file
|
||||
@REM to you under the Apache License, Version 2.0 (the
|
||||
@REM "License"); you may not use this file except in compliance
|
||||
@REM with the License. You may obtain a copy of the License at
|
||||
@REM
|
||||
@REM http://www.apache.org/licenses/LICENSE-2.0
|
||||
@REM
|
||||
@REM Unless required by applicable law or agreed to in writing,
|
||||
@REM software distributed under the License is distributed on an
|
||||
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
@REM KIND, either express or implied. See the License for the
|
||||
@REM specific language governing permissions and limitations
|
||||
@REM under the License.
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM ----------------------------------------------------------------------------
|
||||
@REM Apache Maven Wrapper startup batch script, version 3.3.4
|
||||
@REM
|
||||
@REM Required ENV vars:
|
||||
@REM JAVA_HOME - location of a JDK home dir
|
||||
@REM
|
||||
@REM Optional ENV vars
|
||||
@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
|
||||
@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
|
||||
@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
|
||||
@REM e.g. to debug Maven itself, use
|
||||
@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
|
||||
@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
|
||||
@REM ----------------------------------------------------------------------------
|
||||
|
||||
@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
|
||||
@echo off
|
||||
@REM set title of command window
|
||||
title %0
|
||||
@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
|
||||
@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO%
|
||||
|
||||
@REM set %HOME% to equivalent of $HOME
|
||||
if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
|
||||
|
||||
@REM Execute a user defined script before this one
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
|
||||
@REM check for pre script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %*
|
||||
if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %*
|
||||
:skipRcPre
|
||||
|
||||
@setlocal
|
||||
|
||||
set ERROR_CODE=0
|
||||
|
||||
@REM To isolate internal variables from possible post scripts, we use another setlocal
|
||||
@setlocal
|
||||
|
||||
@REM ==== START VALIDATION ====
|
||||
if not "%JAVA_HOME%" == "" goto OkJHome
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME not found in your environment. >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
:OkJHome
|
||||
if exist "%JAVA_HOME%\bin\java.exe" goto init
|
||||
|
||||
echo.
|
||||
echo Error: JAVA_HOME is set to an invalid directory. >&2
|
||||
echo JAVA_HOME = "%JAVA_HOME%" >&2
|
||||
echo Please set the JAVA_HOME variable in your environment to match the >&2
|
||||
echo location of your Java installation. >&2
|
||||
echo.
|
||||
goto error
|
||||
|
||||
@REM ==== END VALIDATION ====
|
||||
|
||||
:init
|
||||
|
||||
@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
|
||||
@REM Fallback to current working directory if not found.
|
||||
|
||||
set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
|
||||
IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
|
||||
|
||||
set EXEC_DIR=%CD%
|
||||
set WDIR=%EXEC_DIR%
|
||||
:findBaseDir
|
||||
IF EXIST "%WDIR%"\.mvn goto baseDirFound
|
||||
cd ..
|
||||
IF "%WDIR%"=="%CD%" goto baseDirNotFound
|
||||
set WDIR=%CD%
|
||||
goto findBaseDir
|
||||
|
||||
:baseDirNotFound
|
||||
set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
|
||||
cd "%EXEC_DIR%"
|
||||
goto endDetectBaseDir
|
||||
|
||||
:baseDirFound
|
||||
set MAVEN_PROJECTBASEDIR=%WDIR%
|
||||
cd "%EXEC_DIR%"
|
||||
:endDetectBaseDir
|
||||
|
||||
IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
|
||||
|
||||
@setlocal EnableExtensions EnableDelayedExpansion
|
||||
for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG=!JVM_CONFIG! %%a
|
||||
@endlocal & set JVM_CONFIG=%JVM_CONFIG%
|
||||
|
||||
:endReadAdditionalConfig
|
||||
|
||||
SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
|
||||
set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
|
||||
set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
|
||||
|
||||
set WRAPPER_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.4/maven-wrapper-3.3.4.jar"
|
||||
|
||||
FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
|
||||
IF "%%A"=="wrapperUrl" SET WRAPPER_URL=%%B
|
||||
)
|
||||
|
||||
@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
|
||||
@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
|
||||
if exist %WRAPPER_JAR% (
|
||||
if "%MVNW_VERBOSE%" == "true" (
|
||||
echo Found %WRAPPER_JAR%
|
||||
)
|
||||
) else (
|
||||
if not "%MVNW_REPOURL%" == "" (
|
||||
SET WRAPPER_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.3.4/maven-wrapper-3.3.4.jar"
|
||||
)
|
||||
if "%MVNW_VERBOSE%" == "true" (
|
||||
echo Couldn't find %WRAPPER_JAR%, downloading it ...
|
||||
echo Downloading from: %WRAPPER_URL%
|
||||
)
|
||||
|
||||
powershell -Command "&{"^
|
||||
"$webclient = new-object System.Net.WebClient;"^
|
||||
"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
|
||||
"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
|
||||
"}"^
|
||||
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%WRAPPER_URL%', '%WRAPPER_JAR%')"^
|
||||
"}"
|
||||
if "%MVNW_VERBOSE%" == "true" (
|
||||
echo Finished downloading %WRAPPER_JAR%
|
||||
)
|
||||
)
|
||||
@REM End of extension
|
||||
|
||||
%MAVEN_JAVA_EXE% ^
|
||||
%JVM_CONFIG% ^
|
||||
--add-opens=java.base/java.lang=ALL-UNNAMED ^
|
||||
--add-opens=java.base/java.util=ALL-UNNAMED ^
|
||||
--add-opens=java.base/java.util.concurrent=ALL-UNNAMED ^
|
||||
--add-opens=java.base/java.net=ALL-UNNAMED ^
|
||||
--add-opens=java.base/java.io=ALL-UNNAMED ^
|
||||
%MAVEN_OPTS% ^
|
||||
%MAVEN_DEBUG_OPTS% ^
|
||||
-classpath %WRAPPER_JAR% ^
|
||||
"-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^
|
||||
%WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
|
||||
if ERRORLEVEL 1 goto error
|
||||
goto end
|
||||
|
||||
:error
|
||||
set ERROR_CODE=1
|
||||
|
||||
:end
|
||||
@endlocal & set ERROR_CODE=%ERROR_CODE%
|
||||
|
||||
if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
|
||||
@REM check for post script, once with legacy .bat ending and once with .cmd ending
|
||||
if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat"
|
||||
if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd"
|
||||
:skipRcPost
|
||||
|
||||
@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
|
||||
if "%MAVEN_BATCH_PAUSE%" == "on" pause
|
||||
|
||||
if "%MAVEN_VERBOSE%" == "on" echo %ERROR_CODE%
|
||||
|
||||
exit /B %ERROR_CODE%
|
||||
22
nginx.conf
Normal file
22
nginx.conf
Normal file
@@ -0,0 +1,22 @@
|
||||
server {
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
|
||||
# Enable clean URLs without .html extension
|
||||
location / {
|
||||
# Try the exact URI, then with .html, then as directory with index.html, then 404
|
||||
try_files $uri $uri.html $uri/ =404;
|
||||
}
|
||||
|
||||
# Optional: Redirect .html URLs to clean URLs
|
||||
if ($request_uri ~ ^/(.*)\.html(\?|$)) {
|
||||
return 301 /$1$2;
|
||||
}
|
||||
|
||||
# Gzip compression for better performance
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_types text/plain text/css text/xml text/javascript application/javascript application/xml+rss application/json;
|
||||
}
|
||||
11
pom.xml
11
pom.xml
@@ -162,11 +162,11 @@
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>2.0.9</version>
|
||||
</dependency>
|
||||
<!-- <dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-simple</artifactId>
|
||||
<version>2.0.9</version>
|
||||
</dependency>-->
|
||||
<!-- <dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-simple</artifactId>
|
||||
<version>2.0.9</version>
|
||||
</dependency>-->
|
||||
<!-- JUnit 5 for testing -->
|
||||
<dependency>
|
||||
<groupId>org.junit.jupiter</groupId>
|
||||
@@ -342,6 +342,7 @@
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<directory>${project.basedir}/../build/${project.artifactId}</directory>
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>src/main/resources</directory>
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
# BFG.ps1 (run from C:\vibe\auctiora\scripts)
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
# 1) Download BFG jar once, next to this script
|
||||
$bfgJar = Join-Path $PSScriptRoot "bfg.jar"
|
||||
if (-not (Test-Path $bfgJar)) {
|
||||
Invoke-WebRequest `
|
||||
"https://repo1.maven.org/maven2/com/madgag/bfg/1.14.0/bfg-1.14.0.jar" `
|
||||
-OutFile $bfgJar
|
||||
}
|
||||
|
||||
# 2) Clone bare mirror next to project root: C:\vibe\auctiora\auctiora.git
|
||||
$rootDir = Join-Path $PSScriptRoot ".."
|
||||
$mirrorPath = Join-Path $rootDir "auctiora.git"
|
||||
|
||||
if (Test-Path $mirrorPath) {
|
||||
Remove-Item $mirrorPath -Recurse -Force
|
||||
}
|
||||
|
||||
git clone --mirror "https://git.appmodel.nl/Tour/auctiora.git" $mirrorPath
|
||||
|
||||
# 3) Run BFG in mirror
|
||||
Push-Location $mirrorPath
|
||||
|
||||
java -jar $bfgJar --strip-blobs-bigger-than 50M .
|
||||
|
||||
git reflog expire --expire=now --all
|
||||
git gc --prune=now --aggressive
|
||||
|
||||
# 4) Force-push cleaned history
|
||||
git push --force
|
||||
|
||||
Pop-Location
|
||||
@@ -1,206 +0,0 @@
|
||||
# Auctiora Scripts
|
||||
|
||||
Utility scripts for managing the Auctiora auction monitoring system.
|
||||
|
||||
## 📦 Available Scripts
|
||||
|
||||
### 1. Production Data Sync
|
||||
|
||||
Sync production database and images from `athena.lan` to your local development environment.
|
||||
|
||||
#### Quick Start
|
||||
|
||||
**Linux/Mac (Bash)**:
|
||||
```bash
|
||||
# Make executable (first time only)
|
||||
chmod +x scripts/sync-production-data.sh
|
||||
|
||||
# Sync database only
|
||||
./scripts/sync-production-data.sh --db-only
|
||||
|
||||
# Sync everything
|
||||
./scripts/sync-production-data.sh --all
|
||||
|
||||
# Sync images only
|
||||
./scripts/sync-production-data.sh --images-only
|
||||
```
|
||||
|
||||
## 🔧 Prerequisites
|
||||
|
||||
### Required
|
||||
- **SSH Client**: OpenSSH or equivalent
|
||||
- Windows: Built-in on Windows 10+, or install [Git Bash](https://git-scm.com/downloads)
|
||||
- Linux/Mac: Pre-installed
|
||||
- **SCP**: Secure copy (usually comes with SSH)
|
||||
- **SSH Access**: SSH key configured for `tour@athena.lan`
|
||||
|
||||
### Optional
|
||||
- **rsync**: For efficient incremental image sync
|
||||
- Windows: Install via [WSL](https://docs.microsoft.com/en-us/windows/wsl/install) or [Cygwin](https://www.cygwin.com/)
|
||||
- Linux/Mac: Usually pre-installed
|
||||
- **sqlite3**: For showing database statistics
|
||||
- Windows: Download from [sqlite.org](https://www.sqlite.org/download.html)
|
||||
- Linux: `sudo apt install sqlite3`
|
||||
- Mac: Pre-installed
|
||||
|
||||
## 📊 What Gets Synced
|
||||
|
||||
### Database (`cache.db`)
|
||||
- **Size**: ~8.9 GB (as of Dec 2024)
|
||||
- **Contains**:
|
||||
- Auctions metadata
|
||||
- Lots (kavels) with bid information
|
||||
- Images metadata and URLs
|
||||
- HTTP cache for scraper
|
||||
- **Local Path**: `c:\mnt\okcomputer\cache.db`
|
||||
|
||||
### Images Directory
|
||||
- **Size**: Varies (can be large)
|
||||
- **Contains**:
|
||||
- Downloaded lot images
|
||||
- Organized by lot ID
|
||||
- **Local Path**: `c:\mnt\okcomputer\images\`
|
||||
|
||||
## 🚀 Usage Examples
|
||||
|
||||
## 📁 File Locations
|
||||
|
||||
### Remote (Production)
|
||||
```
|
||||
athena.lan
|
||||
├── Docker Volume: shared-auction-data
|
||||
│ ├── /data/cache.db (SQLite database)
|
||||
│ └── /data/images/ (Image files)
|
||||
└── /tmp/ (Temporary staging area)
|
||||
```
|
||||
|
||||
### Local (Development)
|
||||
```
|
||||
c:\mnt\okcomputer\
|
||||
├── cache.db (SQLite database)
|
||||
├── cache.db.backup-* (Automatic backups)
|
||||
└── images\ (Image files)
|
||||
```
|
||||
|
||||
## 🔒 Safety Features
|
||||
|
||||
### Automatic Backups
|
||||
- Existing local database is automatically backed up before sync
|
||||
- Backup format: `cache.db.backup-YYYYMMDD-HHMMSS`
|
||||
- Keep recent backups manually or clean up old ones
|
||||
|
||||
### Confirmation Prompts
|
||||
- PowerShell script prompts for confirmation (unless `-Force` is used)
|
||||
- Shows configuration before executing
|
||||
- Safe to cancel at any time
|
||||
|
||||
### Error Handling
|
||||
- Validates SSH connection before starting
|
||||
- Cleans up temporary files on remote server
|
||||
- Reports clear error messages
|
||||
|
||||
## ⚡ Performance Tips
|
||||
|
||||
### Faster Image Sync with rsync
|
||||
Install rsync for incremental image sync (only new/changed files):
|
||||
|
||||
**Windows (WSL)**:
|
||||
```powershell
|
||||
wsl --install
|
||||
wsl -d Ubuntu
|
||||
sudo apt install rsync
|
||||
```
|
||||
|
||||
**Windows (Chocolatey)**:
|
||||
```powershell
|
||||
choco install rsync
|
||||
```
|
||||
|
||||
**Benefit**: First sync downloads everything, subsequent syncs only transfer changed files.
|
||||
|
||||
Images can be synced separately when needed for image processing tests.
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### SSH Connection Issues
|
||||
```powershell
|
||||
# Test SSH connection
|
||||
ssh tour@athena.lan "echo 'Connection OK'"
|
||||
|
||||
# Check SSH key
|
||||
ssh-add -l
|
||||
```
|
||||
|
||||
### Permission Denied
|
||||
```bash
|
||||
# Add SSH key (Linux/Mac)
|
||||
chmod 600 ~/.ssh/id_rsa
|
||||
ssh-add ~/.ssh/id_rsa
|
||||
|
||||
# Windows: Use PuTTY or OpenSSH for Windows
|
||||
```
|
||||
|
||||
### Database Locked Error
|
||||
```powershell
|
||||
# Make sure no other process is using the database
|
||||
Get-Process | Where-Object {$_.Path -like "*java*"} | Stop-Process
|
||||
|
||||
# Or restart the monitor
|
||||
```
|
||||
|
||||
### Slow Image Sync
|
||||
- Use rsync instead of scp (see Performance Tips)
|
||||
- Consider syncing only database for code development
|
||||
- Images only needed for object detection testing
|
||||
|
||||
## 📝 Script Details
|
||||
|
||||
### sync-production-data.sh (Bash)
|
||||
- **Platform**: Linux, Mac, Git Bash on Windows
|
||||
- **Best for**: Unix-like environments
|
||||
- **Features**: Color output, progress bars, statistics
|
||||
|
||||
## 🔄 Automation
|
||||
|
||||
### Linux/Mac Cron
|
||||
```bash
|
||||
# Edit crontab
|
||||
crontab -e
|
||||
|
||||
# Add daily sync at 7 AM
|
||||
0 7 * * * /path/to/auctiora/scripts/sync-production-data.sh --db-only
|
||||
```
|
||||
|
||||
## 🆘 Support
|
||||
|
||||
### Getting Help
|
||||
```bash
|
||||
# Bash
|
||||
./scripts/sync-production-data.sh --help
|
||||
```
|
||||
|
||||
### Common Commands
|
||||
```powershell
|
||||
# Check database size
|
||||
ls c:\mnt\okcomputer\cache.db -h
|
||||
|
||||
# View database contents
|
||||
sqlite3 c:\mnt\okcomputer\cache.db
|
||||
.tables
|
||||
.schema lots
|
||||
SELECT COUNT(*) FROM lots;
|
||||
.quit
|
||||
|
||||
# Check image count
|
||||
(Get-ChildItem c:\mnt\okcomputer\images -Recurse -File).Count
|
||||
```
|
||||
|
||||
## 📚 Related Documentation
|
||||
- [Database Architecture](../wiki/DATABASE_ARCHITECTURE.md)
|
||||
- [Integration Flowchart](../docs/INTEGRATION_FLOWCHART.md)
|
||||
- [Main README](../README.md)
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: December 2025
|
||||
**Maintainer**: Auctiora Development Team
|
||||
@@ -1,160 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Database Cleanup Utility
|
||||
#
|
||||
# Removes invalid/old data from the local database
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/cleanup-database.sh [--dry-run]
|
||||
#
|
||||
# Options:
|
||||
# --dry-run Show what would be deleted without actually deleting
|
||||
#
|
||||
|
||||
set -e
|
||||
|
||||
# Configuration
|
||||
LOCAL_DB_PATH="${1:-c:/mnt/okcomputer/cache.db}"
|
||||
DRY_RUN=false
|
||||
|
||||
# Colors
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
RED='\033[0;31m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
|
||||
# Parse arguments
|
||||
if [ "$1" = "--dry-run" ] || [ "$2" = "--dry-run" ]; then
|
||||
DRY_RUN=true
|
||||
fi
|
||||
|
||||
if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
|
||||
grep '^#' "$0" | sed 's/^# \?//'
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo -e "${BLUE}╔════════════════════════════════════════════════════════╗${NC}"
|
||||
echo -e "${BLUE}║ Database Cleanup - Auctiora Monitor ║${NC}"
|
||||
echo -e "${BLUE}╚════════════════════════════════════════════════════════╝${NC}"
|
||||
echo ""
|
||||
|
||||
if [ ! -f "${LOCAL_DB_PATH}" ]; then
|
||||
echo -e "${RED}Error: Database not found at ${LOCAL_DB_PATH}${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Backup database before cleanup
|
||||
if [ "$DRY_RUN" = false ]; then
|
||||
BACKUP_PATH="${LOCAL_DB_PATH}.backup-before-cleanup-$(date +%Y%m%d-%H%M%S)"
|
||||
echo -e "${YELLOW}Creating backup: ${BACKUP_PATH}${NC}"
|
||||
cp "${LOCAL_DB_PATH}" "${BACKUP_PATH}"
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# Show current state
|
||||
echo -e "${BLUE}Current database state:${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" <<EOF
|
||||
.mode box
|
||||
SELECT
|
||||
'Total lots' as metric,
|
||||
COUNT(*) as count
|
||||
FROM lots
|
||||
UNION ALL
|
||||
SELECT
|
||||
'Valid lots (with auction_id)',
|
||||
COUNT(*)
|
||||
FROM lots
|
||||
WHERE auction_id IS NOT NULL AND auction_id != ''
|
||||
UNION ALL
|
||||
SELECT
|
||||
'Invalid lots (missing auction_id)',
|
||||
COUNT(*)
|
||||
FROM lots
|
||||
WHERE auction_id IS NULL OR auction_id = '';
|
||||
EOF
|
||||
echo ""
|
||||
|
||||
# Count items to be deleted
|
||||
echo -e "${YELLOW}Analyzing data to clean up...${NC}"
|
||||
|
||||
INVALID_LOTS=$(sqlite3 "${LOCAL_DB_PATH}" "SELECT COUNT(*) FROM lots WHERE auction_id IS NULL OR auction_id = '';")
|
||||
ORPHANED_IMAGES=$(sqlite3 "${LOCAL_DB_PATH}" "SELECT COUNT(*) FROM images WHERE lot_id NOT IN (SELECT lot_id FROM lots);")
|
||||
|
||||
echo -e " ${RED}→ Invalid lots to delete: ${INVALID_LOTS}${NC}"
|
||||
echo -e " ${YELLOW}→ Orphaned images to delete: ${ORPHANED_IMAGES}${NC}"
|
||||
echo ""
|
||||
|
||||
if [ "$INVALID_LOTS" -eq 0 ] && [ "$ORPHANED_IMAGES" -eq 0 ]; then
|
||||
echo -e "${GREEN}✓ Database is clean! No cleanup needed.${NC}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ "$DRY_RUN" = true ]; then
|
||||
echo -e "${BLUE}DRY RUN MODE - No changes will be made${NC}"
|
||||
echo ""
|
||||
echo "Would delete:"
|
||||
echo " - $INVALID_LOTS invalid lots"
|
||||
echo " - $ORPHANED_IMAGES orphaned images"
|
||||
echo ""
|
||||
echo "Run without --dry-run to perform cleanup"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Confirm cleanup
|
||||
echo -e "${YELLOW}This will permanently delete the above records.${NC}"
|
||||
read -p "Continue? (y/N) " -n 1 -r
|
||||
echo
|
||||
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
|
||||
echo "Cleanup cancelled"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Perform cleanup
|
||||
echo ""
|
||||
echo -e "${YELLOW}Cleaning up database...${NC}"
|
||||
|
||||
# Delete invalid lots
|
||||
if [ "$INVALID_LOTS" -gt 0 ]; then
|
||||
echo -e " ${BLUE}[1/2] Deleting invalid lots...${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" "DELETE FROM lots WHERE auction_id IS NULL OR auction_id = '';"
|
||||
echo -e " ${GREEN}✓ Deleted ${INVALID_LOTS} invalid lots${NC}"
|
||||
fi
|
||||
|
||||
# Delete orphaned images
|
||||
if [ "$ORPHANED_IMAGES" -gt 0 ]; then
|
||||
echo -e " ${BLUE}[2/2] Deleting orphaned images...${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" "DELETE FROM images WHERE lot_id NOT IN (SELECT lot_id FROM lots);"
|
||||
echo -e " ${GREEN}✓ Deleted ${ORPHANED_IMAGES} orphaned images${NC}"
|
||||
fi
|
||||
|
||||
# Vacuum database to reclaim space
|
||||
echo -e " ${BLUE}[3/3] Compacting database...${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" "VACUUM;"
|
||||
echo -e " ${GREEN}✓ Database compacted${NC}"
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}╔════════════════════════════════════════════════════════╗${NC}"
|
||||
echo -e "${GREEN}║ Cleanup completed successfully ║${NC}"
|
||||
echo -e "${GREEN}╚════════════════════════════════════════════════════════╝${NC}"
|
||||
echo ""
|
||||
|
||||
# Show final state
|
||||
echo -e "${BLUE}Final database state:${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" <<EOF
|
||||
.mode box
|
||||
SELECT
|
||||
'Total lots' as metric,
|
||||
COUNT(*) as count
|
||||
FROM lots
|
||||
UNION ALL
|
||||
SELECT
|
||||
'Total images',
|
||||
COUNT(*)
|
||||
FROM images;
|
||||
EOF
|
||||
|
||||
echo ""
|
||||
DB_SIZE=$(du -h "${LOCAL_DB_PATH}" | cut -f1)
|
||||
echo -e "${BLUE}Database size: ${DB_SIZE}${NC}"
|
||||
echo ""
|
||||
@@ -1,15 +0,0 @@
|
||||
# PowerShell: map the remote share, copy the folder, then clean up
|
||||
$remote = '\\192.168.1.159\shared-auction-data'
|
||||
$local = 'C:\mnt\okcomputer\output\models'
|
||||
|
||||
# (1) create/verify the PSDrive (prompts for password if needed)
|
||||
if (-not (Get-PSDrive -Name Z -ErrorAction SilentlyContinue)) {
|
||||
$cred = Get-Credential -UserName 'tour' -Message 'SMB password for tour@192.168.1.159'
|
||||
New-PSDrive -Name Z -PSProvider FileSystem -Root $remote -Credential $cred -Persist | Out-Null
|
||||
}
|
||||
|
||||
# (2) copy the local folder into the share
|
||||
Copy-Item -Path $local -Destination 'Z:\' -Recurse -Force
|
||||
|
||||
# (3) optional cleanup
|
||||
Remove-PSDrive -Name Z -Force
|
||||
@@ -1,200 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Sync Production Data to Local
|
||||
#
|
||||
# This script copies the production SQLite database and images from the remote
|
||||
# server (athena.lan) to your local development environment.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/sync-production-data.sh [--db-only|--images-only|--all]
|
||||
#
|
||||
# Options:
|
||||
# --db-only Only sync the database (default)
|
||||
# --images-only Only sync the images
|
||||
# --all Sync both database and images
|
||||
# --help Show this help message
|
||||
#
|
||||
|
||||
set -e # Exit on error
|
||||
|
||||
# Configuration
|
||||
REMOTE_HOST="tour@athena.lan"
|
||||
REMOTE_VOLUME="shared-auction-data"
|
||||
LOCAL_DB_PATH="c:/mnt/okcomputer/output/cache.db"
|
||||
LOCAL_IMAGES_PATH="c:/mnt/okcomputer/images"
|
||||
REMOTE_TMP="/tmp"
|
||||
|
||||
# Colors for output
|
||||
GREEN='\033[0;32m'
|
||||
BLUE='\033[0;34m'
|
||||
YELLOW='\033[1;33m'
|
||||
RED='\033[0;31m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Parse arguments
|
||||
SYNC_MODE="db" # Default: database only
|
||||
|
||||
case "${1:-}" in
|
||||
--db-only)
|
||||
SYNC_MODE="db"
|
||||
;;
|
||||
--images-only)
|
||||
SYNC_MODE="images"
|
||||
;;
|
||||
--all)
|
||||
SYNC_MODE="all"
|
||||
;;
|
||||
--help|-h)
|
||||
grep '^#' "$0" | sed 's/^# \?//'
|
||||
exit 0
|
||||
;;
|
||||
"")
|
||||
SYNC_MODE="db"
|
||||
;;
|
||||
*)
|
||||
echo -e "${RED}Error: Unknown option '$1'${NC}"
|
||||
echo "Use --help for usage information"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo -e "${BLUE}╔════════════════════════════════════════════════════════╗${NC}"
|
||||
echo -e "${BLUE}║ Production Data Sync - Auctiora Monitor ║${NC}"
|
||||
echo -e "${BLUE}╚════════════════════════════════════════════════════════╝${NC}"
|
||||
echo ""
|
||||
|
||||
# Function to sync database
|
||||
sync_database() {
|
||||
echo -e "${YELLOW}[1/3] Copying database from Docker volume to /tmp...${NC}"
|
||||
ssh ${REMOTE_HOST} "docker run --rm -v ${REMOTE_VOLUME}:/data -v ${REMOTE_TMP}:${REMOTE_TMP} alpine cp /data/cache.db ${REMOTE_TMP}/cache.db"
|
||||
|
||||
echo -e "${YELLOW}[2/3] Downloading database from remote server...${NC}"
|
||||
# Create backup and remove old local database
|
||||
if [ -f "${LOCAL_DB_PATH}" ]; then
|
||||
BACKUP_PATH="${LOCAL_DB_PATH}.backup-$(date +%Y%m%d-%H%M%S)"
|
||||
echo -e "${BLUE} Backing up existing database to: ${BACKUP_PATH}${NC}"
|
||||
cp "${LOCAL_DB_PATH}" "${BACKUP_PATH}"
|
||||
|
||||
echo -e "${BLUE} Removing old local database...${NC}"
|
||||
rm -f "${LOCAL_DB_PATH}"
|
||||
fi
|
||||
|
||||
# Download new database
|
||||
scp ${REMOTE_HOST}:${REMOTE_TMP}/cache.db "${LOCAL_DB_PATH}"
|
||||
|
||||
echo -e "${YELLOW}[3/3] Cleaning up remote /tmp...${NC}"
|
||||
ssh ${REMOTE_HOST} "rm -f ${REMOTE_TMP}/cache.db"
|
||||
|
||||
# Show database info
|
||||
DB_SIZE=$(du -h "${LOCAL_DB_PATH}" | cut -f1)
|
||||
echo -e "${GREEN}✓ Database synced successfully (${DB_SIZE})${NC}"
|
||||
|
||||
# Show table counts
|
||||
echo -e "${BLUE} Database statistics:${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" <<EOF
|
||||
.mode box
|
||||
SELECT
|
||||
'auctions' as table_name, COUNT(*) as count FROM auctions
|
||||
UNION ALL
|
||||
SELECT 'lots', COUNT(*) FROM lots
|
||||
UNION ALL
|
||||
SELECT 'images', COUNT(*) FROM images
|
||||
UNION ALL
|
||||
SELECT 'cache', COUNT(*) FROM cache;
|
||||
EOF
|
||||
|
||||
# Show data quality report
|
||||
echo -e "${BLUE} Data quality:${NC}"
|
||||
sqlite3 "${LOCAL_DB_PATH}" <<EOF
|
||||
.mode box
|
||||
SELECT
|
||||
'Valid lots' as metric,
|
||||
COUNT(*) as count,
|
||||
ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM lots), 2) || '%' as percentage
|
||||
FROM lots
|
||||
WHERE auction_id IS NOT NULL AND auction_id != ''
|
||||
UNION ALL
|
||||
SELECT
|
||||
'Invalid lots (missing auction_id)',
|
||||
COUNT(*),
|
||||
ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM lots), 2) || '%'
|
||||
FROM lots
|
||||
WHERE auction_id IS NULL OR auction_id = ''
|
||||
UNION ALL
|
||||
SELECT
|
||||
'Lots with intelligence fields',
|
||||
COUNT(*),
|
||||
ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM lots), 2) || '%'
|
||||
FROM lots
|
||||
WHERE followers_count IS NOT NULL OR estimated_min IS NOT NULL;
|
||||
EOF
|
||||
}
|
||||
|
||||
# Function to sync images
|
||||
sync_images() {
|
||||
echo -e "${YELLOW}[1/4] Getting image directory structure from Docker volume...${NC}"
|
||||
|
||||
# Create local images directory if it doesn't exist
|
||||
mkdir -p "${LOCAL_IMAGES_PATH}"
|
||||
|
||||
echo -e "${YELLOW}[2/4] Copying images from Docker volume to /tmp...${NC}"
|
||||
# Copy entire images directory from volume to /tmp
|
||||
ssh ${REMOTE_HOST} "docker run --rm -v ${REMOTE_VOLUME}:/data -v ${REMOTE_TMP}:${REMOTE_TMP} alpine sh -c 'mkdir -p ${REMOTE_TMP}/auction-images && cp -r /data/images/* ${REMOTE_TMP}/auction-images/ 2>/dev/null || true'"
|
||||
|
||||
echo -e "${YELLOW}[3/4] Syncing images to local directory (this may take a while)...${NC}"
|
||||
# Use rsync for efficient incremental sync
|
||||
if command -v rsync &> /dev/null; then
|
||||
echo -e "${BLUE} Using rsync for efficient transfer...${NC}"
|
||||
rsync -avz --progress ${REMOTE_HOST}:${REMOTE_TMP}/auction-images/ "${LOCAL_IMAGES_PATH}/"
|
||||
else
|
||||
echo -e "${BLUE} Using scp for transfer (install rsync for faster incremental sync)...${NC}"
|
||||
scp -r ${REMOTE_HOST}:${REMOTE_TMP}/auction-images/* "${LOCAL_IMAGES_PATH}/"
|
||||
fi
|
||||
|
||||
echo -e "${YELLOW}[4/4] Cleaning up remote /tmp...${NC}"
|
||||
ssh ${REMOTE_HOST} "rm -rf ${REMOTE_TMP}/auction-images"
|
||||
|
||||
# Show image stats
|
||||
IMAGE_COUNT=$(find "${LOCAL_IMAGES_PATH}" -type f 2>/dev/null | wc -l)
|
||||
IMAGE_SIZE=$(du -sh "${LOCAL_IMAGES_PATH}" 2>/dev/null | cut -f1)
|
||||
echo -e "${GREEN}✓ Images synced successfully${NC}"
|
||||
echo -e "${BLUE} Total images: ${IMAGE_COUNT}${NC}"
|
||||
echo -e "${BLUE} Total size: ${IMAGE_SIZE}${NC}"
|
||||
}
|
||||
|
||||
# Execute sync based on mode
|
||||
START_TIME=$(date +%s)
|
||||
|
||||
case "$SYNC_MODE" in
|
||||
db)
|
||||
echo -e "${BLUE}Mode: Database only${NC}"
|
||||
echo ""
|
||||
sync_database
|
||||
;;
|
||||
images)
|
||||
echo -e "${BLUE}Mode: Images only${NC}"
|
||||
echo ""
|
||||
sync_images
|
||||
;;
|
||||
all)
|
||||
echo -e "${BLUE}Mode: Database + Images${NC}"
|
||||
echo ""
|
||||
sync_database
|
||||
echo ""
|
||||
sync_images
|
||||
;;
|
||||
esac
|
||||
|
||||
END_TIME=$(date +%s)
|
||||
DURATION=$((END_TIME - START_TIME))
|
||||
|
||||
echo ""
|
||||
echo -e "${GREEN}╔════════════════════════════════════════════════════════╗${NC}"
|
||||
echo -e "${GREEN}║ Sync completed successfully in ${DURATION} seconds ║${NC}"
|
||||
echo -e "${GREEN}╚════════════════════════════════════════════════════════╝${NC}"
|
||||
echo ""
|
||||
echo -e "${BLUE}Next steps:${NC}"
|
||||
echo -e " 1. Verify data: sqlite3 ${LOCAL_DB_PATH} 'SELECT COUNT(*) FROM lots;'"
|
||||
echo -e " 2. Start monitor: mvn quarkus:dev"
|
||||
echo -e " 3. Open dashboard: http://localhost:8080"
|
||||
echo ""
|
||||
@@ -4,6 +4,8 @@ import auctiora.db.*;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.jdbi.v3.core.Jdbi;
|
||||
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
@@ -15,204 +17,254 @@ import java.util.List;
|
||||
*/
|
||||
@Slf4j
|
||||
public class DatabaseService {
|
||||
|
||||
private final Jdbi jdbi;
|
||||
private final LotRepository lotRepository;
|
||||
private final AuctionRepository auctionRepository;
|
||||
private final ImageRepository imageRepository;
|
||||
|
||||
/**
|
||||
* Constructor for programmatic instantiation (tests, CLI tools).
|
||||
*/
|
||||
public DatabaseService(String dbPath) {
|
||||
String url = "jdbc:sqlite:" + dbPath + "?journal_mode=WAL&busy_timeout=10000";
|
||||
this.jdbi = Jdbi.create(url);
|
||||
|
||||
// Initialize schema
|
||||
DatabaseSchema.ensureSchema(jdbi);
|
||||
|
||||
// Create repositories
|
||||
this.lotRepository = new LotRepository(jdbi);
|
||||
this.auctionRepository = new AuctionRepository(jdbi);
|
||||
this.imageRepository = new ImageRepository(jdbi);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor with JDBI instance (for dependency injection).
|
||||
*/
|
||||
public DatabaseService(Jdbi jdbi) {
|
||||
this.jdbi = jdbi;
|
||||
DatabaseSchema.ensureSchema(jdbi);
|
||||
|
||||
this.lotRepository = new LotRepository(jdbi);
|
||||
this.auctionRepository = new AuctionRepository(jdbi);
|
||||
this.imageRepository = new ImageRepository(jdbi);
|
||||
}
|
||||
|
||||
// ==================== LEGACY COMPATIBILITY METHODS ====================
|
||||
// These methods delegate to repositories for backward compatibility
|
||||
|
||||
void ensureSchema() {
|
||||
DatabaseSchema.ensureSchema(jdbi);
|
||||
}
|
||||
|
||||
synchronized void upsertAuction(AuctionInfo auction) {
|
||||
auctionRepository.upsert(auction);
|
||||
}
|
||||
|
||||
synchronized List<AuctionInfo> getAllAuctions() {
|
||||
return auctionRepository.getAll();
|
||||
}
|
||||
|
||||
synchronized List<AuctionInfo> getAuctionsByCountry(String countryCode) {
|
||||
return auctionRepository.getByCountry(countryCode);
|
||||
}
|
||||
|
||||
synchronized void upsertLot(Lot lot) {
|
||||
lotRepository.upsert(lot);
|
||||
}
|
||||
|
||||
synchronized void upsertLotWithIntelligence(Lot lot) {
|
||||
lotRepository.upsertWithIntelligence(lot);
|
||||
}
|
||||
|
||||
synchronized void updateLotCurrentBid(Lot lot) {
|
||||
lotRepository.updateCurrentBid(lot);
|
||||
}
|
||||
|
||||
synchronized void updateLotNotificationFlags(Lot lot) {
|
||||
lotRepository.updateNotificationFlags(lot);
|
||||
}
|
||||
|
||||
synchronized List<Lot> getActiveLots() {
|
||||
return lotRepository.getActiveLots();
|
||||
}
|
||||
|
||||
synchronized List<Lot> getAllLots() {
|
||||
return lotRepository.getAllLots();
|
||||
}
|
||||
|
||||
synchronized List<BidHistory> getBidHistory(String lotId) {
|
||||
return lotRepository.getBidHistory(lotId);
|
||||
}
|
||||
|
||||
synchronized void insertBidHistory(List<BidHistory> bidHistory) {
|
||||
lotRepository.insertBidHistory(bidHistory);
|
||||
}
|
||||
|
||||
synchronized void insertImage(long lotId, String url, String filePath, List<String> labels) {
|
||||
imageRepository.insert(lotId, url, filePath, labels);
|
||||
}
|
||||
|
||||
synchronized void updateImageLabels(int imageId, List<String> labels) {
|
||||
imageRepository.updateLabels(imageId, labels);
|
||||
}
|
||||
|
||||
synchronized List<String> getImageLabels(int imageId) {
|
||||
return imageRepository.getLabels(imageId);
|
||||
}
|
||||
|
||||
synchronized List<ImageRecord> getImagesForLot(long lotId) {
|
||||
return imageRepository.getImagesForLot(lotId)
|
||||
.stream()
|
||||
.map(img -> new ImageRecord(img.id(), img.lotId(), img.url(), img.filePath(), img.labels()))
|
||||
.toList();
|
||||
}
|
||||
|
||||
synchronized List<ImageDetectionRecord> getImagesNeedingDetection() {
|
||||
return imageRepository.getImagesNeedingDetection()
|
||||
.stream()
|
||||
.map(img -> new ImageDetectionRecord(img.id(), img.lotId(), img.filePath()))
|
||||
.toList();
|
||||
}
|
||||
|
||||
synchronized int getImageCount() {
|
||||
return imageRepository.getImageCount();
|
||||
}
|
||||
|
||||
synchronized List<AuctionInfo> importAuctionsFromScraper() {
|
||||
return jdbi.withHandle(handle -> {
|
||||
var sql = """
|
||||
SELECT
|
||||
l.auction_id,
|
||||
MIN(l.title) as title,
|
||||
MIN(l.location) as location,
|
||||
MIN(l.url) as url,
|
||||
COUNT(*) as lots_count,
|
||||
MIN(l.closing_time) as first_lot_closing_time,
|
||||
MIN(l.scraped_at) as scraped_at
|
||||
FROM lots l
|
||||
WHERE l.auction_id IS NOT NULL
|
||||
GROUP BY l.auction_id
|
||||
""";
|
||||
|
||||
return handle.createQuery(sql)
|
||||
.map((rs, ctx) -> {
|
||||
try {
|
||||
var auction = ScraperDataAdapter.fromScraperAuction(rs);
|
||||
if (auction.auctionId() != 0L) {
|
||||
auctionRepository.upsert(auction);
|
||||
return auction;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("Failed to import auction: {}", e.getMessage());
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.list()
|
||||
.stream()
|
||||
.filter(a -> a != null)
|
||||
.toList();
|
||||
});
|
||||
}
|
||||
|
||||
synchronized List<Lot> importLotsFromScraper() {
|
||||
return jdbi.withHandle(handle -> {
|
||||
var sql = "SELECT * FROM lots";
|
||||
|
||||
return handle.createQuery(sql)
|
||||
.map((rs, ctx) -> {
|
||||
try {
|
||||
var lot = ScraperDataAdapter.fromScraperLot(rs);
|
||||
if (lot.lotId() != 0L && lot.saleId() != 0L) {
|
||||
lotRepository.upsert(lot);
|
||||
return lot;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("Failed to import lot: {}", e.getMessage());
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.list()
|
||||
.stream()
|
||||
.filter(l -> l != null)
|
||||
.toList();
|
||||
});
|
||||
}
|
||||
|
||||
// ==================== DIRECT REPOSITORY ACCESS ====================
|
||||
// Expose repositories for modern usage patterns
|
||||
|
||||
public LotRepository lots() {
|
||||
return lotRepository;
|
||||
}
|
||||
|
||||
public AuctionRepository auctions() {
|
||||
return auctionRepository;
|
||||
}
|
||||
|
||||
public ImageRepository images() {
|
||||
return imageRepository;
|
||||
}
|
||||
|
||||
public Jdbi getJdbi() {
|
||||
return jdbi;
|
||||
}
|
||||
|
||||
// ==================== LEGACY RECORDS ====================
|
||||
// Keep records for backward compatibility with existing code
|
||||
|
||||
public record ImageRecord(int id, long lotId, String url, String filePath, String labels) {}
|
||||
|
||||
public record ImageDetectionRecord(int id, long lotId, String filePath) {}
|
||||
|
||||
    private final Jdbi jdbi;
    private final LotRepository lotRepository;
    private final AuctionRepository auctionRepository;
    private final ImageRepository imageRepository;

    /**
     * Constructor for programmatic instantiation (tests, CLI tools).
     */
    private final String url;

    public DatabaseService(String dbPath) {
        this.url = "jdbc:sqlite:" + dbPath + "?journal_mode=WAL&busy_timeout=10000";
        this.jdbi = Jdbi.create(url);

        // Initialize schema
        DatabaseSchema.ensureSchema(jdbi);

        // Create repositories
        this.lotRepository = new LotRepository(jdbi);
        this.auctionRepository = new AuctionRepository(jdbi);
        this.imageRepository = new ImageRepository(jdbi);
    }

    /**
     * Constructor with JDBI instance (for dependency injection).
     */
    public DatabaseService(Jdbi jdbi) {
        this.jdbi = jdbi;
        this.url = null; // Use null as this constructor doesn't use the URL

        DatabaseSchema.ensureSchema(jdbi);

        this.lotRepository = new LotRepository(jdbi);
        this.auctionRepository = new AuctionRepository(jdbi);
        this.imageRepository = new ImageRepository(jdbi);
    }

    // ==================== LEGACY COMPATIBILITY METHODS ====================
    // These methods delegate to repositories for backward compatibility

    void ensureSchema() {
        DatabaseSchema.ensureSchema(jdbi);
    }

    synchronized void upsertAuction(AuctionInfo auction) {
        auctionRepository.upsert(auction);
    }

    synchronized List<AuctionInfo> getAllAuctions() {
        return auctionRepository.getAll();
    }

    synchronized List<AuctionInfo> getAuctionsByCountry(String countryCode) {
        return auctionRepository.getByCountry(countryCode);
    }

    synchronized void upsertLot(Lot lot) {
        retry(() -> {
            try (var connection = DriverManager.getConnection(url)) {
                connection.setAutoCommit(false); // Start transaction
                lotRepository.upsert(lot); // Perform update
                connection.commit(); // Commit transaction
            } catch (SQLException e) {
                throw new RuntimeException("Failed to upsert lot", e);
            }
        });
    }

    void upsertLots(List<Lot> lots) { // Batch import with transactions
        retry(() -> {
            try (var connection = DriverManager.getConnection(url)) {
                connection.setAutoCommit(false); // Start transaction
                for (Lot lot : lots) {
                    lotRepository.upsert(lot); // Upsert individual lot
                }
                connection.commit(); // Commit transaction
            } catch (SQLException e) {
                throw new RuntimeException("Failed to upsert lots", e);
            }
        });
    }

    // Retry logic for transient database failures
    private void retry(Runnable action) {
        final int maxRetries = 3;
        for (int attempt = 1; attempt <= maxRetries; attempt++) {
            try {
                action.run(); // Attempt action
                return; // Exit on success
            } catch (RuntimeException e) {
                boolean isBusy = e.getCause() instanceof SQLException
                        && e.getCause().getMessage().contains("[SQLITE_BUSY]");
                if (isBusy && attempt < maxRetries) {
                    log.warn("Database locked, retrying {} of {}", attempt, maxRetries);
                    try {
                        Thread.sleep(500L * attempt); // Backoff
                    } catch (InterruptedException interrupted) {
                        Thread.currentThread().interrupt();
                    }
                } else {
                    throw e; // Non-retryable error or max retries exceeded
                }
            }
        }
    }

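    // Not part of this commit: the transaction opened on the raw DriverManager connection
    // above never wraps lotRepository.upsert(...), which runs on JDBI's own connection.
    // A minimal sketch of a genuinely transactional batch upsert, assuming a hypothetical
    // LotRepository#upsert(Handle, Lot) overload that this diff does not show:
    //
    // void upsertLotsTransactional(List<Lot> lots) {
    //     retry(() -> jdbi.useTransaction(handle -> {
    //         for (Lot lot : lots) {
    //             lotRepository.upsert(handle, lot); // bound to this handle, so commit/rollback covers it
    //         }
    //     })); // useTransaction commits on success and rolls back if the lambda throws
    // }
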
    synchronized void upsertLotWithIntelligence(Lot lot) {
        lotRepository.upsertWithIntelligence(lot);
    }

    synchronized void updateLotCurrentBid(Lot lot) {
        lotRepository.updateCurrentBid(lot);
    }

    synchronized void updateLotNotificationFlags(Lot lot) {
        lotRepository.updateNotificationFlags(lot);
    }

    synchronized List<Lot> getActiveLots() {
        return lotRepository.getActiveLots();
    }

    synchronized List<Lot> getAllLots() {
        return lotRepository.getAllLots();
    }

    synchronized List<BidHistory> getBidHistory(String lotId) {
        return lotRepository.getBidHistory(lotId);
    }

    synchronized void insertBidHistory(List<BidHistory> bidHistory) {
        lotRepository.insertBidHistory(bidHistory);
    }

    synchronized void insertImage(long lotId, String url, String filePath, List<String> labels) {
        imageRepository.insert(lotId, url, filePath, labels);
    }

    synchronized void updateImageLabels(int imageId, List<String> labels) {
        imageRepository.updateLabels(imageId, labels);
    }

    synchronized List<String> getImageLabels(int imageId) {
        return imageRepository.getLabels(imageId);
    }

    synchronized List<ImageRecord> getImagesForLot(long lotId) {
        return imageRepository.getImagesForLot(lotId)
                .stream()
                .map(img -> new ImageRecord(img.id(), img.lotId(), img.url(), img.filePath(), img.labels()))
                .toList();
    }

    synchronized List<ImageDetectionRecord> getImagesNeedingDetection() {
        return imageRepository.getImagesNeedingDetection()
                .stream()
                .map(img -> new ImageDetectionRecord(img.id(), img.lotId(), img.filePath()))
                .toList();
    }

    synchronized int getImageCount() {
        return imageRepository.getImageCount();
    }

    synchronized List<AuctionInfo> importAuctionsFromScraper() {
        return jdbi.withHandle(handle -> {
            var sql = """
                    SELECT
                        l.auction_id,
                        MIN(l.title) as title,
                        MIN(l.location) as location,
                        MIN(l.url) as url,
                        COUNT(*) as lots_count,
                        MIN(l.closing_time) as first_lot_closing_time,
                        MIN(l.scraped_at) as scraped_at
                    FROM lots l
                    WHERE l.auction_id IS NOT NULL
                    GROUP BY l.auction_id
                    """;

            return handle.createQuery(sql)
                    .map((rs, ctx) -> {
                        try {
                            var auction = ScraperDataAdapter.fromScraperAuction(rs);
                            if (auction.auctionId() != 0L) {
                                auctionRepository.upsert(auction);
                                return auction;
                            }
                        } catch (Exception e) {
                            log.warn("Failed to import auction: {}", e.getMessage());
                        }
                        return null;
                    })
                    .list()
                    .stream()
                    .filter(a -> a != null)
                    .toList();
        });
    }

    synchronized List<Lot> importLotsFromScraper() {
        return jdbi.withHandle(handle -> {
            var sql = "SELECT * FROM lots";

            return handle.createQuery(sql)
                    .map((rs, ctx) -> {
                        try {
                            var lot = ScraperDataAdapter.fromScraperLot(rs);
                            if (lot.lotId() != 0L && lot.saleId() != 0L) {
                                lotRepository.upsert(lot);
                                return lot;
                            }
                        } catch (Exception e) {
                            log.warn("Failed to import lot: {}", e.getMessage());
                        }
                        return null;
                    })
                    .list()
                    .stream()
                    .filter(l -> l != null)
                    .toList();
        });
    }

    // ==================== DIRECT REPOSITORY ACCESS ====================
    // Expose repositories for modern usage patterns

    public LotRepository lots() {
        return lotRepository;
    }

    public AuctionRepository auctions() {
        return auctionRepository;
    }

    public ImageRepository images() {
        return imageRepository;
    }

    public Jdbi getJdbi() {
        return jdbi;
    }

    // ==================== LEGACY RECORDS ====================
    // Keep records for backward compatibility with existing code

    public record ImageRecord(int id, long lotId, String url, String filePath, String labels) { }

    public record ImageDetectionRecord(int id, long lotId, String filePath) { }
}

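A short usage sketch of the DatabaseService assembled above (not part of this commit; it assumes caller code living in the same package, since several of the facade methods are package-private):

// Hypothetical usage sketch only.
var db = new DatabaseService("/mnt/okcomputer/output/cache.db");
db.importAuctionsFromScraper();                 // aggregate auctions out of the scraped lots table
var active = db.getActiveLots();                // legacy facade...
var viaRepository = db.lots().getActiveLots();  // ...or the repository exposed for newer code
System.out.println("images stored: " + db.getImageCount());
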
@@ -4,9 +4,9 @@ import javax.mail.Authenticator;
import javax.mail.Message;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import com.sun.mail.smtp.SMTPTransport;
import lombok.extern.slf4j.Slf4j;

import java.awt.SystemTray;
@@ -14,27 +14,47 @@ import java.awt.Toolkit;
import java.awt.TrayIcon;
import java.util.Date;
import java.util.Properties;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

@Slf4j
public record NotificationService(Config cfg) {

    // Extra convenience constructor: raw string → Config
    public NotificationService(String raw) {
        this(Config.parse(raw));
    }

    public void sendNotification(String msg, String title, int prio) {
        if (cfg.useDesktop()) sendDesktop(title, msg, prio);
        if (cfg.useEmail()) sendEmail(title, msg, prio);
    }

    private void sendDesktop(String title, String msg, int prio) {
        // During tests, aggregate and send a single summary at JVM shutdown
        if (isUnderTest()) {
            TestRunAggregator.recordDesktop(title, msg, prio, cfg);
            return;
        }
        // Optional mute flag for non-test runs
        if (Boolean.getBoolean("auctiora.desktop.mute")) {
            log.debug("Desktop notification suppressed: {}", title);
            return;
        }
        // Deliver immediately outside tests
        deliverDesktop(title, msg, prio);
    }

    // Raw desktop delivery (no test suppression)
    private void deliverDesktop(String title, String msg, int prio) {
        try {
            if (!SystemTray.isSupported()) {
                log.info("Desktop not supported: {}", title);
                return;
            }

            var tray = SystemTray.getSystemTray();
            var icon = new TrayIcon(
                    Toolkit.getDefaultToolkit().createImage(new byte[0]),
@@ -42,43 +62,56 @@ public record NotificationService(Config cfg) {
            );
            icon.setImageAutoSize(true);
            tray.add(icon);

            var type = prio > 0 ? TrayIcon.MessageType.WARNING : TrayIcon.MessageType.INFO;
            icon.displayMessage(title, msg, type);

            // Remove tray icon asynchronously to avoid blocking the caller
            int delayMs = Integer.getInteger("auctiora.desktop.delay.ms", 0);
            if (delayMs <= 0) {
                var t = new Thread(() -> {
                    try {
                        Thread.sleep(50);
                    } catch (InterruptedException ignored) {
                    }
                    try {
                        tray.remove(icon);
                    } catch (Exception ignored) {
                    }
                }, "tray-remove");
                t.setDaemon(true);
                t.start();
            var t = new Thread(() -> {
                try {
                    Thread.sleep(50);
                } catch (InterruptedException ignored) {
                }
                try {
                    tray.remove(icon);
                } catch (Exception ignored) {
                }
            }, "tray-remove");
            t.setDaemon(true);
            t.start();
            } else {
                try {
                    Thread.sleep(delayMs);
                } catch (InterruptedException ignored) {
                } finally {
                    try {
                        tray.remove(icon);
                    } catch (Exception ignored) {
                    }
                }
            try {
                Thread.sleep(delayMs);
            } catch (InterruptedException ignored) {
            } finally {
                try {
                    tray.remove(icon);
                } catch (Exception ignored) {
                }
            }
            }
            log.info("Desktop notification: {}", title);
        } catch (Exception e) {
            log.warn("Desktop failed: {}", e.getMessage());
        }
    }

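    // Not part of this commit: a small usage sketch of the optional desktop system
    // properties read above; the lot/bid text below is illustrative only.
    //
    // System.setProperty("auctiora.desktop.delay.ms", "2000"); // keep the tray icon ~2s before removal
    // System.setProperty("auctiora.desktop.mute", "true");     // or suppress desktop pop-ups entirely
    // new NotificationService("desktop").sendNotification("Lot 42: new bid of EUR 120", "Auction update", 0);
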
    private void sendEmail(String title, String msg, int prio) {
        // During tests, aggregate and send a single summary at JVM shutdown
        if (isUnderTest()) {
            TestRunAggregator.recordEmail(title, msg, prio, cfg);
            return;
        }
        // Deliver immediately outside tests
        deliverEmail(title, msg, prio);
    }

    // Raw email delivery (no test suppression), returns when send attempted
    private void deliverEmail(String title, String msg, int prio) {
        SMTPTransport transport = null;
        boolean sent = false;
        try {
            var props = new Properties();
            props.put("mail.smtp.auth", "true");
@@ -88,16 +121,17 @@ public record NotificationService(Config cfg) {
            props.put("mail.smtp.port", "587");
            props.put("mail.smtp.ssl.trust", "smtp.gmail.com");
            props.put("mail.smtp.ssl.protocols", "TLSv1.2");
            // Avoid waiting for QUIT reply (prevents "Exception reading response" after successful send)
            props.put("mail.smtp.quitwait", "false");

            // Connection timeouts (configurable; short during tests, longer otherwise)
            int smtpTimeoutMs = Integer.getInteger("auctiora.smtp.timeout.ms", isUnderTest() ? 200 : 10000);
            String t = String.valueOf(smtpTimeoutMs);
            int smtpTimeoutMs = Integer.getInteger("auctiora.smtp.timeout.ms", isUnderTest() ? 200 : 10000);
            String t = String.valueOf(smtpTimeoutMs);
            props.put("mail.smtp.connectiontimeout", t);
            props.put("mail.smtp.timeout", t);
            props.put("mail.smtp.writetimeout", t);

            var session = Session.getInstance(props, new Authenticator() {
                @Override
                protected PasswordAuthentication getPasswordAuthentication() {
                    return new PasswordAuthentication(cfg.smtpUsername(), cfg.smtpPassword());
@@ -116,14 +150,34 @@ public record NotificationService(Config cfg) {
                m.setHeader("Importance", "High");
            }

            Transport.send(m);
            transport = (SMTPTransport) session.getTransport("smtp");
            transport.connect("smtp.gmail.com", cfg.smtpUsername(), cfg.smtpPassword());
            transport.sendMessage(m, m.getAllRecipients());
            sent = true;

            log.info("Email notification sent: {}", title);
            String resp = transport.getLastServerResponse();
            if (resp != null) {
                log.debug("SMTP response: {}", resp);
            }
        } catch (javax.mail.AuthenticationFailedException e) {
            log.warn("Email authentication failed - check Gmail App Password: {}", e.getMessage());
        } catch (javax.mail.MessagingException e) {
            log.warn("Email connection failed (network/firewall issue?): {}", e.getMessage());
            if (sent) {
                // Message accepted by server; subsequent connection close caused exception
                log.info("Email sent (connection closed early): {}", e.getMessage());
            } else {
                log.warn("Email connection failed (network/firewall issue?): {}", e.getMessage());
            }
        } catch (Exception e) {
            log.warn("Email failed: {}", e.getMessage());
        } finally {
            if (transport != null) {
                try {
                    transport.close();
                } catch (Exception ignored) {
                }
            }
        }
    }

@@ -151,15 +205,15 @@ public record NotificationService(Config cfg) {
            throw new IllegalArgumentException("Use 'desktop' or 'smtp:username:password:toEmail'");
        }
    }

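    // Not part of this commit: the two configuration strings accepted by Config.parse above,
    // shown with placeholder credentials.
    //
    // var desktopOnly = new NotificationService("desktop");
    // var withEmail = new NotificationService("smtp:user@gmail.com:app-password:recipient@example.com");
    // withEmail.sendNotification("Lot 42 closes in 10 minutes", "Auction alert", 1); // prio > 0 marks the mail high importance
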
    private static boolean isUnderTest() {
        try {
            // Explicit override
            if (Boolean.getBoolean("auctiora.test")) return true;

            // Maven Surefire commonly sets this property
            if (System.getProperty("surefire.test.class.path") != null) return true;

            // Fallback: check classpath hint
            String cp = System.getProperty("java.class.path", "");
            return cp.contains("surefire") || cp.contains("junit");
@@ -167,4 +221,72 @@ public record NotificationService(Config cfg) {
            return false;
        }
    }

    // Aggregates notifications during test runs and emits a single summary at shutdown
    private static final class TestRunAggregator {
        private static final AtomicBoolean HOOK_ADDED = new AtomicBoolean(false);
        private static final List<String> TITLES = Collections.synchronizedList(new ArrayList<>());
        private static final List<String> MESSAGES = Collections.synchronizedList(new ArrayList<>());
        private static final AtomicInteger TOTAL = new AtomicInteger(0);
        private static final AtomicInteger CRITICAL = new AtomicInteger(0);
        private static volatile Config LAST_EMAIL_CFG;

        static void recordDesktop(String title, String msg, int prio, Config cfg) {
            record(title, msg, prio, cfg);
        }

        static void recordEmail(String title, String msg, int prio, Config cfg) {
            LAST_EMAIL_CFG = cfg != null && cfg.useEmail() ? cfg : LAST_EMAIL_CFG;
            record(title, msg, prio, cfg);
        }

        private static void record(String title, String msg, int prio, Config cfg) {
            TOTAL.incrementAndGet();
            if (prio > 0) CRITICAL.incrementAndGet();
            TITLES.add(title != null ? title : "");
            MESSAGES.add(msg != null ? msg : "");
            registerShutdownHookOnce();
        }

        private static void registerShutdownHookOnce() {
            if (HOOK_ADDED.compareAndSet(false, true)) {
                Runtime.getRuntime().addShutdownHook(new Thread(() -> {
                    try {
                        if (!isUnderTest()) return; // Only summarize during tests

                        int total = TOTAL.get();
                        int critical = CRITICAL.get();
                        StringBuilder sb = new StringBuilder();
                        sb.append("Notifications during tests: ").append(total)
                                .append(" total, ").append(critical).append(" critical");

                        // Include up to 3 sample titles
                        int limit = Math.min(3, TITLES.size());
                        if (limit > 0) {
                            sb.append("\nExamples:");
                            for (int i = 0; i < limit; i++) {
                                sb.append("\n - ").append(TITLES.get(i));
                            }
                        }

                        String summaryTitle = "Test run summary";
                        String summaryBody = sb.toString();

                        // One desktop summary
                        try {
                            new NotificationService("desktop").deliverDesktop(summaryTitle, summaryBody, critical > 0 ? 1 : 0);
                        } catch (Exception ignored) {}

                        // One email summary (if config available)
                        if (LAST_EMAIL_CFG != null && LAST_EMAIL_CFG.useEmail()) {
                            try {
                                new NotificationService(LAST_EMAIL_CFG).deliverEmail(summaryTitle, summaryBody, critical > 0 ? 1 : 0);
                            } catch (Exception ignored) {}
                        }
                    } catch (Exception ignored) {
                    }
                }, "notify-summary"));
            }
        }
    }
}

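Not from this commit: a brief sketch of forcing the test-aggregation path explicitly when the Surefire/JUnit classpath heuristics above do not apply.

// Hypothetical sketch: force the "under test" path so notifications are only recorded
// and a single "Test run summary" is emitted by the shutdown hook on JVM exit.
System.setProperty("auctiora.test", "true");
new NotificationService("desktop").sendNotification("scraper finished", "Smoke test", 0); // recorded, not displayed
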
@@ -104,10 +104,10 @@ public class AuctionRepository {
        return jdbi.withHandle(handle ->
            handle.createQuery("SELECT * FROM auctions")
                .map((rs, ctx) -> {
                    var closingStr = rs.getString("closing_time");
                    LocalDateTime closingTime = auctiora.ScraperDataAdapter.parseTimestamp(closingStr);

                    return new AuctionInfo(
                    var closingStr = rs.getString("closing_time");
                    var closingTime = auctiora.ScraperDataAdapter.parseTimestamp(closingStr);

                    return new AuctionInfo(
                        rs.getLong("auction_id"),
                        rs.getString("title"),
                        rs.getString("location"),

@@ -113,7 +113,7 @@ class NotificationServiceTest {
    @DisplayName("Should include both desktop and email when SMTP configured")
    void testBothNotificationChannels() {
        var service = new NotificationService(
            "smtp:user@gmail.com:password:recipient@example.com"
            "smtp:michael.bakker1986@gmail.com:agrepolhlnvhipkv:michael.bakker1986@gmail.com"
        );

        // Both desktop and email should be attempted

65
src/test/resources/application.properties
Normal file
@@ -0,0 +1,65 @@
# Application Configuration
# Values will be injected from pom.xml during build
quarkus.application.name=${project.artifactId}
quarkus.application.version=${project.version}
# Custom properties for groupId if needed
application.groupId=${project.groupId}
application.artifactId=${project.artifactId}
application.version=${project.version}

# HTTP Configuration
quarkus.http.port=8081
# ========== DEVELOPMENT (quarkus:dev) ==========
%dev.quarkus.http.host=127.0.0.1
# ========== PRODUCTION (Docker/JAR) ==========
%prod.quarkus.http.host=0.0.0.0
# ========== TEST PROFILE ==========
%test.quarkus.http.host=localhost

# Enable CORS for frontend development
quarkus.http.cors=true
quarkus.http.cors.origins=*
quarkus.http.cors.methods=GET,POST,PUT,DELETE,OPTIONS
quarkus.http.cors.headers=accept,authorization,content-type,x-requested-with

# Logging Configuration
quarkus.log.console.format=%d{HH:mm:ss} %-5p [%c{2.}] (%t) %s%e%n
quarkus.log.console.level=INFO

# Development mode settings
%dev.quarkus.log.console.level=DEBUG
%dev.quarkus.live-reload.instrumentation=true

# JVM Arguments for native access (Jansi, OpenCV, etc.)
quarkus.native.additional-build-args=--enable-native-access=ALL-UNNAMED

# Production optimizations
%prod.quarkus.package.type=fast-jar
%prod.quarkus.http.enable-compression=true

# Static resources
quarkus.http.enable-compression=true
quarkus.rest.path=/
quarkus.http.root-path=/

# Auction Monitor Configuration
auction.database.path=/mnt/okcomputer/output/cache.db
auction.images.path=/mnt/okcomputer/output/images
# auction.notification.config=desktop
# Format: smtp:username:password:recipient_email
auction.notification.config=smtp:michael.bakker1986@gmail.com:agrepolhlnvhipkv:michael.bakker1986@gmail.com

auction.yolo.config=/mnt/okcomputer/output/models/yolov4.cfg
auction.yolo.weights=/mnt/okcomputer/output/models/yolov4.weights
auction.yolo.classes=/mnt/okcomputer/output/models/coco.names

# HTTP Rate Limiting Configuration
# Prevents overloading external services and getting blocked
auction.http.rate-limit.default-max-rps=2
auction.http.rate-limit.troostwijk-max-rps=1
auction.http.timeout-seconds=30

# Health Check Configuration
quarkus.smallrye-health.root-path=/health

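Not part of this commit: a minimal sketch of how the auction.* keys above would typically be injected in Quarkus code, assuming a Quarkus 3 / Jakarta setup; the bean and field names are illustrative only.

// Hypothetical sketch: reading the properties above via MicroProfile Config.
import jakarta.enterprise.context.ApplicationScoped;
import org.eclipse.microprofile.config.inject.ConfigProperty;

@ApplicationScoped
public class AuctionSettings {

    @ConfigProperty(name = "auction.database.path")
    String databasePath;

    @ConfigProperty(name = "auction.http.rate-limit.default-max-rps", defaultValue = "2")
    int defaultMaxRps;

    @ConfigProperty(name = "auction.notification.config")
    String notificationConfig; // "desktop" or "smtp:username:password:recipient"
}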