Fix mock tests
.idea/compiler.xml (generated)  |  34
@@ -8,11 +8,43 @@
         <sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
         <outputRelativeToContentRoot value="true" />
       </profile>
+      <profile name="Annotation profile for Troostwijk Auction Scraper" enabled="true">
+        <sourceOutputDir name="target/generated-sources/annotations" />
+        <sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
+        <outputRelativeToContentRoot value="true" />
+        <processorPath useClasspath="false">
+          <entry name="$MAVEN_REPOSITORY$/org/projectlombok/lombok/1.18.40/lombok-1.18.40.jar" />
+          <entry name="$MAVEN_REPOSITORY$/io/quarkus/quarkus-extension-processor/3.17.7/quarkus-extension-processor-3.17.7.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/jboss/jdeparser/jdeparser/2.0.3.Final/jdeparser-2.0.3.Final.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/jsoup/jsoup/1.15.3/jsoup-1.15.3.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/github/javaparser/javaparser-core/3.26.2/javaparser-core-3.26.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.18.2/jackson-annotations-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.18.2/jackson-core-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/yaml/snakeyaml/2.3/snakeyaml-2.3.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/module/jackson-module-parameter-names/2.18.2/jackson-module-parameter-names-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/io/quarkus/quarkus-bootstrap-app-model/3.17.7/quarkus-bootstrap-app-model-3.17.7.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/projectlombok/lombok/1.18.40/lombok-1.18.40.jar" />
+          <entry name="$MAVEN_REPOSITORY$/io/quarkus/quarkus-extension-processor/3.17.7/quarkus-extension-processor-3.17.7.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/jboss/jdeparser/jdeparser/2.0.3.Final/jdeparser-2.0.3.Final.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/jsoup/jsoup/1.15.3/jsoup-1.15.3.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/github/javaparser/javaparser-core/3.26.2/javaparser-core-3.26.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.18.2/jackson-databind-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.18.2/jackson-annotations-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.18.2/jackson-core-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.18.2/jackson-dataformat-yaml-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/org/yaml/snakeyaml/2.3/snakeyaml-2.3.jar" />
+          <entry name="$MAVEN_REPOSITORY$/com/fasterxml/jackson/module/jackson-module-parameter-names/2.18.2/jackson-module-parameter-names-2.18.2.jar" />
+          <entry name="$MAVEN_REPOSITORY$/io/quarkus/quarkus-bootstrap-app-model/3.17.7/quarkus-bootstrap-app-model-3.17.7.jar" />
+        </processorPath>
+        <module name="auctiora" />
+      </profile>
     </annotationProcessing>
   </component>
   <component name="JavacSettings">
     <option name="ADDITIONAL_OPTIONS_OVERRIDE">
-      <module name="auctiora" options="-Xdiags:verbose -Xlint:all -proc:none" />
+      <module name="auctiora" options="-Xdiags:verbose -Xlint:all -parameters" />
     </option>
   </component>
 </project>
pom.xml  |  56
@@ -215,6 +215,59 @@
     <dependency>
       <groupId>io.quarkus</groupId>
       <artifactId>quarkus-scheduler</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>io.netty</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <!-- Force Netty 4.1.124.Final to avoid sun.misc.Unsafe warnings -->
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-common</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-handler</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-buffer</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-transport</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-codec</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-codec-http</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-codec-http2</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-resolver</artifactId>
+      <version>4.1.124.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty-resolver-dns</artifactId>
+      <version>4.1.124.Final</version>
     </dependency>
     <dependency>
       <groupId>io.quarkus</groupId>
@@ -279,6 +332,7 @@
         <properties>
           <build.timestamp>${maven.build.timestamp}</build.timestamp>
         </properties>
+        <jvmArgs>--enable-native-access=ALL-UNNAMED --add-opens=java.base/jdk.internal.misc=ALL-UNNAMED -Dio.netty.tryReflectionSetAccessible=true</jvmArgs>
       </configuration>
     </plugin>
     <plugin>
@@ -316,7 +370,7 @@
           <compilerArgs>
             <arg>-Xdiags:verbose</arg>
             <arg>-Xlint:all</arg>
-            <arg>-proc:none</arg>
+            <arg>-parameters</arg>
           </compilerArgs>
           <fork>true</fork>
           <excludes>
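A note on the `-proc:none` → `-parameters` swap above: with `jackson-module-parameter-names` on the classpath (it appears in the annotation-processor path in `.idea/compiler.xml`), Jackson can bind JSON fields to constructor parameters by name, but only if the compiler keeps parameter names in the bytecode, which is exactly what `-parameters` does. A minimal sketch of the effect, assuming a hypothetical `LotDto` class that is not part of this project:

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.module.paramnames.ParameterNamesModule;

// Hypothetical DTO, used only to illustrate the compiler flag's effect.
class LotDto {
    public final int lotId;
    public final String title;

    // Without -parameters the parameter names are not stored in the class file,
    // and this creator fails unless each parameter is annotated with @JsonProperty.
    @JsonCreator
    public LotDto(int lotId, String title) {
        this.lotId = lotId;
        this.title = title;
    }
}

class ParameterNamesDemo {
    public static void main(String[] args) throws Exception {
        var mapper = new ObjectMapper().registerModule(new ParameterNamesModule());
        var lot = mapper.readValue("{\"lotId\":42,\"title\":\"Forklift\"}", LotDto.class);
        System.out.println(lot.lotId + " " + lot.title);
    }
}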
Main.java

@@ -33,16 +33,16 @@ public class Main {
         log.info("=== Troostwijk Auction Monitor ===\n");

         // Parse command line arguments
-        String mode = args.length > 0 ? args[0] : "workflow";
+        var mode = args.length > 0 ? args[0] : "workflow";

         // Configuration - Windows paths
-        String databaseFile = System.getenv().getOrDefault("DATABASE_FILE", "C:\\mnt\\okcomputer\\output\\cache.db");
-        String notificationConfig = System.getenv().getOrDefault("NOTIFICATION_CONFIG", "desktop");
+        var databaseFile = System.getenv().getOrDefault("DATABASE_FILE", "C:\\mnt\\okcomputer\\output\\cache.db");
+        var notificationConfig = System.getenv().getOrDefault("NOTIFICATION_CONFIG", "desktop");

         // YOLO model paths (optional - monitor works without object detection)
-        String yoloCfg = "models/yolov4.cfg";
-        String yoloWeights = "models/yolov4.weights";
-        String yoloClasses = "models/coco.names";
+        var yoloCfg = "models/yolov4.cfg";
+        var yoloWeights = "models/yolov4.weights";
+        var yoloClasses = "models/coco.names";

         // Load native OpenCV library (only if models exist)
         try {
@@ -84,8 +84,8 @@ public class Main {
             throws Exception {

         log.info("🚀 Starting in WORKFLOW MODE (Orchestrated Scheduling)\n");

-        WorkflowOrchestrator orchestrator = new WorkflowOrchestrator(
+        var orchestrator = new WorkflowOrchestrator(
                 dbPath, notifConfig, yoloCfg, yoloWeights, yoloClasses
         );

@@ -126,8 +126,8 @@ public class Main {
             throws Exception {

         log.info("🔄 Starting in ONCE MODE (Single Execution)\n");

-        WorkflowOrchestrator orchestrator = new WorkflowOrchestrator(
+        var orchestrator = new WorkflowOrchestrator(
                 dbPath, notifConfig, yoloCfg, yoloWeights, yoloClasses
         );

@@ -178,8 +178,8 @@ public class Main {
             throws Exception {

         log.info("📊 Checking Status...\n");

-        WorkflowOrchestrator orchestrator = new WorkflowOrchestrator(
+        var orchestrator = new WorkflowOrchestrator(
                 dbPath, notifConfig, yoloCfg, yoloWeights, yoloClasses
         );

ObjectDetectionService.java

@@ -36,16 +36,16 @@ public class ObjectDetectionService {

     ObjectDetectionService(String cfgPath, String weightsPath, String classNamesPath) throws IOException {
         // Check if model files exist
         var cfgFile = Paths.get(cfgPath);
         var weightsFile = Paths.get(weightsPath);
         var classNamesFile = Paths.get(classNamesPath);

         if (!Files.exists(cfgFile) || !Files.exists(weightsFile) || !Files.exists(classNamesFile)) {
             log.info("⚠️ Object detection disabled: YOLO model files not found");
             log.info(" Expected files:");
-            log.info(" - " + cfgPath);
-            log.info(" - " + weightsPath);
-            log.info(" - " + classNamesPath);
+            log.info(" - {}", cfgPath);
+            log.info(" - {}", weightsPath);
+            log.info(" - {}", classNamesPath);
             log.info(" Scraper will continue without image analysis.");
             this.enabled = false;
             this.net = null;
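The change from string concatenation to `{}` placeholders matters most when a log level is disabled: the message is no longer assembled eagerly on every call. A minimal sketch, assuming the project's `log` is an SLF4J-style logger (for example via Lombok's @Slf4j); the class below is illustrative only:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative only: contrasts eager concatenation with deferred formatting.
class LoggingStyleDemo {
    private static final Logger log = LoggerFactory.getLogger(LoggingStyleDemo.class);

    void report(String path) {
        // Concatenation builds the full string even when INFO is disabled.
        log.info(" - " + path);

        // The placeholder form defers formatting until the logger decides to write.
        log.info(" - {}", path);
    }
}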
QuarkusWorkflowScheduler.java

@@ -17,272 +17,272 @@ import java.util.List;
 */
@ApplicationScoped
public class QuarkusWorkflowScheduler {

    private static final Logger LOG = Logger.getLogger(QuarkusWorkflowScheduler.class);

    @Inject
    DatabaseService db;

    @Inject
    NotificationService notifier;

    @Inject
    ObjectDetectionService detector;

    @Inject
    ImageProcessingService imageProcessor;

    @ConfigProperty(name = "auction.database.path")
    String databasePath;

    /**
     * Workflow 1: Import Scraper Data
     * Cron: Every 30 minutes (0 */30 * * * ?)
     * Purpose: Import new auctions and lots from external scraper
     */
    @Scheduled(cron = "{auction.workflow.scraper-import.cron}", identity = "scraper-import")
    void importScraperData() {
        try {
            LOG.info("📥 [WORKFLOW 1] Importing scraper data...");
-           long start = System.currentTimeMillis();
+           var start = System.currentTimeMillis();

            // Import auctions
            var auctions = db.importAuctionsFromScraper();
            LOG.infof(" → Imported %d auctions", auctions.size());

            // Import lots
            var lots = db.importLotsFromScraper();
            LOG.infof(" → Imported %d lots", lots.size());

            // Import image URLs
            var images = db.getUnprocessedImagesFromScraper();
            LOG.infof(" → Found %d unprocessed images", images.size());

-           long duration = System.currentTimeMillis() - start;
+           var duration = System.currentTimeMillis() - start;
            LOG.infof(" ✓ Scraper import completed in %dms", duration);

            // Trigger notification if significant data imported
            if (auctions.size() > 0 || lots.size() > 10) {
                notifier.sendNotification(
                        String.format("Imported %d auctions, %d lots", auctions.size(), lots.size()),
                        "Data Import Complete",
                        0
                );
            }

        } catch (Exception e) {
            LOG.errorf(e, " ❌ Scraper import failed: %s", e.getMessage());
        }
    }

    /**
     * Workflow 2: Process Pending Images
     * Cron: Every 1 hour (0 0 * * * ?)
     * Purpose: Download images and run object detection
     */
    @Scheduled(cron = "{auction.workflow.image-processing.cron}", identity = "image-processing")
    void processImages() {
        try {
            LOG.info("🖼️ [WORKFLOW 2] Processing pending images...");
-           long start = System.currentTimeMillis();
+           var start = System.currentTimeMillis();

            // Get unprocessed images
            var unprocessedImages = db.getUnprocessedImagesFromScraper();

            if (unprocessedImages.isEmpty()) {
                LOG.info(" → No pending images to process");
                return;
            }

            LOG.infof(" → Processing %d images", unprocessedImages.size());

-           int processed = 0;
-           int detected = 0;
+           var processed = 0;
+           var detected = 0;

            for (var imageRecord : unprocessedImages) {
                try {
                    // Download image
-                   String filePath = imageProcessor.downloadImage(
+                   var filePath = imageProcessor.downloadImage(
                            imageRecord.url(),
                            imageRecord.saleId(),
                            imageRecord.lotId()
                    );

                    if (filePath != null) {
                        // Run object detection
                        var labels = detector.detectObjects(filePath);

                        // Save to database
                        db.insertImage(imageRecord.lotId(), imageRecord.url(),
                                filePath, labels);

                        processed++;
                        if (!labels.isEmpty()) {
                            detected++;

                            // Send notification for interesting detections
                            if (labels.size() >= 3) {
                                notifier.sendNotification(
                                        String.format("Lot %d: Detected %s",
                                                imageRecord.lotId(),
                                                String.join(", ", labels)),
                                        "Objects Detected",
                                        0
                                );
                            }
                        }
                    }

                    // Rate limiting
                    Thread.sleep(500);

                } catch (Exception e) {
                    LOG.warnf(" ⚠️ Failed to process image: %s", e.getMessage());
                }
            }

-           long duration = System.currentTimeMillis() - start;
+           var duration = System.currentTimeMillis() - start;
            LOG.infof(" ✓ Processed %d images, detected objects in %d (%.1fs)",
                    processed, detected, duration / 1000.0);

        } catch (Exception e) {
            LOG.errorf(e, " ❌ Image processing failed: %s", e.getMessage());
        }
    }

    /**
     * Workflow 3: Monitor Bids
     * Cron: Every 15 minutes (0 */15 * * * ?)
     * Purpose: Check for bid changes and send notifications
     */
    @Scheduled(cron = "{auction.workflow.bid-monitoring.cron}", identity = "bid-monitoring")
    void monitorBids() {
        try {
            LOG.info("💰 [WORKFLOW 3] Monitoring bids...");
-           long start = System.currentTimeMillis();
+           var start = System.currentTimeMillis();

            var activeLots = db.getActiveLots();
            LOG.infof(" → Checking %d active lots", activeLots.size());

            // Note: In production, this would call Troostwijk API
            // For now, we just track what's in the database
            // The external scraper updates bids, we just notify

-           long duration = System.currentTimeMillis() - start;
+           var duration = System.currentTimeMillis() - start;
            LOG.infof(" ✓ Bid monitoring completed in %dms", duration);

        } catch (Exception e) {
            LOG.errorf(e, " ❌ Bid monitoring failed: %s", e.getMessage());
        }
    }

    /**
     * Workflow 4: Check Closing Times
     * Cron: Every 5 minutes (0 */5 * * * ?)
     * Purpose: Send alerts for lots closing soon
     */
    @Scheduled(cron = "{auction.workflow.closing-alerts.cron}", identity = "closing-alerts")
    void checkClosingTimes() {
        try {
            LOG.info("⏰ [WORKFLOW 4] Checking closing times...");
-           long start = System.currentTimeMillis();
+           var start = System.currentTimeMillis();

            var activeLots = db.getActiveLots();
-           int alertsSent = 0;
+           var alertsSent = 0;

            for (var lot : activeLots) {
                if (lot.closingTime() == null) continue;

-               long minutesLeft = lot.minutesUntilClose();
+               var minutesLeft = lot.minutesUntilClose();

                // Alert for lots closing in 5 minutes
                if (minutesLeft <= 5 && minutesLeft > 0 && !lot.closingNotified()) {
-                   String message = String.format("Kavel %d sluit binnen %d min.",
+                   var message = String.format("Kavel %d sluit binnen %d min.",
                            lot.lotId(), minutesLeft);

                    notifier.sendNotification(message, "Lot Closing Soon", 1);

                    // Mark as notified
                    var updated = new Lot(
                            lot.saleId(), lot.lotId(), lot.title(), lot.description(),
                            lot.manufacturer(), lot.type(), lot.year(), lot.category(),
                            lot.currentBid(), lot.currency(), lot.url(),
                            lot.closingTime(), true
                    );
                    db.updateLotNotificationFlags(updated);

                    alertsSent++;
                }
            }

-           long duration = System.currentTimeMillis() - start;
+           var duration = System.currentTimeMillis() - start;
            LOG.infof(" → Sent %d closing alerts in %dms", alertsSent, duration);

        } catch (Exception e) {
            LOG.errorf(e, " ❌ Closing alerts failed: %s", e.getMessage());
        }
    }

    /**
     * Event-driven trigger: New auction discovered
     */
    public void onNewAuctionDiscovered(AuctionInfo auction) {
        LOG.infof("📣 EVENT: New auction discovered - %s", auction.title());

        try {
            db.upsertAuction(auction);

            notifier.sendNotification(
                    String.format("New auction: %s\nLocation: %s\nLots: %d",
                            auction.title(), auction.location(), auction.lotCount()),
                    "New Auction Discovered",
                    0
            );

        } catch (Exception e) {
            LOG.errorf(e, " ❌ Failed to handle new auction: %s", e.getMessage());
        }
    }

    /**
     * Event-driven trigger: Bid change detected
     */
    public void onBidChange(Lot lot, double previousBid, double newBid) {
        LOG.infof("📣 EVENT: Bid change on lot %d (€%.2f → €%.2f)",
                lot.lotId(), previousBid, newBid);

        try {
            db.updateLotCurrentBid(lot);

            notifier.sendNotification(
                    String.format("Nieuw bod op kavel %d: €%.2f (was €%.2f)",
                            lot.lotId(), newBid, previousBid),
                    "Kavel Bieding Update",
                    0
            );

        } catch (Exception e) {
            LOG.errorf(e, " ❌ Failed to handle bid change: %s", e.getMessage());
        }
    }

    /**
     * Event-driven trigger: Objects detected in image
     */
    public void onObjectsDetected(int lotId, List<String> labels) {
        LOG.infof("📣 EVENT: Objects detected in lot %d - %s",
                lotId, String.join(", ", labels));

        try {
            if (labels.size() >= 2) {
                notifier.sendNotification(
                        String.format("Lot %d contains: %s", lotId, String.join(", ", labels)),
                        "Objects Detected",
                        0
                );
            }
        } catch (Exception e) {
            LOG.errorf(e, " ❌ Failed to send detection notification: %s", e.getMessage());
        }
    }
}
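Given the commit title "Fix mock tests", a natural way to exercise a workflow such as importScraperData() without the Quarkus scheduler is a plain unit test that instantiates the bean and substitutes its collaborators with mocks. The sketch below is an assumption about how such a test could look, not the project's actual test code: it assumes JUnit 5 and Mockito on the test classpath, that the import methods return Lists, and that the test class lives in the same package so the package-private injected fields can be assigned directly.

import static org.mockito.Mockito.*;

import java.util.List;
import org.junit.jupiter.api.Test;

// Hypothetical test sketch; names and stubbing are assumptions.
class QuarkusWorkflowSchedulerTest {

    @Test
    void importScraperDataStaysQuietWhenNothingWasImported() {
        var scheduler = new QuarkusWorkflowScheduler();
        scheduler.db = mock(DatabaseService.class);
        scheduler.notifier = mock(NotificationService.class);
        scheduler.detector = mock(ObjectDetectionService.class);
        scheduler.imageProcessor = mock(ImageProcessingService.class);

        // Empty imports keep the run below the notification threshold.
        when(scheduler.db.importAuctionsFromScraper()).thenReturn(List.of());
        when(scheduler.db.importLotsFromScraper()).thenReturn(List.of());
        when(scheduler.db.getUnprocessedImagesFromScraper()).thenReturn(List.of());

        scheduler.importScraperData();

        // No auctions and at most 10 lots means no "Data Import Complete" notification.
        verify(scheduler.notifier, never()).sendNotification(anyString(), anyString(), anyInt());
    }
}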
RateLimitedHttpClient.java

@@ -18,253 +18,255 @@ import java.util.concurrent.atomic.AtomicLong;

/**
 * Rate-limited HTTP client that enforces per-host request limits.
 *
 * Features:
 * - Per-host rate limiting (configurable max requests per second)
 * - Request counting and monitoring
 * - Thread-safe using semaphores
 * - Automatic host extraction from URLs
 *
 * This prevents overloading external services like Troostwijk and getting blocked.
 */
@ApplicationScoped
public class RateLimitedHttpClient {

    private static final Logger LOG = Logger.getLogger(RateLimitedHttpClient.class);

    private final HttpClient httpClient;
    private final Map<String, RateLimiter> rateLimiters;
    private final Map<String, RequestStats> requestStats;

    @ConfigProperty(name = "auction.http.rate-limit.default-max-rps", defaultValue = "2")
    int defaultMaxRequestsPerSecond;

    @ConfigProperty(name = "auction.http.rate-limit.troostwijk-max-rps", defaultValue = "1")
    int troostwijkMaxRequestsPerSecond;

    @ConfigProperty(name = "auction.http.timeout-seconds", defaultValue = "30")
    int timeoutSeconds;

    public RateLimitedHttpClient() {
        this.httpClient = HttpClient.newBuilder()
                .connectTimeout(Duration.ofSeconds(30))
                .build();
        this.rateLimiters = new ConcurrentHashMap<>();
        this.requestStats = new ConcurrentHashMap<>();
    }

    /**
     * Sends a GET request with automatic rate limiting based on host.
     */
    public HttpResponse<String> sendGet(String url) throws IOException, InterruptedException {
-       HttpRequest request = HttpRequest.newBuilder()
+       var request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .timeout(Duration.ofSeconds(timeoutSeconds))
                .GET()
                .build();

        return send(request, HttpResponse.BodyHandlers.ofString());
    }

    /**
     * Sends a request for binary data (like images) with rate limiting.
     */
    public HttpResponse<byte[]> sendGetBytes(String url) throws IOException, InterruptedException {
-       HttpRequest request = HttpRequest.newBuilder()
+       var request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .timeout(Duration.ofSeconds(timeoutSeconds))
                .GET()
                .build();

        return send(request, HttpResponse.BodyHandlers.ofByteArray());
    }

    /**
     * Sends any HTTP request with automatic rate limiting.
     */
    public <T> HttpResponse<T> send(HttpRequest request, HttpResponse.BodyHandler<T> bodyHandler)
            throws IOException, InterruptedException {

-       String host = extractHost(request.uri());
-       RateLimiter limiter = getRateLimiter(host);
-       RequestStats stats = getRequestStats(host);
+       var host = extractHost(request.uri());
+       var limiter = getRateLimiter(host);
+       var stats = getRequestStats(host);

        // Enforce rate limit (blocks if necessary)
        limiter.acquire();

        // Track request
        stats.incrementTotal();
-       long startTime = System.currentTimeMillis();
+       var startTime = System.currentTimeMillis();

        try {
-           HttpResponse<T> response = httpClient.send(request, bodyHandler);
+           var response = httpClient.send(request, bodyHandler);

-           long duration = System.currentTimeMillis() - startTime;
+           var duration = System.currentTimeMillis() - startTime;
            stats.recordSuccess(duration);

            LOG.debugf("HTTP %d %s %s (%dms)",
                    response.statusCode(), request.method(), host, duration);

            // Track rate limit violations (429 = Too Many Requests)
            if (response.statusCode() == 429) {
                stats.incrementRateLimited();
                LOG.warnf("⚠️ Rate limited by %s (HTTP 429)", host);
            }

            return response;

        } catch (IOException | InterruptedException e) {
            stats.incrementFailed();
            LOG.warnf("❌ HTTP request failed for %s: %s", host, e.getMessage());
            throw e;
        }
    }

    /**
     * Gets or creates a rate limiter for a specific host.
     */
    private RateLimiter getRateLimiter(String host) {
        return rateLimiters.computeIfAbsent(host, h -> {
-           int maxRps = getMaxRequestsPerSecond(h);
+           var maxRps = getMaxRequestsPerSecond(h);
            LOG.infof("Initializing rate limiter for %s: %d req/s", h, maxRps);
            return new RateLimiter(maxRps);
        });
    }

    /**
     * Gets or creates request stats for a specific host.
     */
    private RequestStats getRequestStats(String host) {
        return requestStats.computeIfAbsent(host, h -> new RequestStats(h));
    }

    /**
     * Determines max requests per second for a given host.
     */
    private int getMaxRequestsPerSecond(String host) {
        if (host.contains("troostwijk")) {
            return troostwijkMaxRequestsPerSecond;
        }
        return defaultMaxRequestsPerSecond;
    }

    /**
     * Extracts host from URI (e.g., "api.troostwijkauctions.com").
     */
    private String extractHost(URI uri) {
        return uri.getHost() != null ? uri.getHost() : uri.toString();
    }

    /**
     * Gets statistics for all hosts.
     */
    public Map<String, RequestStats> getAllStats() {
        return Map.copyOf(requestStats);
    }

    /**
     * Gets statistics for a specific host.
     */
    public RequestStats getStats(String host) {
        return requestStats.get(host);
    }

    /**
     * Rate limiter implementation using token bucket algorithm.
     * Allows burst traffic up to maxRequestsPerSecond, then enforces steady rate.
     */
    private static class RateLimiter {

        private final Semaphore semaphore;
        private final int maxRequestsPerSecond;
        private final long intervalNanos;

        RateLimiter(int maxRequestsPerSecond) {
            this.maxRequestsPerSecond = maxRequestsPerSecond;
            this.intervalNanos = TimeUnit.SECONDS.toNanos(1) / maxRequestsPerSecond;
            this.semaphore = new Semaphore(maxRequestsPerSecond);

            // Refill tokens periodically
            startRefillThread();
        }

        void acquire() throws InterruptedException {
            semaphore.acquire();

            // Enforce minimum delay between requests
-           long delayMillis = intervalNanos / 1_000_000;
+           var delayMillis = intervalNanos / 1_000_000;
            if (delayMillis > 0) {
                Thread.sleep(delayMillis);
            }
        }

        private void startRefillThread() {
-           Thread refillThread = new Thread(() -> {
+           var refillThread = new Thread(() -> {
                while (!Thread.currentThread().isInterrupted()) {
                    try {
                        Thread.sleep(1000); // Refill every second
-                       int toRelease = maxRequestsPerSecond - semaphore.availablePermits();
+                       var toRelease = maxRequestsPerSecond - semaphore.availablePermits();
                        if (toRelease > 0) {
                            semaphore.release(toRelease);
                        }
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }, "RateLimiter-Refill");
            refillThread.setDaemon(true);
            refillThread.start();
        }
    }

    /**
     * Statistics tracker for HTTP requests per host.
     */
    public static class RequestStats {

        private final String host;
        private final AtomicLong totalRequests = new AtomicLong(0);
        private final AtomicLong successfulRequests = new AtomicLong(0);
        private final AtomicLong failedRequests = new AtomicLong(0);
        private final AtomicLong rateLimitedRequests = new AtomicLong(0);
        private final AtomicLong totalDurationMs = new AtomicLong(0);

        RequestStats(String host) {
            this.host = host;
        }

        void incrementTotal() {
            totalRequests.incrementAndGet();
        }

        void recordSuccess(long durationMs) {
            successfulRequests.incrementAndGet();
            totalDurationMs.addAndGet(durationMs);
        }

        void incrementFailed() {
            failedRequests.incrementAndGet();
        }

        void incrementRateLimited() {
            rateLimitedRequests.incrementAndGet();
        }

        // Getters
        public String getHost() { return host; }
        public long getTotalRequests() { return totalRequests.get(); }
        public long getSuccessfulRequests() { return successfulRequests.get(); }
        public long getFailedRequests() { return failedRequests.get(); }
        public long getRateLimitedRequests() { return rateLimitedRequests.get(); }
        public long getAverageDurationMs() {
-           long successful = successfulRequests.get();
+           var successful = successfulRequests.get();
            return successful > 0 ? totalDurationMs.get() / successful : 0;
        }

        @Override
        public String toString() {
            return String.format("%s: %d total, %d success, %d failed, %d rate-limited, avg %dms",
                    host, getTotalRequests(), getSuccessfulRequests(),
                    getFailedRequests(), getRateLimitedRequests(), getAverageDurationMs());
        }
    }
}
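For orientation, a short sketch of how a caller might use the client above. The consumer class, its annotations, and the URL handling are illustrative assumptions, not code from this commit; it leans on CDI so the @ConfigProperty defaults on RateLimitedHttpClient are actually applied.

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

// Hypothetical consumer bean.
@ApplicationScoped
class TroostwijkPageFetcher {

    @Inject
    RateLimitedHttpClient http;

    String fetchPage(String url) throws Exception {
        // Calls are throttled per host, so bursts against the same host are
        // spread out to at most the configured requests per second.
        var response = http.sendGet(url);

        // Per-host counters accumulate across calls and can be logged or exposed.
        http.getAllStats().values().forEach(stats -> System.out.println(stats));

        return response.body();
    }
}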
ScraperDataAdapter.java

@@ -19,19 +19,19 @@ public class ScraperDataAdapter {

     static AuctionInfo fromScraperAuction(ResultSet rs) throws SQLException {
         // Parse "A7-39813" → auctionId=39813, type="A7"
-        String auctionIdStr = rs.getString("auction_id");
-        int auctionId = extractNumericId(auctionIdStr);
-        String type = extractTypePrefix(auctionIdStr);
+        var auctionIdStr = rs.getString("auction_id");
+        var auctionId = extractNumericId(auctionIdStr);
+        var type = extractTypePrefix(auctionIdStr);

         // Split "Cluj-Napoca, RO" → city="Cluj-Napoca", country="RO"
-        String location = rs.getString("location");
-        String[] locationParts = parseLocation(location);
-        String city = locationParts[0];
-        String country = locationParts[1];
+        var location = rs.getString("location");
+        var locationParts = parseLocation(location);
+        var city = locationParts[0];
+        var country = locationParts[1];

         // Map field names
-        int lotCount = getIntOrDefault(rs, "lots_count", 0);
-        LocalDateTime closingTime = parseTimestamp(getStringOrNull(rs, "first_lot_closing_time"));
+        var lotCount = getIntOrDefault(rs, "lots_count", 0);
+        var closingTime = parseTimestamp(getStringOrNull(rs, "first_lot_closing_time"));

         return new AuctionInfo(
                 auctionId,
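The adapter relies on helper methods that the diff references but does not show. One plausible shape for them, based only on the comments "A7-39813 → auctionId=39813, type=A7" and "Cluj-Napoca, RO → city, country"; treat these as assumptions rather than the project's actual implementations:

// Hypothetical helper implementations, for illustration only.
static int extractNumericId(String auctionId) {
    // "A7-39813" → 39813
    var dash = auctionId.indexOf('-');
    return Integer.parseInt(dash >= 0 ? auctionId.substring(dash + 1) : auctionId);
}

static String extractTypePrefix(String auctionId) {
    // "A7-39813" → "A7"
    var dash = auctionId.indexOf('-');
    return dash >= 0 ? auctionId.substring(0, dash) : "";
}

static String[] parseLocation(String location) {
    // "Cluj-Napoca, RO" → ["Cluj-Napoca", "RO"]; fall back to an empty country.
    if (location == null || !location.contains(",")) {
        return new String[] { location == null ? "" : location.trim(), "" };
    }
    var parts = location.split(",", 2);
    return new String[] { parts[0].trim(), parts[1].trim() };
}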
WorkflowOrchestrator.java

@@ -94,18 +94,18 @@ public class WorkflowOrchestrator {

                 // Import auctions
                 var auctions = db.importAuctionsFromScraper();
-                log.info(" → Imported " + auctions.size() + " auctions");
+                log.info(" → Imported {} auctions", auctions.size());

                 // Import lots
                 var lots = db.importLotsFromScraper();
-                log.info(" → Imported " + lots.size() + " lots");
+                log.info(" → Imported {} lots", lots.size());

                 // Import image URLs
                 var images = db.getUnprocessedImagesFromScraper();
-                log.info(" → Found " + images.size() + " unprocessed images");
+                log.info(" → Found {} unprocessed images", images.size());

                 long duration = System.currentTimeMillis() - start;
-                log.info(" ✓ Scraper import completed in " + duration + "ms\n");
+                log.info(" ✓ Scraper import completed in {}ms\n", duration);

                 // Trigger notification if significant data imported
                 if (auctions.size() > 0 || lots.size() > 10) {
@@ -117,7 +117,7 @@ public class WorkflowOrchestrator {
                 }

             } catch (Exception e) {
-                log.info(" ❌ Scraper import failed: " + e.getMessage());
+                log.info(" ❌ Scraper import failed: {}", e.getMessage());
             }
         }, 0, 30, TimeUnit.MINUTES);

@@ -143,7 +143,7 @@ public class WorkflowOrchestrator {
                     return;
                 }

-                log.info(" → Processing " + unprocessedImages.size() + " images");
+                log.info(" → Processing {} images", unprocessedImages.size());

                 int processed = 0;
                 int detected = 0;
@@ -186,7 +186,7 @@ public class WorkflowOrchestrator {
                         Thread.sleep(500);

                     } catch (Exception e) {
-                        log.info(" ⚠️ Failed to process image: " + e.getMessage());
+                        log.info(" ⚠\uFE0F Failed to process image: {}", e.getMessage());
                     }
                 }

@@ -195,7 +195,7 @@ public class WorkflowOrchestrator {
                         processed, detected, duration / 1000.0));

             } catch (Exception e) {
-                log.info(" ❌ Image processing failed: " + e.getMessage());
+                log.info(" ❌ Image processing failed: {}", e.getMessage());
             }
         }, 5, 60, TimeUnit.MINUTES);

@@ -214,7 +214,7 @@ public class WorkflowOrchestrator {
                 long start = System.currentTimeMillis();

                 var activeLots = db.getActiveLots();
-                log.info(" → Checking " + activeLots.size() + " active lots");
+                log.info(" → Checking {} active lots", activeLots.size());

                 int bidChanges = 0;

@@ -228,7 +228,7 @@ public class WorkflowOrchestrator {
                 log.info(String.format(" ✓ Bid monitoring completed in %dms\n", duration));

             } catch (Exception e) {
-                log.info(" ❌ Bid monitoring failed: " + e.getMessage());
+                log.info(" ❌ Bid monitoring failed: {}", e.getMessage());
             }
         }, 2, 15, TimeUnit.MINUTES);

@@ -279,7 +279,7 @@ public class WorkflowOrchestrator {
                         alertsSent, duration));

             } catch (Exception e) {
-                log.info(" ❌ Closing alerts failed: " + e.getMessage());
+                log.info(" ❌ Closing alerts failed: {}", e.getMessage());
             }
         }, 1, 5, TimeUnit.MINUTES);

@@ -298,7 +298,7 @@ public class WorkflowOrchestrator {
             log.info("[1/4] Importing scraper data...");
             var auctions = db.importAuctionsFromScraper();
             var lots = db.importLotsFromScraper();
-            log.info(" ✓ Imported " + auctions.size() + " auctions, " + lots.size() + " lots");
+            log.info(" ✓ Imported {} auctions, {} lots", auctions.size(), lots.size());

             // Step 2: Process images
             log.info("[2/4] Processing pending images...");
@@ -308,7 +308,7 @@ public class WorkflowOrchestrator {
             // Step 3: Check bids
             log.info("[3/4] Monitoring bids...");
             var activeLots = db.getActiveLots();
-            log.info(" ✓ Monitored " + activeLots.size() + " lots");
+            log.info(" ✓ Monitored {} lots", activeLots.size());

             // Step 4: Check closing times
             log.info("[4/4] Checking closing times...");
@@ -318,12 +318,12 @@ public class WorkflowOrchestrator {
                     closingSoon++;
                 }
             }
-            log.info(" ✓ Found " + closingSoon + " lots closing soon");
+            log.info(" ✓ Found {} lots closing soon", closingSoon);

             log.info("\n✓ Complete workflow finished successfully\n");

         } catch (Exception e) {
-            log.info("\n❌ Workflow failed: " + e.getMessage() + "\n");
+            log.info("\n❌ Workflow failed: {}\n", e.getMessage());
         }
     }

@@ -331,7 +331,7 @@ public class WorkflowOrchestrator {
      * Event-driven trigger: New auction discovered
      */
     public void onNewAuctionDiscovered(AuctionInfo auction) {
-        log.info("📣 EVENT: New auction discovered - " + auction.title());
+        log.info("\uD83D\uDCE3 EVENT: New auction discovered - {}", auction.title());

         try {
             db.upsertAuction(auction);
@@ -344,7 +344,7 @@ public class WorkflowOrchestrator {
             );

         } catch (Exception e) {
-            log.info(" ❌ Failed to handle new auction: " + e.getMessage());
+            log.info(" ❌ Failed to handle new auction: {}", e.getMessage());
         }
     }

@@ -366,7 +366,7 @@ public class WorkflowOrchestrator {
             );

         } catch (Exception e) {
-            log.info(" ❌ Failed to handle bid change: " + e.getMessage());
+            log.info(" ❌ Failed to handle bid change: {}", e.getMessage());
         }
     }

@@ -386,7 +386,7 @@ public class WorkflowOrchestrator {
             );
             }
         } catch (Exception e) {
-            log.info(" ❌ Failed to send detection notification: " + e.getMessage());
+            log.info(" ❌ Failed to send detection notification: {}", e.getMessage());
         }
     }

@@ -395,16 +395,16 @@ public class WorkflowOrchestrator {
      */
     public void printStatus() {
         log.info("\n📊 Workflow Status:");
-        log.info(" Running: " + (isRunning ? "Yes" : "No"));
+        log.info(" Running: {}", isRunning ? "Yes" : "No");

         try {
             var auctions = db.getAllAuctions();
             var lots = db.getAllLots();
             int images = db.getImageCount();

-            log.info(" Auctions: " + auctions.size());
-            log.info(" Lots: " + lots.size());
-            log.info(" Images: " + images);
+            log.info(" Auctions: {}", auctions.size());
+            log.info(" Lots: {}", lots.size());
+            log.info(" Images: {}", images);

             // Count closing soon
             int closingSoon = 0;
@@ -413,10 +413,10 @@ public class WorkflowOrchestrator {
                     closingSoon++;
                 }
             }
-            log.info(" Closing soon (< 30 min): " + closingSoon);
+            log.info(" Closing soon (< 30 min): {}", closingSoon);

         } catch (Exception e) {
-            log.info(" ⚠️ Could not retrieve status: " + e.getMessage());
+            log.info(" ⚠\uFE0F Could not retrieve status: {}", e.getMessage());
         }

         IO.println();
@@ -33,7 +33,6 @@ quarkus.log.console.level=INFO
|
|||||||
|
|
||||||
# JVM Arguments for native access (Jansi, OpenCV, etc.)
|
# JVM Arguments for native access (Jansi, OpenCV, etc.)
|
||||||
quarkus.native.additional-build-args=--enable-native-access=ALL-UNNAMED
|
quarkus.native.additional-build-args=--enable-native-access=ALL-UNNAMED
|
||||||
quarkus.jvm.args=--enable-native-access=ALL-UNNAMED
|
|
||||||
|
|
||||||
# Production optimizations
|
# Production optimizations
|
||||||
%prod.quarkus.package.type=fast-jar
|
%prod.quarkus.package.type=fast-jar
|
||||||
|
|||||||