start
This commit is contained in:
40
Dockerfile
40
Dockerfile
@@ -1,19 +1,19 @@
|
||||
# Build stage
|
||||
# Multi-stage Dockerfile for Quarkus Auction Monitor
|
||||
|
||||
# Build stage
|
||||
FROM maven:3.9-eclipse-temurin-25-alpine AS build
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy Maven files
|
||||
# Copy Maven files for dependency caching
|
||||
COPY pom.xml ./
|
||||
|
||||
# Download dependencies (cached layer)
|
||||
RUN mvn dependency:go-offline -B
|
||||
|
||||
# Copy source
|
||||
# Copy source code
|
||||
COPY src/ ./src/
|
||||
|
||||
# Build Quarkus application
|
||||
RUN mvn package -DskipTests -Dquarkus.package.jar.type=uber-jar
|
||||
# Build Quarkus application (fast-jar for production)
|
||||
RUN mvn package -DskipTests -Dquarkus.package.jar.type=fast-jar
|
||||
|
||||
# Runtime stage
|
||||
FROM eclipse-temurin:25-jre-alpine
|
||||
@@ -21,14 +21,32 @@ FROM eclipse-temurin:25-jre-alpine
|
||||
WORKDIR /app
|
||||
|
||||
# Create non-root user
|
||||
RUN addgroup -g 1001 quarkus && adduser -u 1001 -G quarkus -s /bin/sh -D quarkus
|
||||
RUN addgroup -g 1001 quarkus && \
|
||||
adduser -u 1001 -G quarkus -s /bin/sh -D quarkus
|
||||
|
||||
# Copy the uber jar
|
||||
COPY --from=build --chown=quarkus:quarkus /app/target/*-runner.jar app.jar
|
||||
# Create directories for data
|
||||
RUN mkdir -p /mnt/okcomputer/output/images && \
|
||||
chown -R quarkus:quarkus /mnt/okcomputer
|
||||
|
||||
# Copy Quarkus fast-jar structure
|
||||
COPY --from=build --chown=quarkus:quarkus /app/target/quarkus-app/lib/ /app/lib/
|
||||
COPY --from=build --chown=quarkus:quarkus /app/target/quarkus-app/*.jar /app/
|
||||
COPY --from=build --chown=quarkus:quarkus /app/target/quarkus-app/app/ /app/app/
|
||||
COPY --from=build --chown=quarkus:quarkus /app/target/quarkus-app/quarkus/ /app/quarkus/
|
||||
|
||||
# Switch to non-root user
|
||||
USER quarkus
|
||||
|
||||
# Expose ports
|
||||
EXPOSE 8081
|
||||
|
||||
# Set environment variables
|
||||
ENV JAVA_OPTS="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager"
|
||||
ENV JAVA_APP_JAR="/app/quarkus-run.jar"
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \
|
||||
CMD wget --no-verbose --tries=1 --spider http://localhost:8081/health/live || exit 1
|
||||
|
||||
# Run the Quarkus application
|
||||
ENTRYPOINT ["java", "-jar", "app.jar"]
|
||||
ENTRYPOINT ["sh", "-c", "java $JAVA_OPTS -jar $JAVA_APP_JAR"]
|
||||
|
||||
13
pom.xml
13
pom.xml
@@ -145,6 +145,7 @@
|
||||
<artifactId>flexmark-all</artifactId>
|
||||
<version>0.64.8</version>
|
||||
</dependency>
|
||||
<!-- Quarkus Core Dependencies -->
|
||||
<dependency>
|
||||
<groupId>io.quarkus</groupId>
|
||||
<artifactId>quarkus-rest-jackson</artifactId>
|
||||
@@ -153,6 +154,18 @@
|
||||
<groupId>io.quarkus</groupId>
|
||||
<artifactId>quarkus-arc</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.quarkus</groupId>
|
||||
<artifactId>quarkus-scheduler</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.quarkus</groupId>
|
||||
<artifactId>quarkus-smallrye-health</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.quarkus</groupId>
|
||||
<artifactId>quarkus-config-yaml</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
|
||||
102
src/main/java/com/auction/AuctionMonitorHealthCheck.java
Normal file
102
src/main/java/com/auction/AuctionMonitorHealthCheck.java
Normal file
@@ -0,0 +1,102 @@
|
||||
package com.auction;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import org.eclipse.microprofile.health.HealthCheck;
import org.eclipse.microprofile.health.HealthCheckResponse;
import org.eclipse.microprofile.health.Liveness;
import org.eclipse.microprofile.health.Readiness;

import java.nio.file.Files;
import java.nio.file.Paths;

/**
 * Health checks for Auction Monitor.
 * Provides liveness, readiness and startup probes for Kubernetes/Docker deployment,
 * served by SmallRye Health under /health/live, /health/ready and /health/started.
 */
@ApplicationScoped
public class AuctionMonitorHealthCheck {

    // NOTE(review): this duplicates auction.database.path from application.properties
    // and is a Windows-style path, while the Dockerfile prepares /mnt/okcomputer/output.
    // Confirm the intended deployment target; consider injecting the config property.
    private static final String DATABASE_PATH = "C:\\mnt\\okcomputer\\output\\cache.db";

    /**
     * Liveness probe - checks if application is alive.
     * GET /health/live
     */
    @Liveness
    @ApplicationScoped // explicit scope, consistent with the sibling checks below
    public static class LivenessCheck implements HealthCheck {
        @Override
        public HealthCheckResponse call() {
            // Reaching this code means the JVM and CDI container are alive.
            return HealthCheckResponse.up("Auction Monitor is alive");
        }
    }

    /**
     * Readiness probe - checks if application is ready to serve requests.
     * GET /health/ready
     */
    @Readiness
    @ApplicationScoped
    public static class ReadinessCheck implements HealthCheck {

        @Inject
        DatabaseService db;

        @Override
        public HealthCheckResponse call() {
            try {
                // Check database connection by issuing a real query
                var auctions = db.getAllAuctions();

                // Check the database directory exists; guard against a path with
                // no parent component (original code could NPE on getParent()).
                var parent = Paths.get(DATABASE_PATH).getParent();
                if (parent != null && !Files.exists(parent)) {
                    return HealthCheckResponse.down("Database directory does not exist");
                }

                return HealthCheckResponse.named("database")
                        .up()
                        .withData("auctions", auctions.size())
                        .build();

            } catch (Exception e) {
                // Any failure (SQL error, missing schema, ...) marks the app not-ready
                return HealthCheckResponse.named("database")
                        .down()
                        .withData("error", e.getMessage())
                        .build();
            }
        }
    }

    /**
     * Startup probe - checks if application has started correctly.
     * GET /health/started
     */
    @org.eclipse.microprofile.health.Startup
    @ApplicationScoped
    public static class StartupCheck implements HealthCheck {

        @Inject
        DatabaseService db;

        @Override
        public HealthCheckResponse call() {
            try {
                // Verify (and create if missing) the database schema
                db.ensureSchema();

                return HealthCheckResponse.named("startup")
                        .up()
                        .withData("message", "Database schema initialized")
                        .build();

            } catch (Exception e) {
                return HealthCheckResponse.named("startup")
                        .down()
                        .withData("error", e.getMessage())
                        .build();
            }
        }
    }
}
|
||||
61
src/main/java/com/auction/AuctionMonitorProducer.java
Normal file
61
src/main/java/com/auction/AuctionMonitorProducer.java
Normal file
@@ -0,0 +1,61 @@
|
||||
package com.auction;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.inject.Produces;
import jakarta.inject.Singleton;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.jboss.logging.Logger;

import java.io.IOException;
import java.sql.SQLException;

/**
 * CDI producer methods for the auction monitor's core services.
 * Each producer builds exactly one singleton instance from configuration.
 */
@ApplicationScoped
public class AuctionMonitorProducer {

    private static final Logger LOG = Logger.getLogger(AuctionMonitorProducer.class);

    /**
     * Builds the singleton {@link DatabaseService} and makes sure its schema exists.
     *
     * @param dbPath location of the database file (auction.database.path)
     * @throws SQLException if the database cannot be opened or the schema created
     */
    @Produces
    @Singleton
    public DatabaseService produceDatabaseService(
            @ConfigProperty(name = "auction.database.path") String dbPath) throws SQLException {

        LOG.infof("Initializing DatabaseService with path: %s", dbPath);
        var service = new DatabaseService(dbPath);
        service.ensureSchema();
        return service;
    }

    /**
     * Builds the singleton {@link NotificationService}.
     *
     * @param config notification backend configuration (auction.notification.config)
     */
    @Produces
    @Singleton
    public NotificationService produceNotificationService(
            @ConfigProperty(name = "auction.notification.config") String config) {

        LOG.infof("Initializing NotificationService with config: %s", config);
        // Second constructor argument (extra config) is intentionally empty here.
        return new NotificationService(config, "");
    }

    /**
     * Builds the singleton {@link ObjectDetectionService} from the YOLO model files.
     *
     * @param cfgPath     path to the YOLO network config (auction.yolo.config)
     * @param weightsPath path to the YOLO weights file (auction.yolo.weights)
     * @param classesPath path to the class-name list (auction.yolo.classes)
     * @throws IOException if any of the model files cannot be read
     */
    @Produces
    @Singleton
    public ObjectDetectionService produceObjectDetectionService(
            @ConfigProperty(name = "auction.yolo.config") String cfgPath,
            @ConfigProperty(name = "auction.yolo.weights") String weightsPath,
            @ConfigProperty(name = "auction.yolo.classes") String classesPath) throws IOException {

        LOG.infof("Initializing ObjectDetectionService");
        return new ObjectDetectionService(cfgPath, weightsPath, classesPath);
    }

    /**
     * Builds the singleton {@link ImageProcessingService}, wiring in the
     * database and detector singletons produced above.
     */
    @Produces
    @Singleton
    public ImageProcessingService produceImageProcessingService(
            DatabaseService db,
            ObjectDetectionService detector) {

        LOG.infof("Initializing ImageProcessingService");
        return new ImageProcessingService(db, detector);
    }
}
|
||||
289
src/main/java/com/auction/AuctionMonitorResource.java
Normal file
289
src/main/java/com/auction/AuctionMonitorResource.java
Normal file
@@ -0,0 +1,289 @@
|
||||
package com.auction;

import jakarta.inject.Inject;
import jakarta.ws.rs.*;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.jboss.logging.Logger;

import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

/**
 * REST API for Auction Monitor control and status.
 * Provides endpoints for:
 * - Status checking
 * - Manual workflow triggers
 * - Statistics
 */
@Path("/api/monitor")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class AuctionMonitorResource {

    private static final Logger LOG = Logger.getLogger(AuctionMonitorResource.class);

    @Inject
    DatabaseService db;

    @Inject
    QuarkusWorkflowScheduler scheduler;

    @Inject
    NotificationService notifier;

    /**
     * Logs the failure and builds the shared 500 error payload.
     * Extracted because every endpoint used this identical boilerplate.
     */
    private Response serverError(String logMessage, Exception e) {
        LOG.error(logMessage, e);
        // NOTE(review): Map.of rejects null values — if e.getMessage() is ever null
        // this throws; consider String.valueOf(e.getMessage()) if that occurs.
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .entity(Map.of("error", e.getMessage()))
                .build();
    }

    /**
     * GET /api/monitor/status
     * Returns current monitoring status
     */
    @GET
    @Path("/status")
    public Response getStatus() {
        try {
            Map<String, Object> status = new HashMap<>();
            status.put("running", true);
            status.put("auctions", db.getAllAuctions().size());
            status.put("lots", db.getAllLots().size());
            status.put("images", db.getImageCount());

            // Count lots closing within the next 30 minutes
            int closingSoon = 0;
            for (var lot : db.getAllLots()) {
                if (lot.closingTime() != null && lot.minutesUntilClose() < 30) {
                    closingSoon++;
                }
            }
            status.put("closingSoon", closingSoon);

            return Response.ok(status).build();

        } catch (Exception e) {
            return serverError("Failed to get status", e);
        }
    }

    /**
     * GET /api/monitor/statistics
     * Returns detailed statistics
     */
    @GET
    @Path("/statistics")
    public Response getStatistics() {
        try {
            Map<String, Object> stats = new HashMap<>();

            var auctions = db.getAllAuctions();
            var lots = db.getAllLots();

            stats.put("totalAuctions", auctions.size());
            stats.put("totalLots", lots.size());
            stats.put("totalImages", db.getImageCount());

            // Lot statistics: active = still open; bid totals over lots with a bid
            int activeLots = 0;
            int lotsWithBids = 0;
            double totalBids = 0;

            for (var lot : lots) {
                if (lot.closingTime() != null && lot.minutesUntilClose() > 0) {
                    activeLots++;
                }
                if (lot.currentBid() > 0) {
                    lotsWithBids++;
                    totalBids += lot.currentBid();
                }
            }

            stats.put("activeLots", activeLots);
            stats.put("lotsWithBids", lotsWithBids);
            stats.put("totalBidValue", String.format("€%.2f", totalBids));
            stats.put("averageBid", lotsWithBids > 0 ? String.format("€%.2f", totalBids / lotsWithBids) : "€0.00");

            return Response.ok(stats).build();

        } catch (Exception e) {
            return serverError("Failed to get statistics", e);
        }
    }

    /**
     * POST /api/monitor/trigger/scraper-import
     * Manually trigger scraper import workflow
     */
    @POST
    @Path("/trigger/scraper-import")
    public Response triggerScraperImport() {
        try {
            scheduler.importScraperData();
            return Response.ok(Map.of("message", "Scraper import triggered successfully")).build();
        } catch (Exception e) {
            return serverError("Failed to trigger scraper import", e);
        }
    }

    /**
     * POST /api/monitor/trigger/image-processing
     * Manually trigger image processing workflow
     */
    @POST
    @Path("/trigger/image-processing")
    public Response triggerImageProcessing() {
        try {
            scheduler.processImages();
            return Response.ok(Map.of("message", "Image processing triggered successfully")).build();
        } catch (Exception e) {
            return serverError("Failed to trigger image processing", e);
        }
    }

    /**
     * POST /api/monitor/trigger/bid-monitoring
     * Manually trigger bid monitoring workflow
     */
    @POST
    @Path("/trigger/bid-monitoring")
    public Response triggerBidMonitoring() {
        try {
            scheduler.monitorBids();
            return Response.ok(Map.of("message", "Bid monitoring triggered successfully")).build();
        } catch (Exception e) {
            return serverError("Failed to trigger bid monitoring", e);
        }
    }

    /**
     * POST /api/monitor/trigger/closing-alerts
     * Manually trigger closing alerts workflow
     */
    @POST
    @Path("/trigger/closing-alerts")
    public Response triggerClosingAlerts() {
        try {
            scheduler.checkClosingTimes();
            return Response.ok(Map.of("message", "Closing alerts triggered successfully")).build();
        } catch (Exception e) {
            return serverError("Failed to trigger closing alerts", e);
        }
    }

    /**
     * GET /api/monitor/auctions
     * Returns list of all auctions, optionally filtered by country code.
     */
    @GET
    @Path("/auctions")
    public Response getAuctions(@QueryParam("country") String country) {
        try {
            var auctions = country != null && !country.isEmpty()
                    ? db.getAuctionsByCountry(country)
                    : db.getAllAuctions();

            return Response.ok(auctions).build();
        } catch (Exception e) {
            return serverError("Failed to get auctions", e);
        }
    }

    /**
     * GET /api/monitor/lots
     * Returns list of active lots
     */
    @GET
    @Path("/lots")
    public Response getActiveLots() {
        try {
            var lots = db.getActiveLots();
            return Response.ok(lots).build();
        } catch (Exception e) {
            return serverError("Failed to get lots", e);
        }
    }

    /**
     * GET /api/monitor/lots/closing-soon
     * Returns lots closing within specified minutes (default 30)
     */
    @GET
    @Path("/lots/closing-soon")
    public Response getLotsClosingSoon(@QueryParam("minutes") @DefaultValue("30") int minutes) {
        try {
            var allLots = db.getActiveLots();
            var closingSoon = allLots.stream()
                    .filter(lot -> lot.closingTime() != null && lot.minutesUntilClose() < minutes)
                    .toList();

            return Response.ok(closingSoon).build();
        } catch (Exception e) {
            return serverError("Failed to get closing lots", e);
        }
    }

    /**
     * GET /api/monitor/lots/{lotId}/images
     * Returns images for a specific lot
     */
    @GET
    @Path("/lots/{lotId}/images")
    public Response getLotImages(@PathParam("lotId") int lotId) {
        try {
            var images = db.getImagesForLot(lotId);
            return Response.ok(images).build();
        } catch (Exception e) {
            return serverError("Failed to get lot images", e);
        }
    }

    /**
     * POST /api/monitor/test-notification
     * Send a test notification; body may carry "message", "title", "priority".
     */
    @POST
    @Path("/test-notification")
    public Response sendTestNotification(Map<String, String> request) {
        try {
            String message = request.getOrDefault("message", "Test notification from Auction Monitor");
            String title = request.getOrDefault("title", "Test Notification");
            int priority = Integer.parseInt(request.getOrDefault("priority", "0"));

            notifier.sendNotification(message, title, priority);

            return Response.ok(Map.of("message", "Test notification sent successfully")).build();
        } catch (Exception e) {
            return serverError("Failed to send test notification", e);
        }
    }
}
|
||||
@@ -1,165 +0,0 @@
|
||||
package com.auction;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.Instant;

/**
 * SQLite-based caching system for HTML pages with expiration support.
 * All public operations are synchronized because a single JDBC Connection
 * is shared across callers.
 */
class CacheDatabase {

    // Filesystem location of the SQLite database file
    private final String dbPath;
    private Connection connection;

    public CacheDatabase(String dbPath) {
        this.dbPath = dbPath;
    }

    /**
     * Initialize database and create schema.
     *
     * @throws SQLException if the connection or schema creation fails
     * @throws IOException  if the cache directory cannot be created
     */
    public void initialize() throws SQLException, IOException {
        // Create cache directory if it doesn't exist
        var cacheDir = Paths.get(dbPath).getParent();
        if (cacheDir != null) {
            Files.createDirectories(cacheDir);
        }

        connection = DriverManager.getConnection("jdbc:sqlite:" + dbPath);

        // Create cache table with URL as primary key
        var createTable = "CREATE TABLE IF NOT EXISTS page_cache (\n" +
                "  url TEXT PRIMARY KEY,\n" +
                "  html TEXT NOT NULL,\n" +
                "  cached_at INTEGER NOT NULL,\n" +
                "  expires_at INTEGER NOT NULL\n" +
                ")\n";

        try (var stmt = connection.createStatement()) {
            stmt.execute(createTable);
            // Create index on expires_at for efficient cleanup
            stmt.execute("CREATE INDEX IF NOT EXISTS idx_expires_at ON page_cache(expires_at)");
        }

        // Clean up expired entries on initialization
        cleanupExpired();

        System.out.println("✓ Cache database initialized");
    }

    /**
     * Get cached HTML for a URL if it exists and hasn't expired.
     *
     * @param url The URL to look up
     * @return Cached HTML or null if not found/expired
     */
    public synchronized String get(String url) {
        var sql = "SELECT html FROM page_cache WHERE url = ? AND expires_at > ?";

        try (var ps = connection.prepareStatement(sql)) {
            ps.setString(1, url);
            ps.setLong(2, Instant.now().getEpochSecond());

            // Close the ResultSet deterministically (previously left to the
            // statement's close)
            try (var rs = ps.executeQuery()) {
                if (rs.next()) {
                    return rs.getString("html");
                }
            }
        } catch (SQLException e) {
            // Cache misses on error are acceptable: caller re-fetches the page
            System.err.println("Cache read error: " + e.getMessage());
        }

        return null;
    }

    /**
     * Store HTML in cache with expiration time.
     *
     * @param url             The URL to cache
     * @param html            The HTML content
     * @param expirationHours Hours until cache expires
     */
    public synchronized void put(String url, String html, long expirationHours) {
        var sql = "INSERT OR REPLACE INTO page_cache (url, html, cached_at, expires_at)\n" +
                "VALUES (?, ?, ?, ?)\n";

        var now = Instant.now().getEpochSecond();
        var expiresAt = now + (expirationHours * 3600);

        try (var ps = connection.prepareStatement(sql)) {
            ps.setString(1, url);
            ps.setString(2, html);
            ps.setLong(3, now);
            ps.setLong(4, expiresAt);
            ps.executeUpdate();
        } catch (SQLException e) {
            // Best-effort cache: a failed write only costs a future re-fetch
            System.err.println("Cache write error: " + e.getMessage());
        }
    }

    /**
     * Remove expired cache entries.
     */
    public synchronized void cleanupExpired() {
        var sql = "DELETE FROM page_cache WHERE expires_at <= ?";

        try (var ps = connection.prepareStatement(sql)) {
            ps.setLong(1, Instant.now().getEpochSecond());
            var deleted = ps.executeUpdate();
            if (deleted > 0) {
                System.out.println("✓ Cleaned up " + deleted + " expired cache entries");
            }
        } catch (SQLException e) {
            System.err.println("Cache cleanup error: " + e.getMessage());
        }
    }

    /**
     * Print cache statistics (entry counts and total payload size) to stdout.
     */
    public synchronized void printStats() {
        var sql = "SELECT COUNT(*) as total, " +
                "SUM(CASE WHEN expires_at > ? THEN 1 ELSE 0 END) as valid, " +
                "SUM(LENGTH(html)) as total_size " +
                "FROM page_cache";

        try (var ps = connection.prepareStatement(sql)) {
            ps.setLong(1, Instant.now().getEpochSecond());

            // Close the ResultSet deterministically
            try (var rs = ps.executeQuery()) {
                if (rs.next()) {
                    var total = rs.getInt("total");
                    var valid = rs.getInt("valid");
                    var size = rs.getLong("total_size");

                    System.out.println("\n=== Cache Statistics ===");
                    System.out.println("Total entries: " + total);
                    System.out.println("Valid entries: " + valid);
                    System.out.println("Expired entries: " + (total - valid));
                    System.out.println("Total size: " + (size / 1024) + " KB");
                }
            }
        } catch (SQLException e) {
            System.err.println("Cache stats error: " + e.getMessage());
        }
    }

    /**
     * Close database connection.
     */
    public void close() {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                System.err.println("Error closing cache database: " + e.getMessage());
            }
        }
    }
}
|
||||
@@ -197,7 +197,7 @@ public class Main {
|
||||
Console.println(" java -jar troostwijk-monitor.jar workflow");
|
||||
Console.println(" java -jar troostwijk-monitor.jar once");
|
||||
Console.println(" java -jar troostwijk-monitor.jar status");
|
||||
Console.println();
|
||||
IO.println();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
291
src/main/java/com/auction/QuarkusWorkflowScheduler.java
Normal file
291
src/main/java/com/auction/QuarkusWorkflowScheduler.java
Normal file
@@ -0,0 +1,291 @@
|
||||
package com.auction;

import io.quarkus.scheduler.Scheduled;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.jboss.logging.Logger;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

/**
 * Quarkus-based Workflow Scheduler using @Scheduled annotations.
 * Replaces the manual ScheduledExecutorService with Quarkus Scheduler.
 *
 * This class coordinates all scheduled workflows using Quarkus's built-in
 * scheduling capabilities with cron expressions (resolved from
 * auction.workflow.*.cron config properties).
 */
@ApplicationScoped
public class QuarkusWorkflowScheduler {

    private static final Logger LOG = Logger.getLogger(QuarkusWorkflowScheduler.class);

    @Inject
    DatabaseService db;

    @Inject
    NotificationService notifier;

    @Inject
    ObjectDetectionService detector;

    @Inject
    ImageProcessingService imageProcessor;

    // NOTE(review): injected but never read in this class — confirm whether it
    // can be removed or was meant to be used by one of the workflows.
    @ConfigProperty(name = "auction.database.path")
    String databasePath;

    /**
     * Workflow 1: Import Scraper Data
     * Cron: every 30 minutes (0 *&#47;30 * * * ?)
     * Purpose: Import new auctions and lots from external scraper
     */
    @Scheduled(cron = "{auction.workflow.scraper-import.cron}", identity = "scraper-import")
    void importScraperData() {
        try {
            LOG.info("📥 [WORKFLOW 1] Importing scraper data...");
            long start = System.currentTimeMillis();

            // Import auctions
            var auctions = db.importAuctionsFromScraper();
            LOG.infof("   → Imported %d auctions", auctions.size());

            // Import lots
            var lots = db.importLotsFromScraper();
            LOG.infof("   → Imported %d lots", lots.size());

            // Import image URLs
            var images = db.getUnprocessedImagesFromScraper();
            LOG.infof("   → Found %d unprocessed images", images.size());

            long duration = System.currentTimeMillis() - start;
            LOG.infof("   ✓ Scraper import completed in %dms", duration);

            // Trigger notification if significant data imported
            if (!auctions.isEmpty() || lots.size() > 10) {
                notifier.sendNotification(
                        String.format("Imported %d auctions, %d lots", auctions.size(), lots.size()),
                        "Data Import Complete",
                        0
                );
            }

        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Scraper import failed: %s", e.getMessage());
        }
    }

    /**
     * Workflow 2: Process Pending Images
     * Cron: every hour (0 0 * * * ?)
     * Purpose: Download images and run object detection
     */
    @Scheduled(cron = "{auction.workflow.image-processing.cron}", identity = "image-processing")
    void processImages() {
        try {
            LOG.info("🖼️ [WORKFLOW 2] Processing pending images...");
            long start = System.currentTimeMillis();

            // Get unprocessed images
            var unprocessedImages = db.getUnprocessedImagesFromScraper();

            if (unprocessedImages.isEmpty()) {
                LOG.info("   → No pending images to process");
                return;
            }

            LOG.infof("   → Processing %d images", unprocessedImages.size());

            int processed = 0;
            int detected = 0;

            for (var imageRecord : unprocessedImages) {
                try {
                    // Download image
                    String filePath = imageProcessor.downloadImage(
                            imageRecord.url(),
                            imageRecord.saleId(),
                            imageRecord.lotId()
                    );

                    if (filePath != null) {
                        // Run object detection
                        var labels = detector.detectObjects(filePath);

                        // Save to database
                        db.insertImage(imageRecord.lotId(), imageRecord.url(),
                                filePath, labels);

                        processed++;
                        if (!labels.isEmpty()) {
                            detected++;

                            // Send notification for interesting detections
                            if (labels.size() >= 3) {
                                notifier.sendNotification(
                                        String.format("Lot %d: Detected %s",
                                                imageRecord.lotId(),
                                                String.join(", ", labels)),
                                        "Objects Detected",
                                        0
                                );
                            }
                        }
                    }

                    // Rate limiting
                    Thread.sleep(500);

                } catch (InterruptedException ie) {
                    // Restore the interrupt flag and stop processing promptly
                    // (previously swallowed by the generic catch below)
                    Thread.currentThread().interrupt();
                    LOG.warn("   ⚠️ Image processing interrupted");
                    break;
                } catch (Exception e) {
                    // One bad image must not abort the whole batch
                    LOG.warnf("   ⚠️ Failed to process image: %s", e.getMessage());
                }
            }

            long duration = System.currentTimeMillis() - start;
            LOG.infof("   ✓ Processed %d images, detected objects in %d (%.1fs)",
                    processed, detected, duration / 1000.0);

        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Image processing failed: %s", e.getMessage());
        }
    }

    /**
     * Workflow 3: Monitor Bids
     * Cron: every 15 minutes (0 *&#47;15 * * * ?)
     * Purpose: Check for bid changes and send notifications
     */
    @Scheduled(cron = "{auction.workflow.bid-monitoring.cron}", identity = "bid-monitoring")
    void monitorBids() {
        try {
            LOG.info("💰 [WORKFLOW 3] Monitoring bids...");
            long start = System.currentTimeMillis();

            var activeLots = db.getActiveLots();
            LOG.infof("   → Checking %d active lots", activeLots.size());

            // Note: In production, this would call Troostwijk API
            // For now, we just track what's in the database
            // The external scraper updates bids, we just notify

            long duration = System.currentTimeMillis() - start;
            LOG.infof("   ✓ Bid monitoring completed in %dms", duration);

        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Bid monitoring failed: %s", e.getMessage());
        }
    }

    /**
     * Workflow 4: Check Closing Times
     * Cron: every 5 minutes (0 *&#47;5 * * * ?)
     * Purpose: Send alerts for lots closing soon
     */
    @Scheduled(cron = "{auction.workflow.closing-alerts.cron}", identity = "closing-alerts")
    void checkClosingTimes() {
        try {
            LOG.info("⏰ [WORKFLOW 4] Checking closing times...");
            long start = System.currentTimeMillis();

            var activeLots = db.getActiveLots();
            int alertsSent = 0;

            for (var lot : activeLots) {
                if (lot.closingTime() == null) continue;

                long minutesLeft = lot.minutesUntilClose();

                // Alert for lots closing in 5 minutes, at most once per lot
                if (minutesLeft <= 5 && minutesLeft > 0 && !lot.closingNotified()) {
                    String message = String.format("Kavel %d sluit binnen %d min.",
                            lot.lotId(), minutesLeft);

                    notifier.sendNotification(message, "Lot Closing Soon", 1);

                    // Mark as notified (Lot is immutable: rebuild with flag set)
                    var updated = new Lot(
                            lot.saleId(), lot.lotId(), lot.title(), lot.description(),
                            lot.manufacturer(), lot.type(), lot.year(), lot.category(),
                            lot.currentBid(), lot.currency(), lot.url(),
                            lot.closingTime(), true
                    );
                    db.updateLotNotificationFlags(updated);

                    alertsSent++;
                }
            }

            long duration = System.currentTimeMillis() - start;
            LOG.infof("   → Sent %d closing alerts in %dms", alertsSent, duration);

        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Closing alerts failed: %s", e.getMessage());
        }
    }

    /**
     * Event-driven trigger: New auction discovered
     */
    public void onNewAuctionDiscovered(AuctionInfo auction) {
        LOG.infof("📣 EVENT: New auction discovered - %s", auction.title());

        try {
            db.upsertAuction(auction);

            notifier.sendNotification(
                    String.format("New auction: %s\nLocation: %s\nLots: %d",
                            auction.title(), auction.location(), auction.lotCount()),
                    "New Auction Discovered",
                    0
            );

        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Failed to handle new auction: %s", e.getMessage());
        }
    }

    /**
     * Event-driven trigger: Bid change detected
     */
    public void onBidChange(Lot lot, double previousBid, double newBid) {
        LOG.infof("📣 EVENT: Bid change on lot %d (€%.2f → €%.2f)",
                lot.lotId(), previousBid, newBid);

        try {
            db.updateLotCurrentBid(lot);

            notifier.sendNotification(
                    String.format("Nieuw bod op kavel %d: €%.2f (was €%.2f)",
                            lot.lotId(), newBid, previousBid),
                    "Kavel Bieding Update",
                    0
            );

        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Failed to handle bid change: %s", e.getMessage());
        }
    }

    /**
     * Event-driven trigger: Objects detected in image
     */
    public void onObjectsDetected(int lotId, List<String> labels) {
        LOG.infof("📣 EVENT: Objects detected in lot %d - %s",
                lotId, String.join(", ", labels));

        try {
            if (labels.size() >= 2) {
                notifier.sendNotification(
                        String.format("Lot %d contains: %s", lotId, String.join(", ", labels)),
                        "Objects Detected",
                        0
                );
            }
        } catch (Exception e) {
            LOG.errorf(e, "   ❌ Failed to send detection notification: %s", e.getMessage());
        }
    }
}
|
||||
@@ -417,7 +417,7 @@ public class WorkflowOrchestrator {
|
||||
Console.println(" ⚠️ Could not retrieve status: " + e.getMessage());
|
||||
}
|
||||
|
||||
Console.println();
|
||||
IO.println();
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -28,3 +28,24 @@ quarkus.log.console.level=INFO
|
||||
quarkus.http.enable-compression=true
|
||||
quarkus.rest.path=/api
|
||||
quarkus.http.root-path=/
|
||||
|
||||
# Auction Monitor Configuration
# NOTE(review): these are Windows-style paths, but the Dockerfile prepares
# /mnt/okcomputer/output inside the container — confirm which deployment
# target these defaults should serve and override per-environment.
auction.database.path=C:\\mnt\\okcomputer\\output\\cache.db
auction.images.path=C:\\mnt\\okcomputer\\output\\images
|
||||
auction.notification.config=desktop
|
||||
auction.yolo.config=models/yolov4.cfg
|
||||
auction.yolo.weights=models/yolov4.weights
|
||||
auction.yolo.classes=models/coco.names
|
||||
|
||||
# Scheduler Configuration
|
||||
quarkus.scheduler.enabled=true
|
||||
quarkus.scheduler.start-halted=false
|
||||
|
||||
# Workflow Schedules
|
||||
auction.workflow.scraper-import.cron=0 */30 * * * ?
|
||||
auction.workflow.image-processing.cron=0 0 * * * ?
|
||||
auction.workflow.bid-monitoring.cron=0 */15 * * * ?
|
||||
auction.workflow.closing-alerts.cron=0 */5 * * * ?
|
||||
|
||||
# Health Check Configuration
|
||||
quarkus.smallrye-health.root-path=/health
|
||||
|
||||
Reference in New Issue
Block a user