Skip to content

Commit

Permalink
SRVLOGIC-472: Job service job status change events are not sent in order
Browse files Browse the repository at this point in the history
(cherry picked from commit a829621)
  • Loading branch information
wmedvede committed Nov 26, 2024
1 parent 34f0baa commit 1a65e30
Show file tree
Hide file tree
Showing 6 changed files with 57 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -65,23 +65,27 @@ public boolean isEnabled() {
public void jobStatusChange(JobDetails job) {
if (isEnabled()) {
try {
JobDataEvent event = JobDataEvent
.builder()
.source(url + RestApiConstants.JOBS_PATH)
.data(ScheduledJobAdapter.of(job))//this should support jobs crated with V1 and V2
.build();
JobDataEvent event = buildEvent(job);
LOGGER.debug("emit jobStatusChange, hasRequests: {}, eventId: {}, jobDetails: {}", emitter.hasRequests(), event.getId(), job);
String json = objectMapper.writeValueAsString(event);
emitter.send(decorate(ContextAwareMessage.of(json)
.withAck(() -> onAck(event.getId(), job))
.withNack(reason -> onNack(reason, job))));
.withNack(reason -> onNack(reason, job)), event));
} catch (Exception e) {
String msg = String.format("An unexpected error was produced while processing a Job status change for the job: %s", job);
LOGGER.error(msg, e);
}
}
}

/**
 * Builds the status change event for the given job. Subclasses may override this
 * method to enrich the event with transport-specific attributes (e.g. CloudEvents
 * extensions) before it is serialized and sent.
 *
 * @param job the job whose status changed
 * @return the event to emit
 */
protected JobDataEvent buildEvent(JobDetails job) {
return JobDataEvent
.builder()
.source(url + RestApiConstants.JOBS_PATH)
.data(ScheduledJobAdapter.of(job))// this should support jobs created with V1 and V2
.build();
}

protected CompletionStage<Void> onAck(String eventId, JobDetails job) {
LOGGER.debug("Job Status change emitted successfully, eventId: {}, jobDetails: {}", eventId, job);
return CompletableFuture.completedFuture(null);
Expand All @@ -93,7 +97,7 @@ protected CompletionStage<Void> onNack(Throwable reason, JobDetails job) {
return CompletableFuture.completedFuture(null);
}

protected Message<String> decorate(Message<String> message) {
/**
 * Extension hook invoked just before the message is sent, allowing subclasses to
 * attach transport metadata derived from the event (e.g. an outgoing Kafka record
 * key). The default implementation returns the message unmodified.
 *
 * @param message the serialized outgoing message
 * @param event   the event the message was built from
 * @return the (possibly decorated) message to send
 */
protected Message<String> decorate(Message<String> message, JobDataEvent event) {
return message;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ public abstract class AbstractJobStreamsTest<T extends AbstractJobStreams> {
protected static final String URL = "http://localhost:8180";
private static final String SERIALIZED_MESSAGE = "SERIALIZED_MESSAGE";

private static final String JOB_ID = "JOB_ID";
protected static final String JOB_ID = "JOB_ID";
private static final String CORRELATION_ID = "CORRELATION_ID";
private static final JobStatus STATUS = JobStatus.SCHEDULED;
private static final ZonedDateTime LAST_UPDATE = ZonedDateTime.parse("2022-08-03T18:00:15.001+01:00");
Expand Down Expand Up @@ -170,7 +170,7 @@ private JobDetails mockJobDetails() {

}

private void assertExpectedEvent(JobDataEvent event) {
protected void assertExpectedEvent(JobDataEvent event) {
assertThat(event.getId()).isNotNull();
assertThat(event.getType()).isEqualTo(JobDataEvent.JOB_EVENT_TYPE);
assertThat(event.getSource()).hasToString(URL + "/jobs");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.eclipse.microprofile.reactive.messaging.Emitter;
import org.eclipse.microprofile.reactive.messaging.Message;
import org.eclipse.microprofile.reactive.messaging.OnOverflow;
import org.kie.kogito.jobs.service.events.JobDataEvent;
import org.kie.kogito.jobs.service.model.JobDetails;
import org.kie.kogito.jobs.service.stream.AbstractJobStreams;
import org.slf4j.Logger;
Expand All @@ -45,6 +46,7 @@ public class HttpJobStreams extends AbstractJobStreams {

public static final String PUBLISH_EVENTS_CONFIG_KEY = "kogito.jobs-service.http.job-status-change-events";
public static final String JOB_STATUS_CHANGE_EVENTS_HTTP = "kogito-job-service-job-status-events-http";
public static final String PARTITION_KEY_EXTENSION = "partitionkey";

private static final Logger LOGGER = LoggerFactory.getLogger(HttpJobStreams.class);

Expand All @@ -70,7 +72,16 @@ public void jobStatusChange(JobDetails job) {
}

@Override
protected Message<String> decorate(Message<String> message) {
protected JobDataEvent buildEvent(JobDetails job) {
JobDataEvent event = super.buildEvent(job);
// Use the well-known CloudEvents "partitionkey" extension
// (https://github.com/cloudevents/spec/blob/main/cloudevents/extensions/partitioning.md)
// so that HTTP-driven brokers, e.g. the Knative Eventing Kafka Broker, can partition
// events by job id accordingly.
event.addExtensionAttribute(PARTITION_KEY_EXTENSION, event.getData().getId());
return event;
}

@Override
protected Message<String> decorate(Message<String> message, JobDataEvent event) {
// Attach the outgoing HTTP channel metadata; partitioning information is already
// carried on the event itself via the "partitionkey" extension set in buildEvent.
return message.addMetadata(OUTGOING_HTTP_METADATA.get());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.util.Optional;

import org.eclipse.microprofile.reactive.messaging.Message;
import org.kie.kogito.jobs.service.events.JobDataEvent;
import org.kie.kogito.jobs.service.stream.AbstractJobStreamsTest;

import io.cloudevents.jackson.JsonFormat;
Expand All @@ -29,6 +30,7 @@
import jakarta.ws.rs.core.HttpHeaders;

import static org.assertj.core.api.Assertions.assertThat;
import static org.kie.kogito.jobs.service.messaging.http.stream.HttpJobStreams.PARTITION_KEY_EXTENSION;

class HttpJobStreamsTest extends AbstractJobStreamsTest<HttpJobStreams> {

Expand All @@ -44,4 +46,12 @@ protected void assertExpectedMetadata(Message<String> message) {
assertThat(metadata.getHeaders()).hasSize(1);
assertThat(metadata.getHeaders().get(HttpHeaders.CONTENT_TYPE)).containsExactlyInAnyOrder(JsonFormat.CONTENT_TYPE);
}

@Override
protected void assertExpectedEvent(JobDataEvent event) {
    // Run the common checks first, then verify the CloudEvents partition key
    // extension carries the job id.
    super.assertExpectedEvent(event);
    Object partitionKey = event.getExtension(PARTITION_KEY_EXTENSION);
    assertThat(partitionKey).isNotNull();
    assertThat(partitionKey).isEqualTo(JOB_ID);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.eclipse.microprofile.reactive.messaging.Channel;
import org.eclipse.microprofile.reactive.messaging.Emitter;
import org.eclipse.microprofile.reactive.messaging.Message;
import org.eclipse.microprofile.reactive.messaging.OnOverflow;
import org.kie.kogito.jobs.service.events.JobDataEvent;
import org.kie.kogito.jobs.service.model.JobDetails;
import org.kie.kogito.jobs.service.stream.AbstractJobStreams;
import org.kie.kogito.jobs.service.stream.AvailableStreams;
Expand All @@ -32,6 +34,8 @@

import com.fasterxml.jackson.databind.ObjectMapper;

import io.smallrye.reactive.messaging.kafka.api.OutgoingKafkaRecordMetadata;

import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

Expand All @@ -54,4 +58,10 @@ public void jobStatusChange(JobDetails job) {
LOGGER.debug("jobStatusChange call received, enabled: {}, job: {}", enabled, job);
super.jobStatusChange(job);
}

@Override
protected Message<String> decorate(Message<String> message, JobDataEvent event) {
    // Regular Kafka partitioning: key the record with the job id so every status
    // change for the same job lands on the same partition.
    var recordKey = event.getData().getId();
    var kafkaMetadata = OutgoingKafkaRecordMetadata.builder().withKey(recordKey).build();
    return message.addMetadata(kafkaMetadata);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,12 +20,24 @@

import java.util.Optional;

import org.eclipse.microprofile.reactive.messaging.Message;
import org.kie.kogito.jobs.service.stream.AbstractJobStreamsTest;

import io.smallrye.reactive.messaging.kafka.api.OutgoingKafkaRecordMetadata;

import static org.assertj.core.api.Assertions.assertThat;

class KafkaJobStreamsTest extends AbstractJobStreamsTest<KafkaJobStreams> {

@Override
protected KafkaJobStreams createJobStreams() {
// Optional.of(true) presumably enables the stream so the base-class tests exercise
// the emission path — TODO confirm against the KafkaJobStreams constructor.
return new KafkaJobStreams(objectMapper, Optional.of(true), emitter, URL);
}

@Override
protected void assertExpectedMetadata(Message<String> message) {
    // The Kafka stream must have attached outgoing record metadata keyed by the job id.
    var kafkaMetadata = message.getMetadata(OutgoingKafkaRecordMetadata.class);
    assertThat(kafkaMetadata).isPresent();
    assertThat(kafkaMetadata.orElseThrow().getKey()).isEqualTo(JOB_ID);
}
}

0 comments on commit 1a65e30

Please sign in to comment.