From 04d4db5f33a36d83fc53eb3d01e72ce35542caa8 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:07:34 +0200 Subject: [PATCH 01/16] Add Kafka producer interface to be used --- .../dip/kafka/KafkaProducerInterface.java | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 src/alice/dip/kafka/KafkaProducerInterface.java diff --git a/src/alice/dip/kafka/KafkaProducerInterface.java b/src/alice/dip/kafka/KafkaProducerInterface.java new file mode 100644 index 0000000..d79d3a7 --- /dev/null +++ b/src/alice/dip/kafka/KafkaProducerInterface.java @@ -0,0 +1,64 @@ +/** + * @license + * Copyright CERN and copyright holders of ALICE O2. This software is + * distributed under the terms of the GNU General Public License v3 (GPL + * Version 3), copied verbatim in the file "COPYING". + * + * See http://alice-o2.web.cern.ch/license for full licensing information. + * + * In applying this license CERN does not waive the privileges and immunities + * granted to it by virtue of its status as an Intergovernmental Organization + * or submit itself to any jurisdiction. + */ + +package alice.dip.kafka; + +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.Serializer; + +import java.util.Properties; + +/** + * Generic Kafka Producer interface to send messages to a specified topic. 
+ * @param - Type of the message key (to be used for partitioning) + * @param - Type of the message value (payload) + */ +public class KafkaProducerInterface implements AutoCloseable { + private final KafkaProducer producer; + private final String topic; + + /** + * Constructor to create a KafkaProducerInterface + * @param bootstrapServers - Kafka bootstrap servers connection string in format of host:port + * @param topic - Kafka topic to which messages will be sent + * @param keySerializer - Kafka supported serializer for the message key + * @param valueSerializer - Kafka supported serializer for the message value + */ + public KafkaProducerInterface(String bootstrapServers, String topic, Serializer keySerializer, Serializer valueSerializer) { + this.topic = topic; + Properties props = new Properties(); + props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + props.setProperty(ProducerConfig.ACKS_CONFIG, "all"); + this.producer = new KafkaProducer<>(props, keySerializer, valueSerializer); + } + + /** + * Send a message to the configured Kafka topic + * @param key - message key for partitioning + * @param value - message value (payload) + */ + public void send(K key, V value) { + ProducerRecord record = new ProducerRecord<>(topic, key, value); + producer.send(record); + } + + /** + * Method to close the Kafka producer instance + */ + @Override + public void close() { + producer.close(); + } +} From 9db930cd81b89a0ee2761549cb697423ee030ef0 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:07:52 +0200 Subject: [PATCH 02/16] Add producer dedicated to beam mode change tracks --- .../kafka/BeamModeEventsKafkaProducer.java | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 src/alice/dip/kafka/BeamModeEventsKafkaProducer.java diff --git a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java new file mode 100644 index 0000000..5e486d4 --- /dev/null 
+++ b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java @@ -0,0 +1,64 @@ +/** + * @license + * Copyright CERN and copyright holders of ALICE O2. This software is + * distributed under the terms of the GNU General Public License v3 (GPL + * Version 3), copied verbatim in the file "COPYING". + * + * See http://alice-o2.web.cern.ch/license for full licensing information. + * + * In applying this license CERN does not waive the privileges and immunities + * granted to it by virtue of its status as an Intergovernmental Organization + * or submit itself to any jurisdiction. + */ + +package alice.dip.kafka; + +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.IntegerSerializer; + +import alice.dip.AliDip2BK; +import alice.dip.LhcInfoObj; +import alice.dip.kafka.events.Events; + +/** + * Kafka producer for LHC Beam Mode events, serialized using Protocol Buffers. + */ +public class BeamModeEventsKafkaProducer extends KafkaProducerInterface { + public static String KAFKA_PRODUCER_TOPIC_DIP = "dip.lhc.beam_mode"; + + /** + * Constructor to create a BeamModeEventsKafkaProducer + * @param bootstrapServers - Kafka bootstrap servers connection string in format of host:port + */ + public BeamModeEventsKafkaProducer(String bootstrapServers) { + super(bootstrapServers, KAFKA_PRODUCER_TOPIC_DIP, new IntegerSerializer(), new ByteArraySerializer()); + AliDip2BK.log(2, "BeamModeEventsKafkaProducer", "Initialized producer for topic: " + KAFKA_PRODUCER_TOPIC_DIP); + } + + /** + * Given a fill number for partitioning, a LhcInfoObj containing fill information, + * and a timestamp, creates and sends a proto serialized Beam Mode Event to the Kafka topic. 
+ * @param fillNumber - fill number to be used for partition to ensure ordering + * @param fill - LhcInfoObj containing fill information + * @param timestamp - event timestamp at which the beam mode change event was received from DIP + */ + public void sendEvent(Integer fillNumber, LhcInfoObj fill, long timestamp) { + Events.BeamInfo beamInfo = Events.BeamInfo.newBuilder() + .setStableBeamsStart(fill.getStableBeamStart()) + .setStableBeamsEnd(fill.getStableBeamStop()) + .setFillNumber(fill.fillNo) + .setFillingSchemeName(fill.LHCFillingSchemeName) + .setBeamType(Events.BeamType.valueOf(fill.beamType)) + .build(); + + Events.Ev_BeamModeEvent event = Events.Ev_BeamModeEvent.newBuilder() + .setBeamMode(fill.getBeamMode()) + .setTimestamp(timestamp) + .setBeamInfo(beamInfo) + .build(); + byte[] value = event.toByteArray(); + + send(fillNumber, value); + AliDip2BK.log(2, "BeamModeEventsKafkaProducer", "Sent Beam Mode event for fill " + fill.fillNo + " with mode " + fill.getBeamMode() + " at timestamp " + timestamp); + } +} From 8242631c4e609ef39e92395c31c8a6b5ba77b873 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:08:12 +0200 Subject: [PATCH 03/16] Use the newly added beam mode event producer --- src/alice/dip/AliDip2BK.java | 24 +++++++++++++----- src/alice/dip/DipMessagesProcessor.java | 33 ++++++++++++++++++++++--- 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/src/alice/dip/AliDip2BK.java b/src/alice/dip/AliDip2BK.java index ba7073b..a0675e8 100644 --- a/src/alice/dip/AliDip2BK.java +++ b/src/alice/dip/AliDip2BK.java @@ -1,10 +1,14 @@ -/************* - * cil - **************/ - -/* - * Main Class +/** + * @license + * Copyright CERN and copyright holders of ALICE O2. This software is + * distributed under the terms of the GNU General Public License v3 (GPL + * Version 3), copied verbatim in the file "COPYING". + * + * See http://alice-o2.web.cern.ch/license for full licensing information. 
* + * In applying this license CERN does not waive the privileges and immunities + * granted to it by virtue of its status as an Intergovernmental Organization + * or submit itself to any jurisdiction. */ package alice.dip; @@ -18,6 +22,8 @@ import java.util.Date; import java.util.Properties; +import alice.dip.kafka.BeamModeEventsKafkaProducer; + public class AliDip2BK implements Runnable { public static String Version = "2.1.2 22-Jul-2025"; public static String DNSnode = "dipnsdev.cern.ch"; @@ -52,6 +58,7 @@ public class AliDip2BK implements Runnable { BookkeepingClient bookkeepingClient; StartOfRunKafkaConsumer kcs; EndOfRunKafkaConsumer kce; + BeamModeEventsKafkaProducer beamModeEventsKafkaProducer; public AliDip2BK() { startDate = (new Date()).getTime(); @@ -83,6 +90,9 @@ public AliDip2BK() { kce = new EndOfRunKafkaConsumer(dipMessagesProcessor); + beamModeEventsKafkaProducer = new BeamModeEventsKafkaProducer(AliDip2BK.bootstrapServers); + dipMessagesProcessor.setEventsProducer(beamModeEventsKafkaProducer); + shutdownProc(); Thread t = new Thread(this); @@ -145,6 +155,8 @@ public void run() { } dipMessagesProcessor.saveState(); writeStat("AliDip2BK.stat", true); + beamModeEventsKafkaProducer.close(); + log(4, "AliDip2BK", "Beam Mode Events Kafka Producer closed"); } }); } diff --git a/src/alice/dip/DipMessagesProcessor.java b/src/alice/dip/DipMessagesProcessor.java index 37c827e..4fe346b 100644 --- a/src/alice/dip/DipMessagesProcessor.java +++ b/src/alice/dip/DipMessagesProcessor.java @@ -1,6 +1,15 @@ -/************* - * cil - **************/ +/** + * @license + * Copyright CERN and copyright holders of ALICE O2. This software is + * distributed under the terms of the GNU General Public License v3 (GPL + * Version 3), copied verbatim in the file "COPYING". + * + * See http://alice-o2.web.cern.ch/license for full licensing information. 
+ * + * In applying this license CERN does not waive the privileges and immunities + * granted to it by virtue of its status as an Intergovernmental Organization + * or submit itself to any jurisdiction. + */ package alice.dip; @@ -19,6 +28,7 @@ import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; +import alice.dip.kafka.BeamModeEventsKafkaProducer; import cern.dip.BadParameter; import cern.dip.DipData; import cern.dip.DipTimestamp; @@ -46,12 +56,13 @@ public class DipMessagesProcessor implements Runnable { private BlockingQueue outputQueue = new ArrayBlockingQueue(100); private final LuminosityManager luminosityManager; + private BeamModeEventsKafkaProducer beamModeEventsKafkaProducer; public DipMessagesProcessor(BookkeepingClient bookkeepingClient, LuminosityManager luminosityManager) { this.bookkeepingClient = bookkeepingClient; this.luminosityManager = luminosityManager; - + this.beamModeEventsKafkaProducer = null; Thread t = new Thread(this); t.start(); @@ -59,6 +70,14 @@ public DipMessagesProcessor(BookkeepingClient bookkeepingClient, LuminosityManag loadState(); } + /** + * Setter of events producer + * @param beamModeEventsKafkaProducer - instance of BeamModeEventsKafkaProducer to be used to send events + */ + public void setEventsProducer(BeamModeEventsKafkaProducer beamModeEventsKafkaProducer) { + this.beamModeEventsKafkaProducer = beamModeEventsKafkaProducer; + } + /* * This method is used for receiving DipData messages from the Dip Client */ @@ -399,6 +418,9 @@ public void newSafeMode(long time, int val) { } else { currentFill.setBeamMode(time, "LOST BEAMS"); + if (beamModeEventsKafkaProducer != null) { + beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, time); + } AliDip2BK.log(5, "ProcData.newSafeBeams", " CHANGE BEAM MODE TO LOST BEAMS !!! 
"); } @@ -580,6 +602,9 @@ public void newBeamMode(long date, String BeamMode) { ); bookkeepingClient.updateLhcFill(currentFill); saveState(); + if (beamModeEventsKafkaProducer != null) { + beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, date); + } } else { currentFill.endedTime = date; bookkeepingClient.updateLhcFill(currentFill); From 3d94def5c382f3f2b1080d645f191070d51df8c3 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:09:08 +0200 Subject: [PATCH 04/16] Add proto files used by FLP suite --- src/alice/dip/protos/events.proto | 159 +++++++++++++++++++++++ src/alice/dip/protos/protos/common.proto | 93 +++++++++++++ 2 files changed, 252 insertions(+) create mode 100644 src/alice/dip/protos/events.proto create mode 100644 src/alice/dip/protos/protos/common.proto diff --git a/src/alice/dip/protos/events.proto b/src/alice/dip/protos/events.proto new file mode 100644 index 0000000..a40ecb6 --- /dev/null +++ b/src/alice/dip/protos/events.proto @@ -0,0 +1,159 @@ +/* + * === This file is part of ALICE O² === + * + * Copyright 2024 CERN and copyright holders of ALICE O². + * Author: Teo Mrnjavac + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + * + * In applying this license CERN does not waive the privileges and + * immunities granted to it by virtue of its status as an + * Intergovernmental Organization or submit itself to any jurisdiction. 
+ */ + +syntax = "proto3"; + +package events; +option java_package = "ch.cern.alice.o2.control.events"; +option go_package = "github.com/AliceO2Group/Control/common/protos;pb"; + +import public "protos/common.proto"; + +//////////////// Common event messages /////////////// + +enum OpStatus { + NULL = 0; + STARTED = 1; + ONGOING = 2; + DONE_OK = 3; + DONE_ERROR = 4; + DONE_TIMEOUT = 5; +} + +message Ev_MetaEvent_MesosHeartbeat { +} + +message Ev_MetaEvent_CoreStart { + string frameworkId = 1; +} + +message Ev_MetaEvent_FrameworkEvent { + string frameworkId = 1; + string message = 2; +} + +message Ev_EnvironmentEvent { + string environmentId = 1; + string state = 2; + uint32 runNumber = 3; // only when the environment is in the running state + string error = 4; + string message = 5; // any additional message concerning the current state or transition + string transition = 6; + string transitionStep = 7; + OpStatus transitionStatus = 8; + map vars = 9; // consolidated environment variables at the root role of the environment + common.User lastRequestUser = 10; + common.WorkflowTemplateInfo workflowTemplateInfo = 11; +} + +message Traits { + string trigger = 1; + string await = 2; + string timeout = 3; + bool critical = 4; +} + +message Ev_TaskEvent { + string name = 1; // task name, based on the name of the task class + string taskid = 2; // task id, unique + string state = 3; // state machine state for this task + string status = 4; // posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. 
+ string hostname = 5; + string className = 6; // name of the task class from which this task was spawned + Traits traits = 7; + string environmentId = 8; + string path = 9; // path to the parent taskRole of this task within the environment +} + +message Ev_CallEvent { + string func = 1; // name of the function being called, within the workflow template context + OpStatus callStatus = 2; // progress or success/failure state of the call + string return = 3; // return value of the function + Traits traits = 4; + string output = 5; // any additional output of the function + string error = 6; // error value, if returned + string environmentId = 7; + string path = 8; // path to the parent callRole of this call within the environment +} + +message Ev_RoleEvent { + string name = 1; // role name + string status = 2; // posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles + string state = 3; // state machine state for this role + string rolePath = 4; // path to this role within the environment + string environmentId = 5; +} + +message Ev_IntegratedServiceEvent { + string name = 1; // name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate + string error = 2; // error message, if any + string operationName = 3; // name of the operation, usually the name of the integrated service function being called e.g. 
ddsched.PartitionTerminate()" + OpStatus operationStatus = 4; // progress or success/failure state of the operation + string operationStep = 5; // if the operation has substeps, this is the name of the current substep, like an API call or polling phase + OpStatus operationStepStatus = 6; // progress or success/failure state of the current substep + string environmentId = 7; + string payload = 8; // any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call +} + +message Ev_RunEvent { + string environmentId = 1; + uint32 runNumber = 2; + string state = 3; + string error = 4; + string transition = 5; + OpStatus transitionStatus = 6; + reserved 7; // 7 was used for `vars` field that was removed + common.User lastRequestUser = 8; +} + +/** + * Beam mode changes are propagated as Kafka events and to be sent by the BKP-LHC-Client on a dedicated topic + * e.g. dip.lhc.beam_mode + */ +message Ev_BeamModeEvent { + int64 timestamp = 1; // milliseconds since epoch when the beam mode change happened + common.BeamInfo beamInfo = 2; +} + +message Event { + int64 timestamp = 1; + int64 timestampNano = 2; + reserved 3 to 10; + reserved 17 to 100; + + oneof Payload { + Ev_EnvironmentEvent environmentEvent = 11; + Ev_TaskEvent taskEvent = 12; + Ev_RoleEvent roleEvent = 13; + Ev_CallEvent callEvent = 14; + Ev_IntegratedServiceEvent integratedServiceEvent = 15; + Ev_RunEvent runEvent = 16; + + Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + Ev_MetaEvent_CoreStart coreStartEvent = 103; + + Ev_BeamModeEvent beamModeEvent = 110; + } +} diff --git a/src/alice/dip/protos/protos/common.proto b/src/alice/dip/protos/protos/common.proto new file mode 100644 index 0000000..f1e7787 --- /dev/null +++ b/src/alice/dip/protos/protos/common.proto @@ -0,0 +1,93 @@ +/* + * === This file is part of ALICE O² === + * + * Copyright 2024 CERN and copyright holders 
of ALICE O². + * Author: Teo Mrnjavac + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + * + * In applying this license CERN does not waive the privileges and + * immunities granted to it by virtue of its status as an + * Intergovernmental Organization or submit itself to any jurisdiction. + */ + + +syntax = "proto3"; + +package common; +option java_package = "ch.cern.alice.o2.control.common"; +option go_package = "github.com/AliceO2Group/Control/common/protos;pb"; + +//////////////// Common types /////////////// + +message User { + // The unique CERN identifier of this user. + optional int32 externalId = 1; + // The unique identifier of this entity. + optional int32 id = 2; + // Name of the user. + string name = 3; +} + +message WorkflowTemplateInfo { + string name = 1; + string description = 2; + string path = 3; + bool public = 4; // whether the environment is public or not +} + +/** + * Beam information at a specific point in time (e.g. start or end of stable beams) + */ +message BeamInfo { + int64 stableBeamsStart = 1; // milliseconds since epoch when stable beams started + int64 stableBeamsEnd = 2; // milliseconds since epoch when stable beams ended + int32 fillNumber = 3; // LHC fill number + string fillingSchemeName = 4; // LHC filling scheme name e.g. 
25ns_2460b_2448_2089_2227_144bpi_20inj + float beam1Energy = 5; // in GeV + float beam2Energy = 6; // in GeV + string beamType = 7; // e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p + BeamMode beamMode = 8; +} + +/** + * Beam modes as defined and sent by LHC DIP client plus: + * * virtual type LOST_BEAMS - that is generated when beam 1 and beam 2 energy values are not equal anymore as per LHC DIP track: dip/acc/LHC/RunControl/SafeBeam + * * virtual type UNKNOWN - that is generated when there is no beam in the machine or value not added by the BKP-LHC Client + * Source of Beam Modes: https://lhc-commissioning.web.cern.ch/systems/data-exchange/doc/LHC-OP-ES-0005-10-00.pdf + */ +enum BeamMode { + UNKNOWN = 0; // virtual type + SETUP = 1; + ABORT = 2; + INJECTION_PROBE_BEAM = 3; + INJECTION_SETUP_BEAM = 4; + INJECTION_PHYSICS_BEAM = 5; + PREPARE_RAMP = 6; + RAMP = 7; + FLAT_TOP = 8; + SQUEEZE = 9; + ADJUST = 10; + STABLE_BEAMS = 11; + LOST_BEAMS = 12; // virtual type + UNSTABLE_BEAMS = 13; + BEAM_DUMP_WARNING = 14; + BEAM_DUMP = 15; + RAMP_DOWN = 16; + CYCLING = 17; + RECOVERY = 18; + INJECT_AND_DUMP = 19; + CIRCULATE_AND_DUMP = 20; + NO_BEAM = 21; +} From 6623a2cc3cb41d5e78ec1189164355fe3a8ef832 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:09:19 +0200 Subject: [PATCH 05/16] Update readme --- README.md | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 7463d1d..f565023 100644 --- a/README.md +++ b/README.md @@ -1,26 +1,26 @@ -# AliDip2BK +# BKP-LHC Client Initial Repository based on work from @iclegrand in repository: https://github.com/iclegrand/AliDip2BK -Collect selected Info from the CERN DIP system (LHC & ALICE -DCS) and publish them into the Bookkeeping/InfoLogger systems +The BKP-LHC Client is a java based application which uses the CERN DIP `jar` dependency to consume events from desired tracks. 
These events are then either: +- published on O2 Kafka Topics to be consumed further by O2 applications (e.g. ECS) +- updates the O2 Bookkeeping application via their HTTP endpoints. A detailed description for this project is provided by Roberto in this document: https://codimd.web.cern.ch/G0TSXqA1R8iPqWw2w2wuew - -This program requires java 11 on a 64 bit system -(this is a constrain from the DIP library) - -To test the java version run +### Requirements +- java 11 on a 64 bit system (this is a constrain from the DIP library) +- to test the java version run +``` java -version +``` -The run configuration is defined in the AliDip2BK.properties file. - -To run the program : - -sh runAliDip2BK.sh +### Configuration +The run configuration is defined in the `AliDip2BK.properties` file. -When the the program is stopped, it enters into the shutdown mode and it will -unsubscribe to the DIP data providers will wait to process the DipData queue -and saves the state of the fills and runs. +### Published Events +Currently the BKP-LHC-Client publishes on Kafka (topic: "dip.lhc.beam_mode") events for the start and end of stable beams in the format of `Ev_BeamModeEvent`. 
The proto file's source of truth is within the [Control Repository](https://github.com/AliceO2Group/Control/blob/master/common/protos/events.proto) +### Bookkeeping Updates +- TBC From fe23d9e5297a6943d8704104673a594a0375b46b Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:16:16 +0200 Subject: [PATCH 06/16] Add protoc generated classes --- .../kafka/BeamModeEventsKafkaProducer.java | 7 +- src/alice/dip/kafka/events/Common.java | 3443 ++++ src/alice/dip/kafka/events/Events.java | 16603 ++++++++++++++++ 3 files changed, 20050 insertions(+), 3 deletions(-) create mode 100644 src/alice/dip/kafka/events/Common.java create mode 100644 src/alice/dip/kafka/events/Events.java diff --git a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java index 5e486d4..c8a50ae 100644 --- a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java +++ b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java @@ -19,6 +19,7 @@ import alice.dip.AliDip2BK; import alice.dip.LhcInfoObj; import alice.dip.kafka.events.Events; +import alice.dip.kafka.events.Common; /** * Kafka producer for LHC Beam Mode events, serialized using Protocol Buffers. 
@@ -43,16 +44,16 @@ public BeamModeEventsKafkaProducer(String bootstrapServers) { * @param timestamp - event timestamp at which the beam mode change event was received from DIP */ public void sendEvent(Integer fillNumber, LhcInfoObj fill, long timestamp) { - Events.BeamInfo beamInfo = Events.BeamInfo.newBuilder() + Common.BeamInfo beamInfo = Common.BeamInfo.newBuilder() .setStableBeamsStart(fill.getStableBeamStart()) .setStableBeamsEnd(fill.getStableBeamStop()) .setFillNumber(fill.fillNo) .setFillingSchemeName(fill.LHCFillingSchemeName) - .setBeamType(Events.BeamType.valueOf(fill.beamType)) + .setBeamMode(Common.BeamMode.valueOf(fill.getBeamMode())) + .setBeamType(fill.beamType) .build(); Events.Ev_BeamModeEvent event = Events.Ev_BeamModeEvent.newBuilder() - .setBeamMode(fill.getBeamMode()) .setTimestamp(timestamp) .setBeamInfo(beamInfo) .build(); diff --git a/src/alice/dip/kafka/events/Common.java b/src/alice/dip/kafka/events/Common.java new file mode 100644 index 0000000..eb325ca --- /dev/null +++ b/src/alice/dip/kafka/events/Common.java @@ -0,0 +1,3443 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: common.proto +// Protobuf Java Version: 4.32.1 + +package alice.dip.kafka.events; + +@com.google.protobuf.Generated +public final class Common extends com.google.protobuf.GeneratedFile { + private Common() {} + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Common.class.getName()); + } + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + /** + *
+   * *
+   * Beam modes as defined and sent by LHC DIP client plus:
+   * * virtual type LOST_BEAMS - that is generated when beam 1 and beam 2 energy values are not equal anymore as per LHC DIP track: dip/acc/LHC/RunControl/SafeBeam
+   * * virtual type UNKNOWN - that is generated when there is no beam in the machine or value not added by the BKP-LHC Client
+   * Source of Beam Modes: https://lhc-commissioning.web.cern.ch/systems/data-exchange/doc/LHC-OP-ES-0005-10-00.pdf
+   * 
+ * + * Protobuf enum {@code common.BeamMode} + */ + public enum BeamMode + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * virtual type
+     * 
+ * + * UNKNOWN = 0; + */ + UNKNOWN(0), + /** + * SETUP = 1; + */ + SETUP(1), + /** + * ABORT = 2; + */ + ABORT(2), + /** + * INJECTION_PROBE_BEAM = 3; + */ + INJECTION_PROBE_BEAM(3), + /** + * INJECTION_SETUP_BEAM = 4; + */ + INJECTION_SETUP_BEAM(4), + /** + * INJECTION_PHYSICS_BEAM = 5; + */ + INJECTION_PHYSICS_BEAM(5), + /** + * PREPARE_RAMP = 6; + */ + PREPARE_RAMP(6), + /** + * RAMP = 7; + */ + RAMP(7), + /** + * FLAT_TOP = 8; + */ + FLAT_TOP(8), + /** + * SQUEEZE = 9; + */ + SQUEEZE(9), + /** + * ADJUST = 10; + */ + ADJUST(10), + /** + * STABLE_BEAMS = 11; + */ + STABLE_BEAMS(11), + /** + *
+     * virtual type
+     * 
+ * + * LOST_BEAMS = 12; + */ + LOST_BEAMS(12), + /** + * UNSTABLE_BEAMS = 13; + */ + UNSTABLE_BEAMS(13), + /** + * BEAM_DUMP_WARNING = 14; + */ + BEAM_DUMP_WARNING(14), + /** + * BEAM_DUMP = 15; + */ + BEAM_DUMP(15), + /** + * RAMP_DOWN = 16; + */ + RAMP_DOWN(16), + /** + * CYCLING = 17; + */ + CYCLING(17), + /** + * RECOVERY = 18; + */ + RECOVERY(18), + /** + * INJECT_AND_DUMP = 19; + */ + INJECT_AND_DUMP(19), + /** + * CIRCULATE_AND_DUMP = 20; + */ + CIRCULATE_AND_DUMP(20), + /** + * NO_BEAM = 21; + */ + NO_BEAM(21), + UNRECOGNIZED(-1), + ; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + BeamMode.class.getName()); + } + /** + *
+     * virtual type
+     * 
+ * + * UNKNOWN = 0; + */ + public static final int UNKNOWN_VALUE = 0; + /** + * SETUP = 1; + */ + public static final int SETUP_VALUE = 1; + /** + * ABORT = 2; + */ + public static final int ABORT_VALUE = 2; + /** + * INJECTION_PROBE_BEAM = 3; + */ + public static final int INJECTION_PROBE_BEAM_VALUE = 3; + /** + * INJECTION_SETUP_BEAM = 4; + */ + public static final int INJECTION_SETUP_BEAM_VALUE = 4; + /** + * INJECTION_PHYSICS_BEAM = 5; + */ + public static final int INJECTION_PHYSICS_BEAM_VALUE = 5; + /** + * PREPARE_RAMP = 6; + */ + public static final int PREPARE_RAMP_VALUE = 6; + /** + * RAMP = 7; + */ + public static final int RAMP_VALUE = 7; + /** + * FLAT_TOP = 8; + */ + public static final int FLAT_TOP_VALUE = 8; + /** + * SQUEEZE = 9; + */ + public static final int SQUEEZE_VALUE = 9; + /** + * ADJUST = 10; + */ + public static final int ADJUST_VALUE = 10; + /** + * STABLE_BEAMS = 11; + */ + public static final int STABLE_BEAMS_VALUE = 11; + /** + *
+     * virtual type
+     * 
+ * + * LOST_BEAMS = 12; + */ + public static final int LOST_BEAMS_VALUE = 12; + /** + * UNSTABLE_BEAMS = 13; + */ + public static final int UNSTABLE_BEAMS_VALUE = 13; + /** + * BEAM_DUMP_WARNING = 14; + */ + public static final int BEAM_DUMP_WARNING_VALUE = 14; + /** + * BEAM_DUMP = 15; + */ + public static final int BEAM_DUMP_VALUE = 15; + /** + * RAMP_DOWN = 16; + */ + public static final int RAMP_DOWN_VALUE = 16; + /** + * CYCLING = 17; + */ + public static final int CYCLING_VALUE = 17; + /** + * RECOVERY = 18; + */ + public static final int RECOVERY_VALUE = 18; + /** + * INJECT_AND_DUMP = 19; + */ + public static final int INJECT_AND_DUMP_VALUE = 19; + /** + * CIRCULATE_AND_DUMP = 20; + */ + public static final int CIRCULATE_AND_DUMP_VALUE = 20; + /** + * NO_BEAM = 21; + */ + public static final int NO_BEAM_VALUE = 21; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static BeamMode valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static BeamMode forNumber(int value) { + switch (value) { + case 0: return UNKNOWN; + case 1: return SETUP; + case 2: return ABORT; + case 3: return INJECTION_PROBE_BEAM; + case 4: return INJECTION_SETUP_BEAM; + case 5: return INJECTION_PHYSICS_BEAM; + case 6: return PREPARE_RAMP; + case 7: return RAMP; + case 8: return FLAT_TOP; + case 9: return SQUEEZE; + case 10: return ADJUST; + case 11: return STABLE_BEAMS; + case 12: return LOST_BEAMS; + case 13: return UNSTABLE_BEAMS; + case 14: return BEAM_DUMP_WARNING; + case 15: return BEAM_DUMP; + case 16: return RAMP_DOWN; + case 17: return CYCLING; + case 18: return RECOVERY; + case 19: return INJECT_AND_DUMP; + case 20: return CIRCULATE_AND_DUMP; + case 21: return NO_BEAM; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + BeamMode> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public BeamMode findValueByNumber(int number) { + return BeamMode.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return alice.dip.kafka.events.Common.getDescriptor().getEnumTypes().get(0); + } + + private static final BeamMode[] VALUES = values(); + + public static BeamMode valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this 
type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private BeamMode(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:common.BeamMode) + } + + public interface UserOrBuilder extends + // @@protoc_insertion_point(interface_extends:common.User) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The unique CERN identifier of this user.
+     * 
+ * + * optional int32 externalId = 1; + * @return Whether the externalId field is set. + */ + boolean hasExternalId(); + /** + *
+     * The unique CERN identifier of this user.
+     * 
+ * + * optional int32 externalId = 1; + * @return The externalId. + */ + int getExternalId(); + + /** + *
+     * The unique identifier of this entity.
+     * 
+ * + * optional int32 id = 2; + * @return Whether the id field is set. + */ + boolean hasId(); + /** + *
+     * The unique identifier of this entity.
+     * 
+ * + * optional int32 id = 2; + * @return The id. + */ + int getId(); + + /** + *
+     * Name of the user.
+     * 
+ * + * string name = 3; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * Name of the user.
+     * 
+ * + * string name = 3; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + } + /** + * Protobuf type {@code common.User} + */ + public static final class User extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:common.User) + UserOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + User.class.getName()); + } + // Use User.newBuilder() to construct. + private User(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private User() { + name_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Common.internal_static_common_User_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Common.internal_static_common_User_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Common.User.class, alice.dip.kafka.events.Common.User.Builder.class); + } + + private int bitField0_; + public static final int EXTERNALID_FIELD_NUMBER = 1; + private int externalId_ = 0; + /** + *
+     * The unique CERN identifier of this user.
+     * 
+ * + * optional int32 externalId = 1; + * @return Whether the externalId field is set. + */ + @java.lang.Override + public boolean hasExternalId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * The unique CERN identifier of this user.
+     * 
+ * + * optional int32 externalId = 1; + * @return The externalId. + */ + @java.lang.Override + public int getExternalId() { + return externalId_; + } + + public static final int ID_FIELD_NUMBER = 2; + private int id_ = 0; + /** + *
+     * The unique identifier of this entity.
+     * 
+ * + * optional int32 id = 2; + * @return Whether the id field is set. + */ + @java.lang.Override + public boolean hasId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     * The unique identifier of this entity.
+     * 
+ * + * optional int32 id = 2; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + + public static final int NAME_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + *
+     * Name of the user.
+     * 
+ * + * string name = 3; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * Name of the user.
+     * 
+ * + * string name = 3; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeInt32(1, externalId_); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeInt32(2, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, externalId_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Common.User)) { + return super.equals(obj); 
+ } + alice.dip.kafka.events.Common.User other = (alice.dip.kafka.events.Common.User) obj; + + if (hasExternalId() != other.hasExternalId()) return false; + if (hasExternalId()) { + if (getExternalId() + != other.getExternalId()) return false; + } + if (hasId() != other.hasId()) return false; + if (hasId()) { + if (getId() + != other.getId()) return false; + } + if (!getName() + .equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasExternalId()) { + hash = (37 * hash) + EXTERNALID_FIELD_NUMBER; + hash = (53 * hash) + getExternalId(); + } + if (hasId()) { + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId(); + } + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Common.User parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.User parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.User parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.User parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.User parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.User parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.User parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Common.User parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Common.User parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Common.User parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Common.User parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Common.User parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Common.User prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code common.User} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:common.User) + alice.dip.kafka.events.Common.UserOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Common.internal_static_common_User_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Common.internal_static_common_User_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Common.User.class, alice.dip.kafka.events.Common.User.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.common.Common.User.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + externalId_ = 0; + id_ = 0; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
alice.dip.kafka.events.Common.internal_static_common_User_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.User getDefaultInstanceForType() { + return alice.dip.kafka.events.Common.User.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Common.User build() { + alice.dip.kafka.events.Common.User result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.User buildPartial() { + alice.dip.kafka.events.Common.User result = new alice.dip.kafka.events.Common.User(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Common.User result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.externalId_ = externalId_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.id_ = id_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.name_ = name_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Common.User) { + return mergeFrom((alice.dip.kafka.events.Common.User)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Common.User other) { + if (other == alice.dip.kafka.events.Common.User.getDefaultInstance()) return this; + if (other.hasExternalId()) { + setExternalId(other.getExternalId()); + } + if (other.hasId()) { + setId(other.getId()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + 
public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + externalId_ = input.readInt32(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 16: { + id_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int externalId_ ; + /** + *
+       * The unique CERN identifier of this user.
+       * 
+ * + * optional int32 externalId = 1; + * @return Whether the externalId field is set. + */ + @java.lang.Override + public boolean hasExternalId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       * The unique CERN identifier of this user.
+       * 
+ * + * optional int32 externalId = 1; + * @return The externalId. + */ + @java.lang.Override + public int getExternalId() { + return externalId_; + } + /** + *
+       * The unique CERN identifier of this user.
+       * 
+ * + * optional int32 externalId = 1; + * @param value The externalId to set. + * @return This builder for chaining. + */ + public Builder setExternalId(int value) { + + externalId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The unique CERN identifier of this user.
+       * 
+ * + * optional int32 externalId = 1; + * @return This builder for chaining. + */ + public Builder clearExternalId() { + bitField0_ = (bitField0_ & ~0x00000001); + externalId_ = 0; + onChanged(); + return this; + } + + private int id_ ; + /** + *
+       * The unique identifier of this entity.
+       * 
+ * + * optional int32 id = 2; + * @return Whether the id field is set. + */ + @java.lang.Override + public boolean hasId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * The unique identifier of this entity.
+       * 
+ * + * optional int32 id = 2; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + /** + *
+       * The unique identifier of this entity.
+       * 
+ * + * optional int32 id = 2; + * @param value The id to set. + * @return This builder for chaining. + */ + public Builder setId(int value) { + + id_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * The unique identifier of this entity.
+       * 
+ * + * optional int32 id = 2; + * @return This builder for chaining. + */ + public Builder clearId() { + bitField0_ = (bitField0_ & ~0x00000002); + id_ = 0; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+       * Name of the user.
+       * 
+ * + * string name = 3; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Name of the user.
+       * 
+ * + * string name = 3; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Name of the user.
+       * 
+ * + * string name = 3; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * Name of the user.
+       * 
+ * + * string name = 3; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * Name of the user.
+       * 
+ * + * string name = 3; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:common.User) + } + + // @@protoc_insertion_point(class_scope:common.User) + private static final alice.dip.kafka.events.Common.User DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Common.User(); + } + + public static alice.dip.kafka.events.Common.User getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public User parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.User getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface WorkflowTemplateInfoOrBuilder 
extends + // @@protoc_insertion_point(interface_extends:common.WorkflowTemplateInfo) + com.google.protobuf.MessageOrBuilder { + + /** + * string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * string description = 2; + * @return The description. + */ + java.lang.String getDescription(); + /** + * string description = 2; + * @return The bytes for description. + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + + /** + * string path = 3; + * @return The path. + */ + java.lang.String getPath(); + /** + * string path = 3; + * @return The bytes for path. + */ + com.google.protobuf.ByteString + getPathBytes(); + + /** + *
+     * whether the environment is public or not
+     * 
+ * + * bool public = 4; + * @return The public. + */ + boolean getPublic(); + } + /** + * Protobuf type {@code common.WorkflowTemplateInfo} + */ + public static final class WorkflowTemplateInfo extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:common.WorkflowTemplateInfo) + WorkflowTemplateInfoOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + WorkflowTemplateInfo.class.getName()); + } + // Use WorkflowTemplateInfo.newBuilder() to construct. + private WorkflowTemplateInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private WorkflowTemplateInfo() { + name_ = ""; + description_ = ""; + path_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Common.WorkflowTemplateInfo.class, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * string name = 1; + * @return The name. 
+ */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DESCRIPTION_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object description_ = ""; + /** + * string description = 2; + * @return The description. + */ + @java.lang.Override + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + * string description = 2; + * @return The bytes for description. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PATH_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object path_ = ""; + /** + * string path = 3; + * @return The path. 
+ */ + @java.lang.Override + public java.lang.String getPath() { + java.lang.Object ref = path_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + path_ = s; + return s; + } + } + /** + * string path = 3; + * @return The bytes for path. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PUBLIC_FIELD_NUMBER = 4; + private boolean public_ = false; + /** + *
+     * whether the environment is public or not
+     * 
+ * + * bool public = 4; + * @return The public. + */ + @java.lang.Override + public boolean getPublic() { + return public_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(description_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, description_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, path_); + } + if (public_ != false) { + output.writeBool(4, public_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(description_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, description_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, path_); + } + if (public_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, public_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj 
instanceof alice.dip.kafka.events.Common.WorkflowTemplateInfo)) { + return super.equals(obj); + } + alice.dip.kafka.events.Common.WorkflowTemplateInfo other = (alice.dip.kafka.events.Common.WorkflowTemplateInfo) obj; + + if (!getName() + .equals(other.getName())) return false; + if (!getDescription() + .equals(other.getDescription())) return false; + if (!getPath() + .equals(other.getPath())) return false; + if (getPublic() + != other.getPublic()) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPath().hashCode(); + hash = (37 * hash) + PUBLIC_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getPublic()); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Common.WorkflowTemplateInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code common.WorkflowTemplateInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:common.WorkflowTemplateInfo) + alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Common.WorkflowTemplateInfo.class, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder.class); + } + + // 
Construct using ch.cern.alice.o2.control.common.Common.WorkflowTemplateInfo.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + description_ = ""; + path_ = ""; + public_ = false; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.WorkflowTemplateInfo getDefaultInstanceForType() { + return alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Common.WorkflowTemplateInfo build() { + alice.dip.kafka.events.Common.WorkflowTemplateInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.WorkflowTemplateInfo buildPartial() { + alice.dip.kafka.events.Common.WorkflowTemplateInfo result = new alice.dip.kafka.events.Common.WorkflowTemplateInfo(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Common.WorkflowTemplateInfo result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.description_ = description_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.path_ = path_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.public_ = public_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Common.WorkflowTemplateInfo) 
{ + return mergeFrom((alice.dip.kafka.events.Common.WorkflowTemplateInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Common.WorkflowTemplateInfo other) { + if (other == alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getPath().isEmpty()) { + path_ = other.path_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (other.getPublic() != false) { + setPublic(other.getPublic()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + description_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + path_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 32: { + public_ = input.readBool(); + bitField0_ |= 0x00000008; + break; + } // case 32 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + 
throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object description_ = ""; + /** + * string description = 2; + * @return The description. 
+ */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string description = 2; + * @return The bytes for description. + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string description = 2; + * @param value The description to set. + * @return This builder for chaining. + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + description_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string description = 2; + * @return This builder for chaining. + */ + public Builder clearDescription() { + description_ = getDefaultInstance().getDescription(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string description = 2; + * @param value The bytes for description to set. + * @return This builder for chaining. + */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + description_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object path_ = ""; + /** + * string path = 3; + * @return The path. 
+ */ + public java.lang.String getPath() { + java.lang.Object ref = path_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + path_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string path = 3; + * @return The bytes for path. + */ + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string path = 3; + * @param value The path to set. + * @return This builder for chaining. + */ + public Builder setPath( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + path_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * string path = 3; + * @return This builder for chaining. + */ + public Builder clearPath() { + path_ = getDefaultInstance().getPath(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * string path = 3; + * @param value The bytes for path to set. + * @return This builder for chaining. + */ + public Builder setPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + path_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private boolean public_ ; + /** + *
+       * whether the environment is public or not
+       * 
+ * + * bool public = 4; + * @return The public. + */ + @java.lang.Override + public boolean getPublic() { + return public_; + } + /** + *
+       * whether the environment is public or not
+       * 
+ * + * bool public = 4; + * @param value The public to set. + * @return This builder for chaining. + */ + public Builder setPublic(boolean value) { + + public_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * whether the environment is public or not
+       * 
+ * + * bool public = 4; + * @return This builder for chaining. + */ + public Builder clearPublic() { + bitField0_ = (bitField0_ & ~0x00000008); + public_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:common.WorkflowTemplateInfo) + } + + // @@protoc_insertion_point(class_scope:common.WorkflowTemplateInfo) + private static final alice.dip.kafka.events.Common.WorkflowTemplateInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Common.WorkflowTemplateInfo(); + } + + public static alice.dip.kafka.events.Common.WorkflowTemplateInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public WorkflowTemplateInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.WorkflowTemplateInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface BeamInfoOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:common.BeamInfo) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * milliseconds since epoch when stable beams started
+     * 
+ * + * int64 stableBeamsStart = 1; + * @return The stableBeamsStart. + */ + long getStableBeamsStart(); + + /** + *
+     * milliseconds since epoch when stable beams ended
+     * 
+ * + * int64 stableBeamsEnd = 2; + * @return The stableBeamsEnd. + */ + long getStableBeamsEnd(); + + /** + *
+     * LHC fill number
+     * 
+ * + * int32 fillNumber = 3; + * @return The fillNumber. + */ + int getFillNumber(); + + /** + *
+     * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+     * 
+ * + * string fillingSchemeName = 4; + * @return The fillingSchemeName. + */ + java.lang.String getFillingSchemeName(); + /** + *
+     * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+     * 
+ * + * string fillingSchemeName = 4; + * @return The bytes for fillingSchemeName. + */ + com.google.protobuf.ByteString + getFillingSchemeNameBytes(); + + /** + *
+     * in GeV
+     * 
+ * + * float beam1Energy = 5; + * @return The beam1Energy. + */ + float getBeam1Energy(); + + /** + *
+     * in GeV
+     * 
+ * + * float beam2Energy = 6; + * @return The beam2Energy. + */ + float getBeam2Energy(); + + /** + *
+     * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+     * 
+ * + * string beamType = 7; + * @return The beamType. + */ + java.lang.String getBeamType(); + /** + *
+     * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+     * 
+ * + * string beamType = 7; + * @return The bytes for beamType. + */ + com.google.protobuf.ByteString + getBeamTypeBytes(); + + /** + * .common.BeamMode beamMode = 8; + * @return The enum numeric value on the wire for beamMode. + */ + int getBeamModeValue(); + /** + * .common.BeamMode beamMode = 8; + * @return The beamMode. + */ + alice.dip.kafka.events.Common.BeamMode getBeamMode(); + } + /** + *
+   * *
+   * Beam information at a specific point in time (e.g. start or end of stable beams)
+   * 
+ * + * Protobuf type {@code common.BeamInfo} + */ + public static final class BeamInfo extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:common.BeamInfo) + BeamInfoOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + BeamInfo.class.getName()); + } + // Use BeamInfo.newBuilder() to construct. + private BeamInfo(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private BeamInfo() { + fillingSchemeName_ = ""; + beamType_ = ""; + beamMode_ = 0; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Common.BeamInfo.class, alice.dip.kafka.events.Common.BeamInfo.Builder.class); + } + + public static final int STABLEBEAMSSTART_FIELD_NUMBER = 1; + private long stableBeamsStart_ = 0L; + /** + *
+     * milliseconds since epoch when stable beams started
+     * 
+ * + * int64 stableBeamsStart = 1; + * @return The stableBeamsStart. + */ + @java.lang.Override + public long getStableBeamsStart() { + return stableBeamsStart_; + } + + public static final int STABLEBEAMSEND_FIELD_NUMBER = 2; + private long stableBeamsEnd_ = 0L; + /** + *
+     * milliseconds since epoch when stable beams ended
+     * 
+ * + * int64 stableBeamsEnd = 2; + * @return The stableBeamsEnd. + */ + @java.lang.Override + public long getStableBeamsEnd() { + return stableBeamsEnd_; + } + + public static final int FILLNUMBER_FIELD_NUMBER = 3; + private int fillNumber_ = 0; + /** + *
+     * LHC fill number
+     * 
+ * + * int32 fillNumber = 3; + * @return The fillNumber. + */ + @java.lang.Override + public int getFillNumber() { + return fillNumber_; + } + + public static final int FILLINGSCHEMENAME_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private volatile java.lang.Object fillingSchemeName_ = ""; + /** + *
+     * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+     * 
+ * + * string fillingSchemeName = 4; + * @return The fillingSchemeName. + */ + @java.lang.Override + public java.lang.String getFillingSchemeName() { + java.lang.Object ref = fillingSchemeName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fillingSchemeName_ = s; + return s; + } + } + /** + *
+     * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+     * 
+ * + * string fillingSchemeName = 4; + * @return The bytes for fillingSchemeName. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getFillingSchemeNameBytes() { + java.lang.Object ref = fillingSchemeName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fillingSchemeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int BEAM1ENERGY_FIELD_NUMBER = 5; + private float beam1Energy_ = 0F; + /** + *
+     * in GeV
+     * 
+ * + * float beam1Energy = 5; + * @return The beam1Energy. + */ + @java.lang.Override + public float getBeam1Energy() { + return beam1Energy_; + } + + public static final int BEAM2ENERGY_FIELD_NUMBER = 6; + private float beam2Energy_ = 0F; + /** + *
+     * in GeV
+     * 
+ * + * float beam2Energy = 6; + * @return The beam2Energy. + */ + @java.lang.Override + public float getBeam2Energy() { + return beam2Energy_; + } + + public static final int BEAMTYPE_FIELD_NUMBER = 7; + @SuppressWarnings("serial") + private volatile java.lang.Object beamType_ = ""; + /** + *
+     * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+     * 
+ * + * string beamType = 7; + * @return The beamType. + */ + @java.lang.Override + public java.lang.String getBeamType() { + java.lang.Object ref = beamType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + beamType_ = s; + return s; + } + } + /** + *
+     * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+     * 
+ * + * string beamType = 7; + * @return The bytes for beamType. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getBeamTypeBytes() { + java.lang.Object ref = beamType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + beamType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int BEAMMODE_FIELD_NUMBER = 8; + private int beamMode_ = 0; + /** + * .common.BeamMode beamMode = 8; + * @return The enum numeric value on the wire for beamMode. + */ + @java.lang.Override public int getBeamModeValue() { + return beamMode_; + } + /** + * .common.BeamMode beamMode = 8; + * @return The beamMode. + */ + @java.lang.Override public alice.dip.kafka.events.Common.BeamMode getBeamMode() { + alice.dip.kafka.events.Common.BeamMode result = alice.dip.kafka.events.Common.BeamMode.forNumber(beamMode_); + return result == null ? alice.dip.kafka.events.Common.BeamMode.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (stableBeamsStart_ != 0L) { + output.writeInt64(1, stableBeamsStart_); + } + if (stableBeamsEnd_ != 0L) { + output.writeInt64(2, stableBeamsEnd_); + } + if (fillNumber_ != 0) { + output.writeInt32(3, fillNumber_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(fillingSchemeName_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 4, fillingSchemeName_); + } + if (java.lang.Float.floatToRawIntBits(beam1Energy_) != 0) { + output.writeFloat(5, beam1Energy_); + } + if 
(java.lang.Float.floatToRawIntBits(beam2Energy_) != 0) { + output.writeFloat(6, beam2Energy_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(beamType_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 7, beamType_); + } + if (beamMode_ != alice.dip.kafka.events.Common.BeamMode.UNKNOWN.getNumber()) { + output.writeEnum(8, beamMode_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (stableBeamsStart_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, stableBeamsStart_); + } + if (stableBeamsEnd_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, stableBeamsEnd_); + } + if (fillNumber_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, fillNumber_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(fillingSchemeName_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(4, fillingSchemeName_); + } + if (java.lang.Float.floatToRawIntBits(beam1Energy_) != 0) { + size += com.google.protobuf.CodedOutputStream + .computeFloatSize(5, beam1Energy_); + } + if (java.lang.Float.floatToRawIntBits(beam2Energy_) != 0) { + size += com.google.protobuf.CodedOutputStream + .computeFloatSize(6, beam2Energy_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(beamType_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(7, beamType_); + } + if (beamMode_ != alice.dip.kafka.events.Common.BeamMode.UNKNOWN.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(8, beamMode_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Common.BeamInfo)) { + return 
super.equals(obj); + } + alice.dip.kafka.events.Common.BeamInfo other = (alice.dip.kafka.events.Common.BeamInfo) obj; + + if (getStableBeamsStart() + != other.getStableBeamsStart()) return false; + if (getStableBeamsEnd() + != other.getStableBeamsEnd()) return false; + if (getFillNumber() + != other.getFillNumber()) return false; + if (!getFillingSchemeName() + .equals(other.getFillingSchemeName())) return false; + if (java.lang.Float.floatToIntBits(getBeam1Energy()) + != java.lang.Float.floatToIntBits( + other.getBeam1Energy())) return false; + if (java.lang.Float.floatToIntBits(getBeam2Energy()) + != java.lang.Float.floatToIntBits( + other.getBeam2Energy())) return false; + if (!getBeamType() + .equals(other.getBeamType())) return false; + if (beamMode_ != other.beamMode_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + STABLEBEAMSSTART_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStableBeamsStart()); + hash = (37 * hash) + STABLEBEAMSEND_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStableBeamsEnd()); + hash = (37 * hash) + FILLNUMBER_FIELD_NUMBER; + hash = (53 * hash) + getFillNumber(); + hash = (37 * hash) + FILLINGSCHEMENAME_FIELD_NUMBER; + hash = (53 * hash) + getFillingSchemeName().hashCode(); + hash = (37 * hash) + BEAM1ENERGY_FIELD_NUMBER; + hash = (53 * hash) + java.lang.Float.floatToIntBits( + getBeam1Energy()); + hash = (37 * hash) + BEAM2ENERGY_FIELD_NUMBER; + hash = (53 * hash) + java.lang.Float.floatToIntBits( + getBeam2Energy()); + hash = (37 * hash) + BEAMTYPE_FIELD_NUMBER; + hash = (53 * hash) + getBeamType().hashCode(); + hash = (37 * hash) + BEAMMODE_FIELD_NUMBER; + hash = (53 * hash) + beamMode_; + hash = (29 * hash) 
+ getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public 
static alice.dip.kafka.events.Common.BeamInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Common.BeamInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Common.BeamInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Common.BeamInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * *
+     * Beam information at a specific point in time (e.g. start or end of stable beams)
+     * 
+ * + * Protobuf type {@code common.BeamInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:common.BeamInfo) + alice.dip.kafka.events.Common.BeamInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Common.BeamInfo.class, alice.dip.kafka.events.Common.BeamInfo.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.common.Common.BeamInfo.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + stableBeamsStart_ = 0L; + stableBeamsEnd_ = 0L; + fillNumber_ = 0; + fillingSchemeName_ = ""; + beam1Energy_ = 0F; + beam2Energy_ = 0F; + beamType_ = ""; + beamMode_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.BeamInfo getDefaultInstanceForType() { + return alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Common.BeamInfo build() { + alice.dip.kafka.events.Common.BeamInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.BeamInfo 
buildPartial() { + alice.dip.kafka.events.Common.BeamInfo result = new alice.dip.kafka.events.Common.BeamInfo(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Common.BeamInfo result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.stableBeamsStart_ = stableBeamsStart_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.stableBeamsEnd_ = stableBeamsEnd_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.fillNumber_ = fillNumber_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.fillingSchemeName_ = fillingSchemeName_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.beam1Energy_ = beam1Energy_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.beam2Energy_ = beam2Energy_; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.beamType_ = beamType_; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.beamMode_ = beamMode_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Common.BeamInfo) { + return mergeFrom((alice.dip.kafka.events.Common.BeamInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Common.BeamInfo other) { + if (other == alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance()) return this; + if (other.getStableBeamsStart() != 0L) { + setStableBeamsStart(other.getStableBeamsStart()); + } + if (other.getStableBeamsEnd() != 0L) { + setStableBeamsEnd(other.getStableBeamsEnd()); + } + if (other.getFillNumber() != 0) { + setFillNumber(other.getFillNumber()); + } + if (!other.getFillingSchemeName().isEmpty()) { + fillingSchemeName_ = other.fillingSchemeName_; + bitField0_ |= 0x00000008; + onChanged(); + } + if (java.lang.Float.floatToRawIntBits(other.getBeam1Energy()) != 0) { + 
setBeam1Energy(other.getBeam1Energy()); + } + if (java.lang.Float.floatToRawIntBits(other.getBeam2Energy()) != 0) { + setBeam2Energy(other.getBeam2Energy()); + } + if (!other.getBeamType().isEmpty()) { + beamType_ = other.beamType_; + bitField0_ |= 0x00000040; + onChanged(); + } + if (other.beamMode_ != 0) { + setBeamModeValue(other.getBeamModeValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + stableBeamsStart_ = input.readInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 16: { + stableBeamsEnd_ = input.readInt64(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 24: { + fillNumber_ = input.readInt32(); + bitField0_ |= 0x00000004; + break; + } // case 24 + case 34: { + fillingSchemeName_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 45: { + beam1Energy_ = input.readFloat(); + bitField0_ |= 0x00000010; + break; + } // case 45 + case 53: { + beam2Energy_ = input.readFloat(); + bitField0_ |= 0x00000020; + break; + } // case 53 + case 58: { + beamType_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 64: { + beamMode_ = input.readEnum(); + bitField0_ |= 0x00000080; + break; + } // case 64 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private long stableBeamsStart_ ; + /** + *
+       * milliseconds since epoch when stable beams started
+       * 
+ * + * int64 stableBeamsStart = 1; + * @return The stableBeamsStart. + */ + @java.lang.Override + public long getStableBeamsStart() { + return stableBeamsStart_; + } + /** + *
+       * milliseconds since epoch when stable beams started
+       * 
+ * + * int64 stableBeamsStart = 1; + * @param value The stableBeamsStart to set. + * @return This builder for chaining. + */ + public Builder setStableBeamsStart(long value) { + + stableBeamsStart_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * milliseconds since epoch when stable beams started
+       * 
+ * + * int64 stableBeamsStart = 1; + * @return This builder for chaining. + */ + public Builder clearStableBeamsStart() { + bitField0_ = (bitField0_ & ~0x00000001); + stableBeamsStart_ = 0L; + onChanged(); + return this; + } + + private long stableBeamsEnd_ ; + /** + *
+       * milliseconds since epoch when stable beams ended
+       * 
+ * + * int64 stableBeamsEnd = 2; + * @return The stableBeamsEnd. + */ + @java.lang.Override + public long getStableBeamsEnd() { + return stableBeamsEnd_; + } + /** + *
+       * milliseconds since epoch when stable beams ended
+       * 
+ * + * int64 stableBeamsEnd = 2; + * @param value The stableBeamsEnd to set. + * @return This builder for chaining. + */ + public Builder setStableBeamsEnd(long value) { + + stableBeamsEnd_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * milliseconds since epoch when stable beams ended
+       * 
+ * + * int64 stableBeamsEnd = 2; + * @return This builder for chaining. + */ + public Builder clearStableBeamsEnd() { + bitField0_ = (bitField0_ & ~0x00000002); + stableBeamsEnd_ = 0L; + onChanged(); + return this; + } + + private int fillNumber_ ; + /** + *
+       * LHC fill number
+       * 
+ * + * int32 fillNumber = 3; + * @return The fillNumber. + */ + @java.lang.Override + public int getFillNumber() { + return fillNumber_; + } + /** + *
+       * LHC fill number
+       * 
+ * + * int32 fillNumber = 3; + * @param value The fillNumber to set. + * @return This builder for chaining. + */ + public Builder setFillNumber(int value) { + + fillNumber_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * LHC fill number
+       * 
+ * + * int32 fillNumber = 3; + * @return This builder for chaining. + */ + public Builder clearFillNumber() { + bitField0_ = (bitField0_ & ~0x00000004); + fillNumber_ = 0; + onChanged(); + return this; + } + + private java.lang.Object fillingSchemeName_ = ""; + /** + *
+       * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+       * 
+ * + * string fillingSchemeName = 4; + * @return The fillingSchemeName. + */ + public java.lang.String getFillingSchemeName() { + java.lang.Object ref = fillingSchemeName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fillingSchemeName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+       * 
+ * + * string fillingSchemeName = 4; + * @return The bytes for fillingSchemeName. + */ + public com.google.protobuf.ByteString + getFillingSchemeNameBytes() { + java.lang.Object ref = fillingSchemeName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fillingSchemeName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+       * 
+ * + * string fillingSchemeName = 4; + * @param value The fillingSchemeName to set. + * @return This builder for chaining. + */ + public Builder setFillingSchemeName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + fillingSchemeName_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+       * 
+ * + * string fillingSchemeName = 4; + * @return This builder for chaining. + */ + public Builder clearFillingSchemeName() { + fillingSchemeName_ = getDefaultInstance().getFillingSchemeName(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+       * LHC filling scheme name e.g. 25ns_2460b_2448_2089_2227_144bpi_20inj
+       * 
+ * + * string fillingSchemeName = 4; + * @param value The bytes for fillingSchemeName to set. + * @return This builder for chaining. + */ + public Builder setFillingSchemeNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + fillingSchemeName_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private float beam1Energy_ ; + /** + *
+       * in GeV
+       * 
+ * + * float beam1Energy = 5; + * @return The beam1Energy. + */ + @java.lang.Override + public float getBeam1Energy() { + return beam1Energy_; + } + /** + *
+       * in GeV
+       * 
+ * + * float beam1Energy = 5; + * @param value The beam1Energy to set. + * @return This builder for chaining. + */ + public Builder setBeam1Energy(float value) { + + beam1Energy_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       * in GeV
+       * 
+ * + * float beam1Energy = 5; + * @return This builder for chaining. + */ + public Builder clearBeam1Energy() { + bitField0_ = (bitField0_ & ~0x00000010); + beam1Energy_ = 0F; + onChanged(); + return this; + } + + private float beam2Energy_ ; + /** + *
+       * in GeV
+       * 
+ * + * float beam2Energy = 6; + * @return The beam2Energy. + */ + @java.lang.Override + public float getBeam2Energy() { + return beam2Energy_; + } + /** + *
+       * in GeV
+       * 
+ * + * float beam2Energy = 6; + * @param value The beam2Energy to set. + * @return This builder for chaining. + */ + public Builder setBeam2Energy(float value) { + + beam2Energy_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * in GeV
+       * 
+ * + * float beam2Energy = 6; + * @return This builder for chaining. + */ + public Builder clearBeam2Energy() { + bitField0_ = (bitField0_ & ~0x00000020); + beam2Energy_ = 0F; + onChanged(); + return this; + } + + private java.lang.Object beamType_ = ""; + /** + *
+       * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+       * 
+ * + * string beamType = 7; + * @return The beamType. + */ + public java.lang.String getBeamType() { + java.lang.Object ref = beamType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + beamType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+       * 
+ * + * string beamType = 7; + * @return The bytes for beamType. + */ + public com.google.protobuf.ByteString + getBeamTypeBytes() { + java.lang.Object ref = beamType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + beamType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+       * 
+ * + * string beamType = 7; + * @param value The beamType to set. + * @return This builder for chaining. + */ + public Builder setBeamType( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + beamType_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + *
+       * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+       * 
+ * + * string beamType = 7; + * @return This builder for chaining. + */ + public Builder clearBeamType() { + beamType_ = getDefaultInstance().getBeamType(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + *
+       * e.g. PROTON-PROTON, O8-O8, Pb-Pb, p-Pb, Pb-p
+       * 
+ * + * string beamType = 7; + * @param value The bytes for beamType to set. + * @return This builder for chaining. + */ + public Builder setBeamTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + beamType_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + + private int beamMode_ = 0; + /** + * .common.BeamMode beamMode = 8; + * @return The enum numeric value on the wire for beamMode. + */ + @java.lang.Override public int getBeamModeValue() { + return beamMode_; + } + /** + * .common.BeamMode beamMode = 8; + * @param value The enum numeric value on the wire for beamMode to set. + * @return This builder for chaining. + */ + public Builder setBeamModeValue(int value) { + beamMode_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * .common.BeamMode beamMode = 8; + * @return The beamMode. + */ + @java.lang.Override + public alice.dip.kafka.events.Common.BeamMode getBeamMode() { + alice.dip.kafka.events.Common.BeamMode result = alice.dip.kafka.events.Common.BeamMode.forNumber(beamMode_); + return result == null ? alice.dip.kafka.events.Common.BeamMode.UNRECOGNIZED : result; + } + /** + * .common.BeamMode beamMode = 8; + * @param value The beamMode to set. + * @return This builder for chaining. + */ + public Builder setBeamMode(alice.dip.kafka.events.Common.BeamMode value) { + if (value == null) { throw new NullPointerException(); } + bitField0_ |= 0x00000080; + beamMode_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .common.BeamMode beamMode = 8; + * @return This builder for chaining. 
+ */ + public Builder clearBeamMode() { + bitField0_ = (bitField0_ & ~0x00000080); + beamMode_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:common.BeamInfo) + } + + // @@protoc_insertion_point(class_scope:common.BeamInfo) + private static final alice.dip.kafka.events.Common.BeamInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Common.BeamInfo(); + } + + public static alice.dip.kafka.events.Common.BeamInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public BeamInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Common.BeamInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_common_User_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_common_User_fieldAccessorTable; + 
private static final com.google.protobuf.Descriptors.Descriptor + internal_static_common_WorkflowTemplateInfo_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_common_WorkflowTemplateInfo_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_common_BeamInfo_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_common_BeamInfo_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014common.proto\022\006common\"T\n\004User\022\027\n\nextern" + + "alId\030\001 \001(\005H\000\210\001\001\022\017\n\002id\030\002 \001(\005H\001\210\001\001\022\014\n\004name" + + "\030\003 \001(\tB\r\n\013_externalIdB\005\n\003_id\"W\n\024Workflow" + + "TemplateInfo\022\014\n\004name\030\001 \001(\t\022\023\n\013descriptio" + + "n\030\002 \001(\t\022\014\n\004path\030\003 \001(\t\022\016\n\006public\030\004 \001(\010\"\313\001" + + "\n\010BeamInfo\022\030\n\020stableBeamsStart\030\001 \001(\003\022\026\n\016" + + "stableBeamsEnd\030\002 \001(\003\022\022\n\nfillNumber\030\003 \001(\005" + + "\022\031\n\021fillingSchemeName\030\004 \001(\t\022\023\n\013beam1Ener" + + "gy\030\005 \001(\002\022\023\n\013beam2Energy\030\006 \001(\002\022\020\n\010beamTyp" + + "e\030\007 \001(\t\022\"\n\010beamMode\030\010 \001(\0162\020.common.BeamM" + + "ode*\200\003\n\010BeamMode\022\013\n\007UNKNOWN\020\000\022\t\n\005SETUP\020\001" + + "\022\t\n\005ABORT\020\002\022\030\n\024INJECTION_PROBE_BEAM\020\003\022\030\n" + + "\024INJECTION_SETUP_BEAM\020\004\022\032\n\026INJECTION_PHY" + + "SICS_BEAM\020\005\022\020\n\014PREPARE_RAMP\020\006\022\010\n\004RAMP\020\007\022" + + 
"\014\n\010FLAT_TOP\020\010\022\013\n\007SQUEEZE\020\t\022\n\n\006ADJUST\020\n\022\020" + + "\n\014STABLE_BEAMS\020\013\022\016\n\nLOST_BEAMS\020\014\022\022\n\016UNST" + + "ABLE_BEAMS\020\r\022\025\n\021BEAM_DUMP_WARNING\020\016\022\r\n\tB" + + "EAM_DUMP\020\017\022\r\n\tRAMP_DOWN\020\020\022\013\n\007CYCLING\020\021\022\014" + + "\n\010RECOVERY\020\022\022\023\n\017INJECT_AND_DUMP\020\023\022\026\n\022CIR" + + "CULATE_AND_DUMP\020\024\022\013\n\007NO_BEAM\020\025BS\n\037ch.cer" + + "n.alice.o2.control.commonZ0github.com/Al" + + "iceO2Group/Control/common/protos;pbb\006pro" + + "to3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_common_User_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_common_User_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_common_User_descriptor, + new java.lang.String[] { "ExternalId", "Id", "Name", }); + internal_static_common_WorkflowTemplateInfo_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_common_WorkflowTemplateInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_common_WorkflowTemplateInfo_descriptor, + new java.lang.String[] { "Name", "Description", "Path", "Public", }); + internal_static_common_BeamInfo_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_common_BeamInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_common_BeamInfo_descriptor, + new java.lang.String[] { "StableBeamsStart", "StableBeamsEnd", "FillNumber", "FillingSchemeName", "Beam1Energy", "Beam2Energy", "BeamType", "BeamMode", }); + descriptor.resolveAllFeaturesImmutable(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git 
a/src/alice/dip/kafka/events/Events.java b/src/alice/dip/kafka/events/Events.java new file mode 100644 index 0000000..57aa7bc --- /dev/null +++ b/src/alice/dip/kafka/events/Events.java @@ -0,0 +1,16603 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: events.proto +// Protobuf Java Version: 4.32.1 + +package alice.dip.kafka.events; + +@com.google.protobuf.Generated +public final class Events extends com.google.protobuf.GeneratedFile { + private Events() {} + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Events.class.getName()); + } + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + /** + * Protobuf enum {@code events.OpStatus} + */ + public enum OpStatus + implements com.google.protobuf.ProtocolMessageEnum { + /** + * NULL = 0; + */ + NULL(0), + /** + * STARTED = 1; + */ + STARTED(1), + /** + * ONGOING = 2; + */ + ONGOING(2), + /** + * DONE_OK = 3; + */ + DONE_OK(3), + /** + * DONE_ERROR = 4; + */ + DONE_ERROR(4), + /** + * DONE_TIMEOUT = 5; + */ + DONE_TIMEOUT(5), + UNRECOGNIZED(-1), + ; + + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + OpStatus.class.getName()); + } + /** + * NULL = 0; + */ + public static final int NULL_VALUE = 0; + /** + * STARTED = 1; + */ + public static final int STARTED_VALUE = 1; + /** + * ONGOING = 2; + */ + public static final int ONGOING_VALUE = 2; + /** + * DONE_OK = 3; + */ + public static final int 
DONE_OK_VALUE = 3; + /** + * DONE_ERROR = 4; + */ + public static final int DONE_ERROR_VALUE = 4; + /** + * DONE_TIMEOUT = 5; + */ + public static final int DONE_TIMEOUT_VALUE = 5; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static OpStatus valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static OpStatus forNumber(int value) { + switch (value) { + case 0: return NULL; + case 1: return STARTED; + case 2: return ONGOING; + case 3: return DONE_OK; + case 4: return DONE_ERROR; + case 5: return DONE_TIMEOUT; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + OpStatus> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public OpStatus findValueByNumber(int number) { + return OpStatus.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
alice.dip.kafka.events.Events.getDescriptor().getEnumTypes().get(0); + } + + private static final OpStatus[] VALUES = values(); + + public static OpStatus valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private OpStatus(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:events.OpStatus) + } + + public interface Ev_MetaEvent_MesosHeartbeatOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_MetaEvent_MesosHeartbeat) + com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code events.Ev_MetaEvent_MesosHeartbeat} + */ + public static final class Ev_MetaEvent_MesosHeartbeat extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_MetaEvent_MesosHeartbeat) + Ev_MetaEvent_MesosHeartbeatOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_MetaEvent_MesosHeartbeat.class.getName()); + } + // Use Ev_MetaEvent_MesosHeartbeat.newBuilder() to construct. 
+ private Ev_MetaEvent_MesosHeartbeat(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_MetaEvent_MesosHeartbeat() { + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.class, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat other = (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return 
memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static 
alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_MetaEvent_MesosHeartbeat} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_MetaEvent_MesosHeartbeat) + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.class, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_MetaEvent_MesosHeartbeat.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } + + @java.lang.Override + public 
alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat build() { + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat buildPartial() { + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat result = new alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat other) { + if (other == alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + + // 
@@protoc_insertion_point(builder_scope:events.Ev_MetaEvent_MesosHeartbeat) + } + + // @@protoc_insertion_point(class_scope:events.Ev_MetaEvent_MesosHeartbeat) + private static final alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat(); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_MetaEvent_MesosHeartbeat parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_MetaEvent_CoreStartOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_MetaEvent_CoreStart) + com.google.protobuf.MessageOrBuilder { + + /** + * string frameworkId = 1; + * @return The frameworkId. 
+ */ + java.lang.String getFrameworkId(); + /** + * string frameworkId = 1; + * @return The bytes for frameworkId. + */ + com.google.protobuf.ByteString + getFrameworkIdBytes(); + } + /** + * Protobuf type {@code events.Ev_MetaEvent_CoreStart} + */ + public static final class Ev_MetaEvent_CoreStart extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_MetaEvent_CoreStart) + Ev_MetaEvent_CoreStartOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_MetaEvent_CoreStart.class.getName()); + } + // Use Ev_MetaEvent_CoreStart.newBuilder() to construct. + private Ev_MetaEvent_CoreStart(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_MetaEvent_CoreStart() { + frameworkId_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.class, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder.class); + } + + public static final int FRAMEWORKID_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object frameworkId_ = ""; + /** + * string frameworkId = 1; + * @return The frameworkId. 
+ */ + @java.lang.Override + public java.lang.String getFrameworkId() { + java.lang.Object ref = frameworkId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + frameworkId_ = s; + return s; + } + } + /** + * string frameworkId = 1; + * @return The bytes for frameworkId. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getFrameworkIdBytes() { + java.lang.Object ref = frameworkId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + frameworkId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(frameworkId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, frameworkId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(frameworkId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, frameworkId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart)) { + 
return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart other = (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) obj; + + if (!getFrameworkId() + .equals(other.getFrameworkId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + FRAMEWORKID_FIELD_NUMBER; + hash = (53 * hash) + getFrameworkId().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + byte[] 
data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_MetaEvent_CoreStart} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_MetaEvent_CoreStart) + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.class, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_MetaEvent_CoreStart.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + frameworkId_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart build() { + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart buildPartial() { + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart result = new alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.frameworkId_ = frameworkId_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart other) { + if (other == alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance()) return this; + if (!other.getFrameworkId().isEmpty()) { + frameworkId_ = other.frameworkId_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + frameworkId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object frameworkId_ = ""; + /** + * string frameworkId = 1; + * @return The frameworkId. + */ + public java.lang.String getFrameworkId() { + java.lang.Object ref = frameworkId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + frameworkId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string frameworkId = 1; + * @return The bytes for frameworkId. + */ + public com.google.protobuf.ByteString + getFrameworkIdBytes() { + java.lang.Object ref = frameworkId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + frameworkId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string frameworkId = 1; + * @param value The frameworkId to set. + * @return This builder for chaining. 
+ */ + public Builder setFrameworkId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + frameworkId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string frameworkId = 1; + * @return This builder for chaining. + */ + public Builder clearFrameworkId() { + frameworkId_ = getDefaultInstance().getFrameworkId(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string frameworkId = 1; + * @param value The bytes for frameworkId to set. + * @return This builder for chaining. + */ + public Builder setFrameworkIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + frameworkId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_MetaEvent_CoreStart) + } + + // @@protoc_insertion_point(class_scope:events.Ev_MetaEvent_CoreStart) + private static final alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart(); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_MetaEvent_CoreStart parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_MetaEvent_FrameworkEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_MetaEvent_FrameworkEvent) + com.google.protobuf.MessageOrBuilder { + + /** + * string frameworkId = 1; + * @return The frameworkId. + */ + java.lang.String getFrameworkId(); + /** + * string frameworkId = 1; + * @return The bytes for frameworkId. + */ + com.google.protobuf.ByteString + getFrameworkIdBytes(); + + /** + * string message = 2; + * @return The message. + */ + java.lang.String getMessage(); + /** + * string message = 2; + * @return The bytes for message. + */ + com.google.protobuf.ByteString + getMessageBytes(); + } + /** + * Protobuf type {@code events.Ev_MetaEvent_FrameworkEvent} + */ + public static final class Ev_MetaEvent_FrameworkEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_MetaEvent_FrameworkEvent) + Ev_MetaEvent_FrameworkEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_MetaEvent_FrameworkEvent.class.getName()); + } + // Use Ev_MetaEvent_FrameworkEvent.newBuilder() to construct. 
+ private Ev_MetaEvent_FrameworkEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_MetaEvent_FrameworkEvent() { + frameworkId_ = ""; + message_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.class, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder.class); + } + + public static final int FRAMEWORKID_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object frameworkId_ = ""; + /** + * string frameworkId = 1; + * @return The frameworkId. + */ + @java.lang.Override + public java.lang.String getFrameworkId() { + java.lang.Object ref = frameworkId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + frameworkId_ = s; + return s; + } + } + /** + * string frameworkId = 1; + * @return The bytes for frameworkId. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getFrameworkIdBytes() { + java.lang.Object ref = frameworkId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + frameworkId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int MESSAGE_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object message_ = ""; + /** + * string message = 2; + * @return The message. + */ + @java.lang.Override + public java.lang.String getMessage() { + java.lang.Object ref = message_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + message_ = s; + return s; + } + } + /** + * string message = 2; + * @return The bytes for message. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(frameworkId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, frameworkId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(message_)) { + 
com.google.protobuf.GeneratedMessage.writeString(output, 2, message_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(frameworkId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, frameworkId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(message_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, message_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent other = (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) obj; + + if (!getFrameworkId() + .equals(other.getFrameworkId())) return false; + if (!getMessage() + .equals(other.getMessage())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + FRAMEWORKID_FIELD_NUMBER; + hash = (53 * hash) + getFrameworkId().hashCode(); + hash = (37 * hash) + MESSAGE_FIELD_NUMBER; + hash = (53 * hash) + getMessage().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_MetaEvent_FrameworkEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_MetaEvent_FrameworkEvent) + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.class, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_MetaEvent_FrameworkEvent.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + frameworkId_ = ""; + message_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } + + 
@java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent build() { + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent result = new alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.frameworkId_ = frameworkId_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.message_ = message_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance()) return this; + if (!other.getFrameworkId().isEmpty()) { + frameworkId_ = other.frameworkId_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getMessage().isEmpty()) { + message_ = other.message_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + frameworkId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + message_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object frameworkId_ = ""; + /** + * string frameworkId = 1; + * @return The frameworkId. + */ + public java.lang.String getFrameworkId() { + java.lang.Object ref = frameworkId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + frameworkId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string frameworkId = 1; + * @return The bytes for frameworkId. + */ + public com.google.protobuf.ByteString + getFrameworkIdBytes() { + java.lang.Object ref = frameworkId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + frameworkId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string frameworkId = 1; + * @param value The frameworkId to set. + * @return This builder for chaining. 
+ */ + public Builder setFrameworkId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + frameworkId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string frameworkId = 1; + * @return This builder for chaining. + */ + public Builder clearFrameworkId() { + frameworkId_ = getDefaultInstance().getFrameworkId(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string frameworkId = 1; + * @param value The bytes for frameworkId to set. + * @return This builder for chaining. + */ + public Builder setFrameworkIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + frameworkId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object message_ = ""; + /** + * string message = 2; + * @return The message. + */ + public java.lang.String getMessage() { + java.lang.Object ref = message_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + message_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string message = 2; + * @return The bytes for message. + */ + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string message = 2; + * @param value The message to set. + * @return This builder for chaining. 
+ */ + public Builder setMessage( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + message_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string message = 2; + * @return This builder for chaining. + */ + public Builder clearMessage() { + message_ = getDefaultInstance().getMessage(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string message = 2; + * @param value The bytes for message to set. + * @return This builder for chaining. + */ + public Builder setMessageBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + message_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_MetaEvent_FrameworkEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_MetaEvent_FrameworkEvent) + private static final alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent(); + } + + public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_MetaEvent_FrameworkEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_EnvironmentEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_EnvironmentEvent) + com.google.protobuf.MessageOrBuilder { + + /** + * string environmentId = 1; + * @return The environmentId. + */ + java.lang.String getEnvironmentId(); + /** + * string environmentId = 1; + * @return The bytes for environmentId. + */ + com.google.protobuf.ByteString + getEnvironmentIdBytes(); + + /** + * string state = 2; + * @return The state. + */ + java.lang.String getState(); + /** + * string state = 2; + * @return The bytes for state. + */ + com.google.protobuf.ByteString + getStateBytes(); + + /** + *
+     * only when the environment is in the running state
+     * 
+ * + * uint32 runNumber = 3; + * @return The runNumber. + */ + int getRunNumber(); + + /** + * string error = 4; + * @return The error. + */ + java.lang.String getError(); + /** + * string error = 4; + * @return The bytes for error. + */ + com.google.protobuf.ByteString + getErrorBytes(); + + /** + *
+     * any additional message concerning the current state or transition
+     * 
+ * + * string message = 5; + * @return The message. + */ + java.lang.String getMessage(); + /** + *
+     * any additional message concerning the current state or transition
+     * 
+ * + * string message = 5; + * @return The bytes for message. + */ + com.google.protobuf.ByteString + getMessageBytes(); + + /** + * string transition = 6; + * @return The transition. + */ + java.lang.String getTransition(); + /** + * string transition = 6; + * @return The bytes for transition. + */ + com.google.protobuf.ByteString + getTransitionBytes(); + + /** + * string transitionStep = 7; + * @return The transitionStep. + */ + java.lang.String getTransitionStep(); + /** + * string transitionStep = 7; + * @return The bytes for transitionStep. + */ + com.google.protobuf.ByteString + getTransitionStepBytes(); + + /** + * .events.OpStatus transitionStatus = 8; + * @return The enum numeric value on the wire for transitionStatus. + */ + int getTransitionStatusValue(); + /** + * .events.OpStatus transitionStatus = 8; + * @return The transitionStatus. + */ + alice.dip.kafka.events.Events.OpStatus getTransitionStatus(); + + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + int getVarsCount(); + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + boolean containsVars( + java.lang.String key); + /** + * Use {@link #getVarsMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getVars(); + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + java.util.Map + getVarsMap(); + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + /* nullable */ +java.lang.String getVarsOrDefault( + java.lang.String key, + /* nullable */ +java.lang.String defaultValue); + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + java.lang.String getVarsOrThrow( + java.lang.String key); + + /** + * .common.User lastRequestUser = 10; + * @return Whether the lastRequestUser field is set. + */ + boolean hasLastRequestUser(); + /** + * .common.User lastRequestUser = 10; + * @return The lastRequestUser. + */ + alice.dip.kafka.events.Common.User getLastRequestUser(); + /** + * .common.User lastRequestUser = 10; + */ + alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder(); + + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + * @return Whether the workflowTemplateInfo field is set. + */ + boolean hasWorkflowTemplateInfo(); + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + * @return The workflowTemplateInfo. + */ + alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInfo(); + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder(); + } + /** + * Protobuf type {@code events.Ev_EnvironmentEvent} + */ + public static final class Ev_EnvironmentEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_EnvironmentEvent) + Ev_EnvironmentEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_EnvironmentEvent.class.getName()); + } + // Use Ev_EnvironmentEvent.newBuilder() to construct. 
+ private Ev_EnvironmentEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_EnvironmentEvent() { + environmentId_ = ""; + state_ = ""; + error_ = ""; + message_ = ""; + transition_ = ""; + transitionStep_ = ""; + transitionStatus_ = 0; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( + int number) { + switch (number) { + case 9: + return internalGetVars(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_EnvironmentEvent.class, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder.class); + } + + private int bitField0_; + public static final int ENVIRONMENTID_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object environmentId_ = ""; + /** + * string environmentId = 1; + * @return The environmentId. + */ + @java.lang.Override + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } + } + /** + * string environmentId = 1; + * @return The bytes for environmentId. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATE_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object state_ = ""; + /** + * string state = 2; + * @return The state. + */ + @java.lang.Override + public java.lang.String getState() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } + } + /** + * string state = 2; + * @return The bytes for state. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int RUNNUMBER_FIELD_NUMBER = 3; + private int runNumber_ = 0; + /** + *
+     * only when the environment is in the running state
+     * 
+ * + * uint32 runNumber = 3; + * @return The runNumber. + */ + @java.lang.Override + public int getRunNumber() { + return runNumber_; + } + + public static final int ERROR_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private volatile java.lang.Object error_ = ""; + /** + * string error = 4; + * @return The error. + */ + @java.lang.Override + public java.lang.String getError() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } + } + /** + * string error = 4; + * @return The bytes for error. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int MESSAGE_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object message_ = ""; + /** + *
+     * any additional message concerning the current state or transition
+     * 
+ * + * string message = 5; + * @return The message. + */ + @java.lang.Override + public java.lang.String getMessage() { + java.lang.Object ref = message_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + message_ = s; + return s; + } + } + /** + *
+     * any additional message concerning the current state or transition
+     * 
+ * + * string message = 5; + * @return The bytes for message. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSITION_FIELD_NUMBER = 6; + @SuppressWarnings("serial") + private volatile java.lang.Object transition_ = ""; + /** + * string transition = 6; + * @return The transition. + */ + @java.lang.Override + public java.lang.String getTransition() { + java.lang.Object ref = transition_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + transition_ = s; + return s; + } + } + /** + * string transition = 6; + * @return The bytes for transition. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTransitionBytes() { + java.lang.Object ref = transition_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + transition_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSITIONSTEP_FIELD_NUMBER = 7; + @SuppressWarnings("serial") + private volatile java.lang.Object transitionStep_ = ""; + /** + * string transitionStep = 7; + * @return The transitionStep. 
+ */ + @java.lang.Override + public java.lang.String getTransitionStep() { + java.lang.Object ref = transitionStep_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + transitionStep_ = s; + return s; + } + } + /** + * string transitionStep = 7; + * @return The bytes for transitionStep. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTransitionStepBytes() { + java.lang.Object ref = transitionStep_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + transitionStep_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSITIONSTATUS_FIELD_NUMBER = 8; + private int transitionStatus_ = 0; + /** + * .events.OpStatus transitionStatus = 8; + * @return The enum numeric value on the wire for transitionStatus. + */ + @java.lang.Override public int getTransitionStatusValue() { + return transitionStatus_; + } + /** + * .events.OpStatus transitionStatus = 8; + * @return The transitionStatus. + */ + @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_); + return result == null ? 
alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + + public static final int VARS_FIELD_NUMBER = 9; + private static final class VarsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, java.lang.String> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_VarsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.STRING, + ""); + } + @SuppressWarnings("serial") + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> vars_; + private com.google.protobuf.MapField + internalGetVars() { + if (vars_ == null) { + return com.google.protobuf.MapField.emptyMapField( + VarsDefaultEntryHolder.defaultEntry); + } + return vars_; + } + public int getVarsCount() { + return internalGetVars().getMap().size(); + } + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public boolean containsVars( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetVars().getMap().containsKey(key); + } + /** + * Use {@link #getVarsMap()} instead. + */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getVars() { + return getVarsMap(); + } + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public java.util.Map getVarsMap() { + return internalGetVars().getMap(); + } + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public /* nullable */ +java.lang.String getVarsOrDefault( + java.lang.String key, + /* nullable */ +java.lang.String defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetVars().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+     * consolidated environment variables at the root role of the environment
+     * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public java.lang.String getVarsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetVars().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + public static final int LASTREQUESTUSER_FIELD_NUMBER = 10; + private alice.dip.kafka.events.Common.User lastRequestUser_; + /** + * .common.User lastRequestUser = 10; + * @return Whether the lastRequestUser field is set. + */ + @java.lang.Override + public boolean hasLastRequestUser() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .common.User lastRequestUser = 10; + * @return The lastRequestUser. + */ + @java.lang.Override + public alice.dip.kafka.events.Common.User getLastRequestUser() { + return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } + /** + * .common.User lastRequestUser = 10; + */ + @java.lang.Override + public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() { + return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } + + public static final int WORKFLOWTEMPLATEINFO_FIELD_NUMBER = 11; + private alice.dip.kafka.events.Common.WorkflowTemplateInfo workflowTemplateInfo_; + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + * @return Whether the workflowTemplateInfo field is set. + */ + @java.lang.Override + public boolean hasWorkflowTemplateInfo() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + * @return The workflowTemplateInfo. + */ + @java.lang.Override + public alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInfo() { + return workflowTemplateInfo_ == null ? 
alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_; + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + @java.lang.Override + public alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder() { + return workflowTemplateInfo_ == null ? alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, state_); + } + if (runNumber_ != 0) { + output.writeUInt32(3, runNumber_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 4, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(message_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, message_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transition_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 6, transition_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transitionStep_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 7, transitionStep_); + } + if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + output.writeEnum(8, transitionStatus_); + } + com.google.protobuf.GeneratedMessage + 
.serializeStringMapTo( + output, + internalGetVars(), + VarsDefaultEntryHolder.defaultEntry, + 9); + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(10, getLastRequestUser()); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(11, getWorkflowTemplateInfo()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, state_); + } + if (runNumber_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(3, runNumber_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(4, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(message_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, message_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transition_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(6, transition_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transitionStep_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(7, transitionStep_); + } + if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(8, transitionStatus_); + } + for (java.util.Map.Entry entry + : internalGetVars().getMap().entrySet()) { + com.google.protobuf.MapEntry + vars__ = VarsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(9, vars__); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, getLastRequestUser()); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(11, getWorkflowTemplateInfo()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_EnvironmentEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_EnvironmentEvent other = (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) obj; + + if (!getEnvironmentId() + .equals(other.getEnvironmentId())) return false; + if (!getState() + .equals(other.getState())) return false; + if (getRunNumber() + != other.getRunNumber()) return false; + if (!getError() + .equals(other.getError())) return false; + if (!getMessage() + .equals(other.getMessage())) return false; + if (!getTransition() + .equals(other.getTransition())) return false; + if (!getTransitionStep() + .equals(other.getTransitionStep())) return false; + if (transitionStatus_ != other.transitionStatus_) return false; + if (!internalGetVars().equals( + other.internalGetVars())) return false; + if (hasLastRequestUser() != other.hasLastRequestUser()) return false; + if (hasLastRequestUser()) { + if (!getLastRequestUser() + .equals(other.getLastRequestUser())) return false; + } + if (hasWorkflowTemplateInfo() != other.hasWorkflowTemplateInfo()) return false; + if (hasWorkflowTemplateInfo()) { + if (!getWorkflowTemplateInfo() + .equals(other.getWorkflowTemplateInfo())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + 
return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ENVIRONMENTID_FIELD_NUMBER; + hash = (53 * hash) + getEnvironmentId().hashCode(); + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + getState().hashCode(); + hash = (37 * hash) + RUNNUMBER_FIELD_NUMBER; + hash = (53 * hash) + getRunNumber(); + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + hash = (37 * hash) + MESSAGE_FIELD_NUMBER; + hash = (53 * hash) + getMessage().hashCode(); + hash = (37 * hash) + TRANSITION_FIELD_NUMBER; + hash = (53 * hash) + getTransition().hashCode(); + hash = (37 * hash) + TRANSITIONSTEP_FIELD_NUMBER; + hash = (53 * hash) + getTransitionStep().hashCode(); + hash = (37 * hash) + TRANSITIONSTATUS_FIELD_NUMBER; + hash = (53 * hash) + transitionStatus_; + if (!internalGetVars().getMap().isEmpty()) { + hash = (37 * hash) + VARS_FIELD_NUMBER; + hash = (53 * hash) + internalGetVars().hashCode(); + } + if (hasLastRequestUser()) { + hash = (37 * hash) + LASTREQUESTUSER_FIELD_NUMBER; + hash = (53 * hash) + getLastRequestUser().hashCode(); + } + if (hasWorkflowTemplateInfo()) { + hash = (37 * hash) + WORKFLOWTEMPLATEINFO_FIELD_NUMBER; + hash = (53 * hash) + getWorkflowTemplateInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } 
+ public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_EnvironmentEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_EnvironmentEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_EnvironmentEvent) + alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( + int number) { + switch (number) { + case 9: + return internalGetVars(); + default: + throw new RuntimeException( + "Invalid map field number: " + 
number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection( + int number) { + switch (number) { + case 9: + return internalGetMutableVars(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_EnvironmentEvent.class, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_EnvironmentEvent.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage + .alwaysUseFieldBuilders) { + internalGetLastRequestUserFieldBuilder(); + internalGetWorkflowTemplateInfoFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + environmentId_ = ""; + state_ = ""; + runNumber_ = 0; + error_ = ""; + message_ = ""; + transition_ = ""; + transitionStep_ = ""; + transitionStatus_ = 0; + internalGetMutableVars().clear(); + lastRequestUser_ = null; + if (lastRequestUserBuilder_ != null) { + lastRequestUserBuilder_.dispose(); + lastRequestUserBuilder_ = null; + } + workflowTemplateInfo_ = null; + if (workflowTemplateInfoBuilder_ != null) { + workflowTemplateInfoBuilder_.dispose(); + workflowTemplateInfoBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent build() { + alice.dip.kafka.events.Events.Ev_EnvironmentEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_EnvironmentEvent result = new alice.dip.kafka.events.Events.Ev_EnvironmentEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_EnvironmentEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.environmentId_ = environmentId_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.state_ = state_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.runNumber_ = runNumber_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.error_ = error_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.message_ = message_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.transition_ = transition_; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.transitionStep_ = transitionStep_; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.transitionStatus_ = transitionStatus_; + } + if (((from_bitField0_ & 0x00000100) != 0)) { + result.vars_ = internalGetVars(); + result.vars_.makeImmutable(); + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000200) != 0)) { + result.lastRequestUser_ = lastRequestUserBuilder_ == null + ? 
lastRequestUser_ + : lastRequestUserBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000400) != 0)) { + result.workflowTemplateInfo_ = workflowTemplateInfoBuilder_ == null + ? workflowTemplateInfo_ + : workflowTemplateInfoBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_EnvironmentEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_EnvironmentEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_EnvironmentEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance()) return this; + if (!other.getEnvironmentId().isEmpty()) { + environmentId_ = other.environmentId_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getState().isEmpty()) { + state_ = other.state_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (other.getRunNumber() != 0) { + setRunNumber(other.getRunNumber()); + } + if (!other.getError().isEmpty()) { + error_ = other.error_; + bitField0_ |= 0x00000008; + onChanged(); + } + if (!other.getMessage().isEmpty()) { + message_ = other.message_; + bitField0_ |= 0x00000010; + onChanged(); + } + if (!other.getTransition().isEmpty()) { + transition_ = other.transition_; + bitField0_ |= 0x00000020; + onChanged(); + } + if (!other.getTransitionStep().isEmpty()) { + transitionStep_ = other.transitionStep_; + bitField0_ |= 0x00000040; + onChanged(); + } + if (other.transitionStatus_ != 0) { + setTransitionStatusValue(other.getTransitionStatusValue()); + } + internalGetMutableVars().mergeFrom( + other.internalGetVars()); + bitField0_ |= 0x00000100; + if (other.hasLastRequestUser()) { + mergeLastRequestUser(other.getLastRequestUser()); + } + if (other.hasWorkflowTemplateInfo()) { + 
mergeWorkflowTemplateInfo(other.getWorkflowTemplateInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + environmentId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + state_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 24: { + runNumber_ = input.readUInt32(); + bitField0_ |= 0x00000004; + break; + } // case 24 + case 34: { + error_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + message_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + transition_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000020; + break; + } // case 50 + case 58: { + transitionStep_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 64: { + transitionStatus_ = input.readEnum(); + bitField0_ |= 0x00000080; + break; + } // case 64 + case 74: { + com.google.protobuf.MapEntry + vars__ = input.readMessage( + VarsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + internalGetMutableVars().getMutableMap().put( + vars__.getKey(), vars__.getValue()); + bitField0_ |= 0x00000100; + break; + } // case 74 + case 82: { + input.readMessage( + internalGetLastRequestUserFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000200; + break; + } // case 82 + case 90: { + 
input.readMessage( + internalGetWorkflowTemplateInfoFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000400; + break; + } // case 90 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object environmentId_ = ""; + /** + * string environmentId = 1; + * @return The environmentId. + */ + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string environmentId = 1; + * @return The bytes for environmentId. + */ + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string environmentId = 1; + * @param value The environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + environmentId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string environmentId = 1; + * @return This builder for chaining. 
+ */ + public Builder clearEnvironmentId() { + environmentId_ = getDefaultInstance().getEnvironmentId(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string environmentId = 1; + * @param value The bytes for environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + environmentId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object state_ = ""; + /** + * string state = 2; + * @return The state. + */ + public java.lang.String getState() { + java.lang.Object ref = state_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string state = 2; + * @return The bytes for state. + */ + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string state = 2; + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + state_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string state = 2; + * @return This builder for chaining. + */ + public Builder clearState() { + state_ = getDefaultInstance().getState(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string state = 2; + * @param value The bytes for state to set. 
+ * @return This builder for chaining. + */ + public Builder setStateBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + state_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private int runNumber_ ; + /** + *
+       * only when the environment is in the running state
+       * 
+ * + * uint32 runNumber = 3; + * @return The runNumber. + */ + @java.lang.Override + public int getRunNumber() { + return runNumber_; + } + /** + *
+       * only when the environment is in the running state
+       * 
+ * + * uint32 runNumber = 3; + * @param value The runNumber to set. + * @return This builder for chaining. + */ + public Builder setRunNumber(int value) { + + runNumber_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * only when the environment is in the running state
+       * 
+ * + * uint32 runNumber = 3; + * @return This builder for chaining. + */ + public Builder clearRunNumber() { + bitField0_ = (bitField0_ & ~0x00000004); + runNumber_ = 0; + onChanged(); + return this; + } + + private java.lang.Object error_ = ""; + /** + * string error = 4; + * @return The error. + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string error = 4; + * @return The bytes for error. + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string error = 4; + * @param value The error to set. + * @return This builder for chaining. + */ + public Builder setError( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + error_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * string error = 4; + * @return This builder for chaining. + */ + public Builder clearError() { + error_ = getDefaultInstance().getError(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + * string error = 4; + * @param value The bytes for error to set. + * @return This builder for chaining. + */ + public Builder setErrorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + error_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private java.lang.Object message_ = ""; + /** + *
+       * any additional message concerning the current state or transition
+       * 
+ * + * string message = 5; + * @return The message. + */ + public java.lang.String getMessage() { + java.lang.Object ref = message_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + message_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * any additional message concerning the current state or transition
+       * 
+ * + * string message = 5; + * @return The bytes for message. + */ + public com.google.protobuf.ByteString + getMessageBytes() { + java.lang.Object ref = message_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + message_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * any additional message concerning the current state or transition
+       * 
+ * + * string message = 5; + * @param value The message to set. + * @return This builder for chaining. + */ + public Builder setMessage( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + message_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       * any additional message concerning the current state or transition
+       * 
+ * + * string message = 5; + * @return This builder for chaining. + */ + public Builder clearMessage() { + message_ = getDefaultInstance().getMessage(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + *
+       * any additional message concerning the current state or transition
+       * 
+ * + * string message = 5; + * @param value The bytes for message to set. + * @return This builder for chaining. + */ + public Builder setMessageBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + message_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + private java.lang.Object transition_ = ""; + /** + * string transition = 6; + * @return The transition. + */ + public java.lang.String getTransition() { + java.lang.Object ref = transition_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + transition_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string transition = 6; + * @return The bytes for transition. + */ + public com.google.protobuf.ByteString + getTransitionBytes() { + java.lang.Object ref = transition_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + transition_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string transition = 6; + * @param value The transition to set. + * @return This builder for chaining. + */ + public Builder setTransition( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + transition_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * string transition = 6; + * @return This builder for chaining. + */ + public Builder clearTransition() { + transition_ = getDefaultInstance().getTransition(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + return this; + } + /** + * string transition = 6; + * @param value The bytes for transition to set. + * @return This builder for chaining. 
+ */ + public Builder setTransitionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + transition_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + + private java.lang.Object transitionStep_ = ""; + /** + * string transitionStep = 7; + * @return The transitionStep. + */ + public java.lang.String getTransitionStep() { + java.lang.Object ref = transitionStep_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + transitionStep_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string transitionStep = 7; + * @return The bytes for transitionStep. + */ + public com.google.protobuf.ByteString + getTransitionStepBytes() { + java.lang.Object ref = transitionStep_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + transitionStep_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string transitionStep = 7; + * @param value The transitionStep to set. + * @return This builder for chaining. + */ + public Builder setTransitionStep( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + transitionStep_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * string transitionStep = 7; + * @return This builder for chaining. + */ + public Builder clearTransitionStep() { + transitionStep_ = getDefaultInstance().getTransitionStep(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + * string transitionStep = 7; + * @param value The bytes for transitionStep to set. + * @return This builder for chaining. 
+ */ + public Builder setTransitionStepBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + transitionStep_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + + private int transitionStatus_ = 0; + /** + * .events.OpStatus transitionStatus = 8; + * @return The enum numeric value on the wire for transitionStatus. + */ + @java.lang.Override public int getTransitionStatusValue() { + return transitionStatus_; + } + /** + * .events.OpStatus transitionStatus = 8; + * @param value The enum numeric value on the wire for transitionStatus to set. + * @return This builder for chaining. + */ + public Builder setTransitionStatusValue(int value) { + transitionStatus_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * .events.OpStatus transitionStatus = 8; + * @return The transitionStatus. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + /** + * .events.OpStatus transitionStatus = 8; + * @param value The transitionStatus to set. + * @return This builder for chaining. + */ + public Builder setTransitionStatus(alice.dip.kafka.events.Events.OpStatus value) { + if (value == null) { throw new NullPointerException(); } + bitField0_ |= 0x00000080; + transitionStatus_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .events.OpStatus transitionStatus = 8; + * @return This builder for chaining. 
+ */ + public Builder clearTransitionStatus() { + bitField0_ = (bitField0_ & ~0x00000080); + transitionStatus_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.MapField< + java.lang.String, java.lang.String> vars_; + private com.google.protobuf.MapField + internalGetVars() { + if (vars_ == null) { + return com.google.protobuf.MapField.emptyMapField( + VarsDefaultEntryHolder.defaultEntry); + } + return vars_; + } + private com.google.protobuf.MapField + internalGetMutableVars() { + if (vars_ == null) { + vars_ = com.google.protobuf.MapField.newMapField( + VarsDefaultEntryHolder.defaultEntry); + } + if (!vars_.isMutable()) { + vars_ = vars_.copy(); + } + bitField0_ |= 0x00000100; + onChanged(); + return vars_; + } + public int getVarsCount() { + return internalGetVars().getMap().size(); + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public boolean containsVars( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetVars().getMap().containsKey(key); + } + /** + * Use {@link #getVarsMap()} instead. + */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getVars() { + return getVarsMap(); + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public java.util.Map getVarsMap() { + return internalGetVars().getMap(); + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public /* nullable */ +java.lang.String getVarsOrDefault( + java.lang.String key, + /* nullable */ +java.lang.String defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetVars().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + @java.lang.Override + public java.lang.String getVarsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetVars().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + public Builder clearVars() { + bitField0_ = (bitField0_ & ~0x00000100); + internalGetMutableVars().getMutableMap() + .clear(); + return this; + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + public Builder removeVars( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + internalGetMutableVars().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableVars() { + bitField0_ |= 0x00000100; + return internalGetMutableVars().getMutableMap(); + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + public Builder putVars( + java.lang.String key, + java.lang.String value) { + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { throw new NullPointerException("map value"); } + internalGetMutableVars().getMutableMap() + .put(key, value); + bitField0_ |= 0x00000100; + return this; + } + /** + *
+       * consolidated environment variables at the root role of the environment
+       * 
+ * + * map<string, string> vars = 9; + */ + public Builder putAllVars( + java.util.Map values) { + internalGetMutableVars().getMutableMap() + .putAll(values); + bitField0_ |= 0x00000100; + return this; + } + + private alice.dip.kafka.events.Common.User lastRequestUser_; + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> lastRequestUserBuilder_; + /** + * .common.User lastRequestUser = 10; + * @return Whether the lastRequestUser field is set. + */ + public boolean hasLastRequestUser() { + return ((bitField0_ & 0x00000200) != 0); + } + /** + * .common.User lastRequestUser = 10; + * @return The lastRequestUser. + */ + public alice.dip.kafka.events.Common.User getLastRequestUser() { + if (lastRequestUserBuilder_ == null) { + return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } else { + return lastRequestUserBuilder_.getMessage(); + } + } + /** + * .common.User lastRequestUser = 10; + */ + public Builder setLastRequestUser(alice.dip.kafka.events.Common.User value) { + if (lastRequestUserBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + lastRequestUser_ = value; + } else { + lastRequestUserBuilder_.setMessage(value); + } + bitField0_ |= 0x00000200; + onChanged(); + return this; + } + /** + * .common.User lastRequestUser = 10; + */ + public Builder setLastRequestUser( + alice.dip.kafka.events.Common.User.Builder builderForValue) { + if (lastRequestUserBuilder_ == null) { + lastRequestUser_ = builderForValue.build(); + } else { + lastRequestUserBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000200; + onChanged(); + return this; + } + /** + * .common.User lastRequestUser = 10; + */ + public Builder mergeLastRequestUser(alice.dip.kafka.events.Common.User value) { + if (lastRequestUserBuilder_ == null) { + if (((bitField0_ & 
0x00000200) != 0) && + lastRequestUser_ != null && + lastRequestUser_ != alice.dip.kafka.events.Common.User.getDefaultInstance()) { + getLastRequestUserBuilder().mergeFrom(value); + } else { + lastRequestUser_ = value; + } + } else { + lastRequestUserBuilder_.mergeFrom(value); + } + if (lastRequestUser_ != null) { + bitField0_ |= 0x00000200; + onChanged(); + } + return this; + } + /** + * .common.User lastRequestUser = 10; + */ + public Builder clearLastRequestUser() { + bitField0_ = (bitField0_ & ~0x00000200); + lastRequestUser_ = null; + if (lastRequestUserBuilder_ != null) { + lastRequestUserBuilder_.dispose(); + lastRequestUserBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .common.User lastRequestUser = 10; + */ + public alice.dip.kafka.events.Common.User.Builder getLastRequestUserBuilder() { + bitField0_ |= 0x00000200; + onChanged(); + return internalGetLastRequestUserFieldBuilder().getBuilder(); + } + /** + * .common.User lastRequestUser = 10; + */ + public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() { + if (lastRequestUserBuilder_ != null) { + return lastRequestUserBuilder_.getMessageOrBuilder(); + } else { + return lastRequestUser_ == null ? 
+ alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } + } + /** + * .common.User lastRequestUser = 10; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> + internalGetLastRequestUserFieldBuilder() { + if (lastRequestUserBuilder_ == null) { + lastRequestUserBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder>( + getLastRequestUser(), + getParentForChildren(), + isClean()); + lastRequestUser_ = null; + } + return lastRequestUserBuilder_; + } + + private alice.dip.kafka.events.Common.WorkflowTemplateInfo workflowTemplateInfo_; + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.WorkflowTemplateInfo, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder> workflowTemplateInfoBuilder_; + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + * @return Whether the workflowTemplateInfo field is set. + */ + public boolean hasWorkflowTemplateInfo() { + return ((bitField0_ & 0x00000400) != 0); + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + * @return The workflowTemplateInfo. + */ + public alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInfo() { + if (workflowTemplateInfoBuilder_ == null) { + return workflowTemplateInfo_ == null ? 
alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_; + } else { + return workflowTemplateInfoBuilder_.getMessage(); + } + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + public Builder setWorkflowTemplateInfo(alice.dip.kafka.events.Common.WorkflowTemplateInfo value) { + if (workflowTemplateInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + workflowTemplateInfo_ = value; + } else { + workflowTemplateInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000400; + onChanged(); + return this; + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + public Builder setWorkflowTemplateInfo( + alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder builderForValue) { + if (workflowTemplateInfoBuilder_ == null) { + workflowTemplateInfo_ = builderForValue.build(); + } else { + workflowTemplateInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000400; + onChanged(); + return this; + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + public Builder mergeWorkflowTemplateInfo(alice.dip.kafka.events.Common.WorkflowTemplateInfo value) { + if (workflowTemplateInfoBuilder_ == null) { + if (((bitField0_ & 0x00000400) != 0) && + workflowTemplateInfo_ != null && + workflowTemplateInfo_ != alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance()) { + getWorkflowTemplateInfoBuilder().mergeFrom(value); + } else { + workflowTemplateInfo_ = value; + } + } else { + workflowTemplateInfoBuilder_.mergeFrom(value); + } + if (workflowTemplateInfo_ != null) { + bitField0_ |= 0x00000400; + onChanged(); + } + return this; + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + public Builder clearWorkflowTemplateInfo() { + bitField0_ = (bitField0_ & ~0x00000400); + workflowTemplateInfo_ = null; + if (workflowTemplateInfoBuilder_ != null) { + workflowTemplateInfoBuilder_.dispose(); + 
workflowTemplateInfoBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + public alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder getWorkflowTemplateInfoBuilder() { + bitField0_ |= 0x00000400; + onChanged(); + return internalGetWorkflowTemplateInfoFieldBuilder().getBuilder(); + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + public alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder() { + if (workflowTemplateInfoBuilder_ != null) { + return workflowTemplateInfoBuilder_.getMessageOrBuilder(); + } else { + return workflowTemplateInfo_ == null ? + alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_; + } + } + /** + * .common.WorkflowTemplateInfo workflowTemplateInfo = 11; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.WorkflowTemplateInfo, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder> + internalGetWorkflowTemplateInfoFieldBuilder() { + if (workflowTemplateInfoBuilder_ == null) { + workflowTemplateInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.WorkflowTemplateInfo, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder>( + getWorkflowTemplateInfo(), + getParentForChildren(), + isClean()); + workflowTemplateInfo_ = null; + } + return workflowTemplateInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_EnvironmentEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_EnvironmentEvent) + private static final alice.dip.kafka.events.Events.Ev_EnvironmentEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_EnvironmentEvent(); + } + + public static 
alice.dip.kafka.events.Events.Ev_EnvironmentEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_EnvironmentEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface TraitsOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Traits) + com.google.protobuf.MessageOrBuilder { + + /** + * string trigger = 1; + * @return The trigger. + */ + java.lang.String getTrigger(); + /** + * string trigger = 1; + * @return The bytes for trigger. + */ + com.google.protobuf.ByteString + getTriggerBytes(); + + /** + * string await = 2; + * @return The await. + */ + java.lang.String getAwait(); + /** + * string await = 2; + * @return The bytes for await. + */ + com.google.protobuf.ByteString + getAwaitBytes(); + + /** + * string timeout = 3; + * @return The timeout. 
+ */ + java.lang.String getTimeout(); + /** + * string timeout = 3; + * @return The bytes for timeout. + */ + com.google.protobuf.ByteString + getTimeoutBytes(); + + /** + * bool critical = 4; + * @return The critical. + */ + boolean getCritical(); + } + /** + * Protobuf type {@code events.Traits} + */ + public static final class Traits extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Traits) + TraitsOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Traits.class.getName()); + } + // Use Traits.newBuilder() to construct. + private Traits(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Traits() { + trigger_ = ""; + await_ = ""; + timeout_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Traits_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Traits_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Traits.class, alice.dip.kafka.events.Events.Traits.Builder.class); + } + + public static final int TRIGGER_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object trigger_ = ""; + /** + * string trigger = 1; + * @return The trigger. 
+ */ + @java.lang.Override + public java.lang.String getTrigger() { + java.lang.Object ref = trigger_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + trigger_ = s; + return s; + } + } + /** + * string trigger = 1; + * @return The bytes for trigger. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTriggerBytes() { + java.lang.Object ref = trigger_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + trigger_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int AWAIT_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object await_ = ""; + /** + * string await = 2; + * @return The await. + */ + @java.lang.Override + public java.lang.String getAwait() { + java.lang.Object ref = await_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + await_ = s; + return s; + } + } + /** + * string await = 2; + * @return The bytes for await. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getAwaitBytes() { + java.lang.Object ref = await_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + await_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TIMEOUT_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object timeout_ = ""; + /** + * string timeout = 3; + * @return The timeout. 
+ */ + @java.lang.Override + public java.lang.String getTimeout() { + java.lang.Object ref = timeout_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + timeout_ = s; + return s; + } + } + /** + * string timeout = 3; + * @return The bytes for timeout. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTimeoutBytes() { + java.lang.Object ref = timeout_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + timeout_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CRITICAL_FIELD_NUMBER = 4; + private boolean critical_ = false; + /** + * bool critical = 4; + * @return The critical. + */ + @java.lang.Override + public boolean getCritical() { + return critical_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(trigger_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, trigger_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(await_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, await_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(timeout_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, timeout_); + } + if (critical_ != false) { + output.writeBool(4, critical_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public 
int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(trigger_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, trigger_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(await_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, await_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(timeout_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, timeout_); + } + if (critical_ != false) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, critical_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Traits)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Traits other = (alice.dip.kafka.events.Events.Traits) obj; + + if (!getTrigger() + .equals(other.getTrigger())) return false; + if (!getAwait() + .equals(other.getAwait())) return false; + if (!getTimeout() + .equals(other.getTimeout())) return false; + if (getCritical() + != other.getCritical()) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TRIGGER_FIELD_NUMBER; + hash = (53 * hash) + getTrigger().hashCode(); + hash = (37 * hash) + AWAIT_FIELD_NUMBER; + hash = (53 * hash) + getAwait().hashCode(); + hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; + hash = (53 * hash) + getTimeout().hashCode(); + hash = (37 * hash) + CRITICAL_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getCritical()); + 
hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Traits parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Traits parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Traits parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + 
public static alice.dip.kafka.events.Events.Traits parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Traits parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Traits parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Traits prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Traits} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Traits) + alice.dip.kafka.events.Events.TraitsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Traits_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Traits_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Traits.class, alice.dip.kafka.events.Events.Traits.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Traits.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + trigger_ = ""; + await_ = ""; + timeout_ = ""; + critical_ = false; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Traits_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Traits getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Traits.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Traits build() { + alice.dip.kafka.events.Events.Traits result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Traits buildPartial() { + alice.dip.kafka.events.Events.Traits result = new alice.dip.kafka.events.Events.Traits(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Traits result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.trigger_ = trigger_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.await_ = await_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.timeout_ = timeout_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.critical_ = critical_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Traits) { + return mergeFrom((alice.dip.kafka.events.Events.Traits)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Traits other) { + if (other == alice.dip.kafka.events.Events.Traits.getDefaultInstance()) return this; + if (!other.getTrigger().isEmpty()) { + trigger_ = other.trigger_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getAwait().isEmpty()) { + await_ = other.await_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getTimeout().isEmpty()) { + timeout_ = other.timeout_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (other.getCritical() != false) { + setCritical(other.getCritical()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if 
(extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + trigger_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + await_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + timeout_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 32: { + critical_ = input.readBool(); + bitField0_ |= 0x00000008; + break; + } // case 32 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object trigger_ = ""; + /** + * string trigger = 1; + * @return The trigger. + */ + public java.lang.String getTrigger() { + java.lang.Object ref = trigger_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + trigger_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string trigger = 1; + * @return The bytes for trigger. + */ + public com.google.protobuf.ByteString + getTriggerBytes() { + java.lang.Object ref = trigger_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + trigger_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string trigger = 1; + * @param value The trigger to set. + * @return This builder for chaining. 
+ */ + public Builder setTrigger( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + trigger_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string trigger = 1; + * @return This builder for chaining. + */ + public Builder clearTrigger() { + trigger_ = getDefaultInstance().getTrigger(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string trigger = 1; + * @param value The bytes for trigger to set. + * @return This builder for chaining. + */ + public Builder setTriggerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + trigger_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object await_ = ""; + /** + * string await = 2; + * @return The await. + */ + public java.lang.String getAwait() { + java.lang.Object ref = await_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + await_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string await = 2; + * @return The bytes for await. + */ + public com.google.protobuf.ByteString + getAwaitBytes() { + java.lang.Object ref = await_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + await_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string await = 2; + * @param value The await to set. + * @return This builder for chaining. + */ + public Builder setAwait( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + await_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string await = 2; + * @return This builder for chaining. 
+ */ + public Builder clearAwait() { + await_ = getDefaultInstance().getAwait(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string await = 2; + * @param value The bytes for await to set. + * @return This builder for chaining. + */ + public Builder setAwaitBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + await_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object timeout_ = ""; + /** + * string timeout = 3; + * @return The timeout. + */ + public java.lang.String getTimeout() { + java.lang.Object ref = timeout_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + timeout_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string timeout = 3; + * @return The bytes for timeout. + */ + public com.google.protobuf.ByteString + getTimeoutBytes() { + java.lang.Object ref = timeout_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + timeout_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string timeout = 3; + * @param value The timeout to set. + * @return This builder for chaining. + */ + public Builder setTimeout( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + timeout_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * string timeout = 3; + * @return This builder for chaining. + */ + public Builder clearTimeout() { + timeout_ = getDefaultInstance().getTimeout(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * string timeout = 3; + * @param value The bytes for timeout to set. 
+ * @return This builder for chaining. + */ + public Builder setTimeoutBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + timeout_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private boolean critical_ ; + /** + * bool critical = 4; + * @return The critical. + */ + @java.lang.Override + public boolean getCritical() { + return critical_; + } + /** + * bool critical = 4; + * @param value The critical to set. + * @return This builder for chaining. + */ + public Builder setCritical(boolean value) { + + critical_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * bool critical = 4; + * @return This builder for chaining. + */ + public Builder clearCritical() { + bitField0_ = (bitField0_ & ~0x00000008); + critical_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Traits) + } + + // @@protoc_insertion_point(class_scope:events.Traits) + private static final alice.dip.kafka.events.Events.Traits DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Traits(); + } + + public static alice.dip.kafka.events.Events.Traits getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Traits parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Traits getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_TaskEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_TaskEvent) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * task name, based on the name of the task class
+     * 
+ * + * string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * task name, based on the name of the task class
+     * 
+ * + * string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * task id, unique
+     * 
+ * + * string taskid = 2; + * @return The taskid. + */ + java.lang.String getTaskid(); + /** + *
+     * task id, unique
+     * 
+ * + * string taskid = 2; + * @return The bytes for taskid. + */ + com.google.protobuf.ByteString + getTaskidBytes(); + + /** + *
+     * state machine state for this task
+     * 
+ * + * string state = 3; + * @return The state. + */ + java.lang.String getState(); + /** + *
+     * state machine state for this task
+     * 
+ * + * string state = 3; + * @return The bytes for state. + */ + com.google.protobuf.ByteString + getStateBytes(); + + /** + *
+     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * 
+ * + * string status = 4; + * @return The status. + */ + java.lang.String getStatus(); + /** + *
+     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * 
+ * + * string status = 4; + * @return The bytes for status. + */ + com.google.protobuf.ByteString + getStatusBytes(); + + /** + * string hostname = 5; + * @return The hostname. + */ + java.lang.String getHostname(); + /** + * string hostname = 5; + * @return The bytes for hostname. + */ + com.google.protobuf.ByteString + getHostnameBytes(); + + /** + *
+     * name of the task class from which this task was spawned
+     * 
+ * + * string className = 6; + * @return The className. + */ + java.lang.String getClassName(); + /** + *
+     * name of the task class from which this task was spawned
+     * 
+ * + * string className = 6; + * @return The bytes for className. + */ + com.google.protobuf.ByteString + getClassNameBytes(); + + /** + * .events.Traits traits = 7; + * @return Whether the traits field is set. + */ + boolean hasTraits(); + /** + * .events.Traits traits = 7; + * @return The traits. + */ + alice.dip.kafka.events.Events.Traits getTraits(); + /** + * .events.Traits traits = 7; + */ + alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder(); + + /** + * string environmentId = 8; + * @return The environmentId. + */ + java.lang.String getEnvironmentId(); + /** + * string environmentId = 8; + * @return The bytes for environmentId. + */ + com.google.protobuf.ByteString + getEnvironmentIdBytes(); + + /** + *
+     * path to the parent taskRole of this task within the environment
+     * 
+ * + * string path = 9; + * @return The path. + */ + java.lang.String getPath(); + /** + *
+     * path to the parent taskRole of this task within the environment
+     * 
+ * + * string path = 9; + * @return The bytes for path. + */ + com.google.protobuf.ByteString + getPathBytes(); + } + /** + * Protobuf type {@code events.Ev_TaskEvent} + */ + public static final class Ev_TaskEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_TaskEvent) + Ev_TaskEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_TaskEvent.class.getName()); + } + // Use Ev_TaskEvent.newBuilder() to construct. + private Ev_TaskEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_TaskEvent() { + name_ = ""; + taskid_ = ""; + state_ = ""; + status_ = ""; + hostname_ = ""; + className_ = ""; + environmentId_ = ""; + path_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_TaskEvent.class, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + *
+     * task name, based on the name of the task class
+     * 
+ * + * string name = 1; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * task name, based on the name of the task class
+     * 
+ * + * string name = 1; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TASKID_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object taskid_ = ""; + /** + *
+     * task id, unique
+     * 
+ * + * string taskid = 2; + * @return The taskid. + */ + @java.lang.Override + public java.lang.String getTaskid() { + java.lang.Object ref = taskid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskid_ = s; + return s; + } + } + /** + *
+     * task id, unique
+     * 
+ * + * string taskid = 2; + * @return The bytes for taskid. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTaskidBytes() { + java.lang.Object ref = taskid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + taskid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object state_ = ""; + /** + *
+     * state machine state for this task
+     * 
+ * + * string state = 3; + * @return The state. + */ + @java.lang.Override + public java.lang.String getState() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } + } + /** + *
+     * state machine state for this task
+     * 
+ * + * string state = 3; + * @return The bytes for state. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATUS_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private volatile java.lang.Object status_ = ""; + /** + *
+     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * 
+ * + * string status = 4; + * @return The status. + */ + @java.lang.Override + public java.lang.String getStatus() { + java.lang.Object ref = status_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + status_ = s; + return s; + } + } + /** + *
+     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * 
+ * + * string status = 4; + * @return The bytes for status. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getStatusBytes() { + java.lang.Object ref = status_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + status_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int HOSTNAME_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object hostname_ = ""; + /** + * string hostname = 5; + * @return The hostname. + */ + @java.lang.Override + public java.lang.String getHostname() { + java.lang.Object ref = hostname_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + hostname_ = s; + return s; + } + } + /** + * string hostname = 5; + * @return The bytes for hostname. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CLASSNAME_FIELD_NUMBER = 6; + @SuppressWarnings("serial") + private volatile java.lang.Object className_ = ""; + /** + *
+     * name of the task class from which this task was spawned
+     * 
+ * + * string className = 6; + * @return The className. + */ + @java.lang.Override + public java.lang.String getClassName() { + java.lang.Object ref = className_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + className_ = s; + return s; + } + } + /** + *
+     * name of the task class from which this task was spawned
+     * 
+ * + * string className = 6; + * @return The bytes for className. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getClassNameBytes() { + java.lang.Object ref = className_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + className_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRAITS_FIELD_NUMBER = 7; + private alice.dip.kafka.events.Events.Traits traits_; + /** + * .events.Traits traits = 7; + * @return Whether the traits field is set. + */ + @java.lang.Override + public boolean hasTraits() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .events.Traits traits = 7; + * @return The traits. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Traits getTraits() { + return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } + /** + * .events.Traits traits = 7; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() { + return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } + + public static final int ENVIRONMENTID_FIELD_NUMBER = 8; + @SuppressWarnings("serial") + private volatile java.lang.Object environmentId_ = ""; + /** + * string environmentId = 8; + * @return The environmentId. + */ + @java.lang.Override + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } + } + /** + * string environmentId = 8; + * @return The bytes for environmentId. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PATH_FIELD_NUMBER = 9; + @SuppressWarnings("serial") + private volatile java.lang.Object path_ = ""; + /** + *
+     * path to the parent taskRole of this task within the environment
+     * 
+ * + * string path = 9; + * @return The path. + */ + @java.lang.Override + public java.lang.String getPath() { + java.lang.Object ref = path_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + path_ = s; + return s; + } + } + /** + *
+     * path to the parent taskRole of this task within the environment
+     * 
+ * + * string path = 9; + * @return The bytes for path. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(taskid_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, taskid_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, state_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(status_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 4, status_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(hostname_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, hostname_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(className_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 6, className_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(7, getTraits()); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 8, environmentId_); + } + if 
(!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 9, path_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(taskid_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, taskid_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, state_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(status_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(4, status_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(hostname_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, hostname_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(className_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(6, className_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, getTraits()); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(8, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(9, path_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_TaskEvent)) { + return super.equals(obj); + } + 
alice.dip.kafka.events.Events.Ev_TaskEvent other = (alice.dip.kafka.events.Events.Ev_TaskEvent) obj; + + if (!getName() + .equals(other.getName())) return false; + if (!getTaskid() + .equals(other.getTaskid())) return false; + if (!getState() + .equals(other.getState())) return false; + if (!getStatus() + .equals(other.getStatus())) return false; + if (!getHostname() + .equals(other.getHostname())) return false; + if (!getClassName() + .equals(other.getClassName())) return false; + if (hasTraits() != other.hasTraits()) return false; + if (hasTraits()) { + if (!getTraits() + .equals(other.getTraits())) return false; + } + if (!getEnvironmentId() + .equals(other.getEnvironmentId())) return false; + if (!getPath() + .equals(other.getPath())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + TASKID_FIELD_NUMBER; + hash = (53 * hash) + getTaskid().hashCode(); + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + getState().hashCode(); + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + getStatus().hashCode(); + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostname().hashCode(); + hash = (37 * hash) + CLASSNAME_FIELD_NUMBER; + hash = (53 * hash) + getClassName().hashCode(); + if (hasTraits()) { + hash = (37 * hash) + TRAITS_FIELD_NUMBER; + hash = (53 * hash) + getTraits().hashCode(); + } + hash = (37 * hash) + ENVIRONMENTID_FIELD_NUMBER; + hash = (53 * hash) + getEnvironmentId().hashCode(); + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPath().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + 
public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_TaskEvent 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_TaskEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_TaskEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_TaskEvent) + alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_TaskEvent.class, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_TaskEvent.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage + .alwaysUseFieldBuilders) { + internalGetTraitsFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + taskid_ = ""; + state_ = ""; + status_ = ""; + hostname_ = ""; + className_ = ""; + traits_ = null; + if (traitsBuilder_ != null) { + traitsBuilder_.dispose(); + traitsBuilder_ = null; + } + environmentId_ = ""; + path_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEvent build() { + alice.dip.kafka.events.Events.Ev_TaskEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_TaskEvent result = new alice.dip.kafka.events.Events.Ev_TaskEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_TaskEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.taskid_ = taskid_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.state_ = state_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.status_ = status_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.hostname_ = hostname_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.className_ = className_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000040) != 0)) { + result.traits_ = traitsBuilder_ == null + ? 
traits_ + : traitsBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.environmentId_ = environmentId_; + } + if (((from_bitField0_ & 0x00000100) != 0)) { + result.path_ = path_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_TaskEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_TaskEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_TaskEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getTaskid().isEmpty()) { + taskid_ = other.taskid_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getState().isEmpty()) { + state_ = other.state_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (!other.getStatus().isEmpty()) { + status_ = other.status_; + bitField0_ |= 0x00000008; + onChanged(); + } + if (!other.getHostname().isEmpty()) { + hostname_ = other.hostname_; + bitField0_ |= 0x00000010; + onChanged(); + } + if (!other.getClassName().isEmpty()) { + className_ = other.className_; + bitField0_ |= 0x00000020; + onChanged(); + } + if (other.hasTraits()) { + mergeTraits(other.getTraits()); + } + if (!other.getEnvironmentId().isEmpty()) { + environmentId_ = other.environmentId_; + bitField0_ |= 0x00000080; + onChanged(); + } + if (!other.getPath().isEmpty()) { + path_ = other.path_; + bitField0_ |= 0x00000100; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + taskid_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + state_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + status_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + hostname_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + className_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000020; + break; + } // case 50 + case 58: { + input.readMessage( + internalGetTraitsFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 66: { + environmentId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000080; + break; + } // case 66 + case 74: { + path_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000100; + break; + } // case 74 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+       * task name, based on the name of the task class
+       * 
+ * + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * task name, based on the name of the task class
+       * 
+ * + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * task name, based on the name of the task class
+       * 
+ * + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * task name, based on the name of the task class
+       * 
+ * + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * task name, based on the name of the task class
+       * 
+ * + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object taskid_ = ""; + /** + *
+       * task id, unique
+       * 
+ * + * string taskid = 2; + * @return The taskid. + */ + public java.lang.String getTaskid() { + java.lang.Object ref = taskid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * task id, unique
+       * 
+ * + * string taskid = 2; + * @return The bytes for taskid. + */ + public com.google.protobuf.ByteString + getTaskidBytes() { + java.lang.Object ref = taskid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + taskid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * task id, unique
+       * 
+ * + * string taskid = 2; + * @param value The taskid to set. + * @return This builder for chaining. + */ + public Builder setTaskid( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + taskid_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * task id, unique
+       * 
+ * + * string taskid = 2; + * @return This builder for chaining. + */ + public Builder clearTaskid() { + taskid_ = getDefaultInstance().getTaskid(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * task id, unique
+       * 
+ * + * string taskid = 2; + * @param value The bytes for taskid to set. + * @return This builder for chaining. + */ + public Builder setTaskidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + taskid_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object state_ = ""; + /** + *
+       * state machine state for this task
+       * 
+ * + * string state = 3; + * @return The state. + */ + public java.lang.String getState() { + java.lang.Object ref = state_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * state machine state for this task
+       * 
+ * + * string state = 3; + * @return The bytes for state. + */ + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * state machine state for this task
+       * 
+ * + * string state = 3; + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + state_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * state machine state for this task
+       * 
+ * + * string state = 3; + * @return This builder for chaining. + */ + public Builder clearState() { + state_ = getDefaultInstance().getState(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * state machine state for this task
+       * 
+ * + * string state = 3; + * @param value The bytes for state to set. + * @return This builder for chaining. + */ + public Builder setStateBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + state_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private java.lang.Object status_ = ""; + /** + *
+       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * 
+ * + * string status = 4; + * @return The status. + */ + public java.lang.String getStatus() { + java.lang.Object ref = status_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + status_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * 
+ * + * string status = 4; + * @return The bytes for status. + */ + public com.google.protobuf.ByteString + getStatusBytes() { + java.lang.Object ref = status_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + status_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * 
+ * + * string status = 4; + * @param value The status to set. + * @return This builder for chaining. + */ + public Builder setStatus( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + status_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * 
+ * + * string status = 4; + * @return This builder for chaining. + */ + public Builder clearStatus() { + status_ = getDefaultInstance().getStatus(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * 
+ * + * string status = 4; + * @param value The bytes for status to set. + * @return This builder for chaining. + */ + public Builder setStatusBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + status_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private java.lang.Object hostname_ = ""; + /** + * string hostname = 5; + * @return The hostname. + */ + public java.lang.String getHostname() { + java.lang.Object ref = hostname_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + hostname_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string hostname = 5; + * @return The bytes for hostname. + */ + public com.google.protobuf.ByteString + getHostnameBytes() { + java.lang.Object ref = hostname_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + hostname_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string hostname = 5; + * @param value The hostname to set. + * @return This builder for chaining. + */ + public Builder setHostname( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + hostname_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * string hostname = 5; + * @return This builder for chaining. + */ + public Builder clearHostname() { + hostname_ = getDefaultInstance().getHostname(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + * string hostname = 5; + * @param value The bytes for hostname to set. + * @return This builder for chaining. 
+ */ + public Builder setHostnameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + hostname_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + private java.lang.Object className_ = ""; + /** + *
+       * name of the task class from which this task was spawned
+       * 
+ * + * string className = 6; + * @return The className. + */ + public java.lang.String getClassName() { + java.lang.Object ref = className_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + className_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * name of the task class from which this task was spawned
+       * 
+ * + * string className = 6; + * @return The bytes for className. + */ + public com.google.protobuf.ByteString + getClassNameBytes() { + java.lang.Object ref = className_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + className_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * name of the task class from which this task was spawned
+       * 
+ * + * string className = 6; + * @param value The className to set. + * @return This builder for chaining. + */ + public Builder setClassName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + className_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * name of the task class from which this task was spawned
+       * 
+ * + * string className = 6; + * @return This builder for chaining. + */ + public Builder clearClassName() { + className_ = getDefaultInstance().getClassName(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + return this; + } + /** + *
+       * name of the task class from which this task was spawned
+       * 
+ * + * string className = 6; + * @param value The bytes for className to set. + * @return This builder for chaining. + */ + public Builder setClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + className_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + + private alice.dip.kafka.events.Events.Traits traits_; + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> traitsBuilder_; + /** + * .events.Traits traits = 7; + * @return Whether the traits field is set. + */ + public boolean hasTraits() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + * .events.Traits traits = 7; + * @return The traits. + */ + public alice.dip.kafka.events.Events.Traits getTraits() { + if (traitsBuilder_ == null) { + return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } else { + return traitsBuilder_.getMessage(); + } + } + /** + * .events.Traits traits = 7; + */ + public Builder setTraits(alice.dip.kafka.events.Events.Traits value) { + if (traitsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + traits_ = value; + } else { + traitsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * .events.Traits traits = 7; + */ + public Builder setTraits( + alice.dip.kafka.events.Events.Traits.Builder builderForValue) { + if (traitsBuilder_ == null) { + traits_ = builderForValue.build(); + } else { + traitsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * .events.Traits traits = 7; + */ + public Builder mergeTraits(alice.dip.kafka.events.Events.Traits value) { + if (traitsBuilder_ == null) { + if (((bitField0_ & 0x00000040) != 0) && + traits_ != null && 
+ traits_ != alice.dip.kafka.events.Events.Traits.getDefaultInstance()) { + getTraitsBuilder().mergeFrom(value); + } else { + traits_ = value; + } + } else { + traitsBuilder_.mergeFrom(value); + } + if (traits_ != null) { + bitField0_ |= 0x00000040; + onChanged(); + } + return this; + } + /** + * .events.Traits traits = 7; + */ + public Builder clearTraits() { + bitField0_ = (bitField0_ & ~0x00000040); + traits_ = null; + if (traitsBuilder_ != null) { + traitsBuilder_.dispose(); + traitsBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .events.Traits traits = 7; + */ + public alice.dip.kafka.events.Events.Traits.Builder getTraitsBuilder() { + bitField0_ |= 0x00000040; + onChanged(); + return internalGetTraitsFieldBuilder().getBuilder(); + } + /** + * .events.Traits traits = 7; + */ + public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() { + if (traitsBuilder_ != null) { + return traitsBuilder_.getMessageOrBuilder(); + } else { + return traits_ == null ? + alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } + } + /** + * .events.Traits traits = 7; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> + internalGetTraitsFieldBuilder() { + if (traitsBuilder_ == null) { + traitsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder>( + getTraits(), + getParentForChildren(), + isClean()); + traits_ = null; + } + return traitsBuilder_; + } + + private java.lang.Object environmentId_ = ""; + /** + * string environmentId = 8; + * @return The environmentId. 
+ */ + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string environmentId = 8; + * @return The bytes for environmentId. + */ + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string environmentId = 8; + * @param value The environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + environmentId_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * string environmentId = 8; + * @return This builder for chaining. + */ + public Builder clearEnvironmentId() { + environmentId_ = getDefaultInstance().getEnvironmentId(); + bitField0_ = (bitField0_ & ~0x00000080); + onChanged(); + return this; + } + /** + * string environmentId = 8; + * @param value The bytes for environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + environmentId_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + + private java.lang.Object path_ = ""; + /** + *
+       * path to the parent taskRole of this task within the environment
+       * 
+ * + * string path = 9; + * @return The path. + */ + public java.lang.String getPath() { + java.lang.Object ref = path_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + path_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * path to the parent taskRole of this task within the environment
+       * 
+ * + * string path = 9; + * @return The bytes for path. + */ + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * path to the parent taskRole of this task within the environment
+       * 
+ * + * string path = 9; + * @param value The path to set. + * @return This builder for chaining. + */ + public Builder setPath( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + path_ = value; + bitField0_ |= 0x00000100; + onChanged(); + return this; + } + /** + *
+       * path to the parent taskRole of this task within the environment
+       * 
+ * + * string path = 9; + * @return This builder for chaining. + */ + public Builder clearPath() { + path_ = getDefaultInstance().getPath(); + bitField0_ = (bitField0_ & ~0x00000100); + onChanged(); + return this; + } + /** + *
+       * path to the parent taskRole of this task within the environment
+       * 
+ * + * string path = 9; + * @param value The bytes for path to set. + * @return This builder for chaining. + */ + public Builder setPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + path_ = value; + bitField0_ |= 0x00000100; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_TaskEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_TaskEvent) + private static final alice.dip.kafka.events.Events.Ev_TaskEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_TaskEvent(); + } + + public static alice.dip.kafka.events.Events.Ev_TaskEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_TaskEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + 
+ } + + public interface Ev_CallEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_CallEvent) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * name of the function being called, within the workflow template context
+     * 
+ * + * string func = 1; + * @return The func. + */ + java.lang.String getFunc(); + /** + *
+     * name of the function being called, within the workflow template context
+     * 
+ * + * string func = 1; + * @return The bytes for func. + */ + com.google.protobuf.ByteString + getFuncBytes(); + + /** + *
+     * progress or success/failure state of the call
+     * 
+ * + * .events.OpStatus callStatus = 2; + * @return The enum numeric value on the wire for callStatus. + */ + int getCallStatusValue(); + /** + *
+     * progress or success/failure state of the call
+     * 
+ * + * .events.OpStatus callStatus = 2; + * @return The callStatus. + */ + alice.dip.kafka.events.Events.OpStatus getCallStatus(); + + /** + *
+     * return value of the function
+     * 
+ * + * string return = 3; + * @return The return. + */ + java.lang.String getReturn(); + /** + *
+     * return value of the function
+     * 
+ * + * string return = 3; + * @return The bytes for return. + */ + com.google.protobuf.ByteString + getReturnBytes(); + + /** + * .events.Traits traits = 4; + * @return Whether the traits field is set. + */ + boolean hasTraits(); + /** + * .events.Traits traits = 4; + * @return The traits. + */ + alice.dip.kafka.events.Events.Traits getTraits(); + /** + * .events.Traits traits = 4; + */ + alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder(); + + /** + *
+     * any additional output of the function
+     * 
+ * + * string output = 5; + * @return The output. + */ + java.lang.String getOutput(); + /** + *
+     * any additional output of the function
+     * 
+ * + * string output = 5; + * @return The bytes for output. + */ + com.google.protobuf.ByteString + getOutputBytes(); + + /** + *
+     * error value, if returned
+     * 
+ * + * string error = 6; + * @return The error. + */ + java.lang.String getError(); + /** + *
+     * error value, if returned
+     * 
+ * + * string error = 6; + * @return The bytes for error. + */ + com.google.protobuf.ByteString + getErrorBytes(); + + /** + * string environmentId = 7; + * @return The environmentId. + */ + java.lang.String getEnvironmentId(); + /** + * string environmentId = 7; + * @return The bytes for environmentId. + */ + com.google.protobuf.ByteString + getEnvironmentIdBytes(); + + /** + *
+     * path to the parent callRole of this call within the environment
+     * 
+ * + * string path = 8; + * @return The path. + */ + java.lang.String getPath(); + /** + *
+     * path to the parent callRole of this call within the environment
+     * 
+ * + * string path = 8; + * @return The bytes for path. + */ + com.google.protobuf.ByteString + getPathBytes(); + } + /** + * Protobuf type {@code events.Ev_CallEvent} + */ + public static final class Ev_CallEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_CallEvent) + Ev_CallEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_CallEvent.class.getName()); + } + // Use Ev_CallEvent.newBuilder() to construct. + private Ev_CallEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_CallEvent() { + func_ = ""; + callStatus_ = 0; + return_ = ""; + output_ = ""; + error_ = ""; + environmentId_ = ""; + path_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_CallEvent.class, alice.dip.kafka.events.Events.Ev_CallEvent.Builder.class); + } + + private int bitField0_; + public static final int FUNC_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object func_ = ""; + /** + *
+     * name of the function being called, within the workflow template context
+     * 
+ * + * string func = 1; + * @return The func. + */ + @java.lang.Override + public java.lang.String getFunc() { + java.lang.Object ref = func_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + func_ = s; + return s; + } + } + /** + *
+     * name of the function being called, within the workflow template context
+     * 
+ * + * string func = 1; + * @return The bytes for func. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getFuncBytes() { + java.lang.Object ref = func_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + func_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int CALLSTATUS_FIELD_NUMBER = 2; + private int callStatus_ = 0; + /** + *
+     * progress or success/failure state of the call
+     * 
+ * + * .events.OpStatus callStatus = 2; + * @return The enum numeric value on the wire for callStatus. + */ + @java.lang.Override public int getCallStatusValue() { + return callStatus_; + } + /** + *
+     * progress or success/failure state of the call
+     * 
+ * + * .events.OpStatus callStatus = 2; + * @return The callStatus. + */ + @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getCallStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(callStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + + public static final int RETURN_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object return_ = ""; + /** + *
+     * return value of the function
+     * 
+ * + * string return = 3; + * @return The return. + */ + @java.lang.Override + public java.lang.String getReturn() { + java.lang.Object ref = return_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + return_ = s; + return s; + } + } + /** + *
+     * return value of the function
+     * 
+ * + * string return = 3; + * @return The bytes for return. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getReturnBytes() { + java.lang.Object ref = return_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + return_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRAITS_FIELD_NUMBER = 4; + private alice.dip.kafka.events.Events.Traits traits_; + /** + * .events.Traits traits = 4; + * @return Whether the traits field is set. + */ + @java.lang.Override + public boolean hasTraits() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .events.Traits traits = 4; + * @return The traits. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Traits getTraits() { + return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } + /** + * .events.Traits traits = 4; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() { + return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } + + public static final int OUTPUT_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object output_ = ""; + /** + *
+     * any additional output of the function
+     * 
+ * + * string output = 5; + * @return The output. + */ + @java.lang.Override + public java.lang.String getOutput() { + java.lang.Object ref = output_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + output_ = s; + return s; + } + } + /** + *
+     * any additional output of the function
+     * 
+ * + * string output = 5; + * @return The bytes for output. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getOutputBytes() { + java.lang.Object ref = output_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + output_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ERROR_FIELD_NUMBER = 6; + @SuppressWarnings("serial") + private volatile java.lang.Object error_ = ""; + /** + *
+     * error value, if returned
+     * 
+ * + * string error = 6; + * @return The error. + */ + @java.lang.Override + public java.lang.String getError() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } + } + /** + *
+     * error value, if returned
+     * 
+ * + * string error = 6; + * @return The bytes for error. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ENVIRONMENTID_FIELD_NUMBER = 7; + @SuppressWarnings("serial") + private volatile java.lang.Object environmentId_ = ""; + /** + * string environmentId = 7; + * @return The environmentId. + */ + @java.lang.Override + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } + } + /** + * string environmentId = 7; + * @return The bytes for environmentId. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PATH_FIELD_NUMBER = 8; + @SuppressWarnings("serial") + private volatile java.lang.Object path_ = ""; + /** + *
+     * path to the parent callRole of this call within the environment
+     * 
+ * + * string path = 8; + * @return The path. + */ + @java.lang.Override + public java.lang.String getPath() { + java.lang.Object ref = path_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + path_ = s; + return s; + } + } + /** + *
+     * path to the parent callRole of this call within the environment
+     * 
+ * + * string path = 8; + * @return The bytes for path. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(func_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, func_); + } + if (callStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + output.writeEnum(2, callStatus_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(return_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, return_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(4, getTraits()); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(output_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, output_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 6, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 7, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 8, path_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int 
getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(func_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, func_); + } + if (callStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, callStatus_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(return_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, return_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getTraits()); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(output_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, output_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(6, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(7, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(path_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(8, path_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_CallEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_CallEvent other = (alice.dip.kafka.events.Events.Ev_CallEvent) obj; + + if (!getFunc() + .equals(other.getFunc())) return false; + if (callStatus_ != other.callStatus_) return false; + if (!getReturn() + .equals(other.getReturn())) return false; + if (hasTraits() != other.hasTraits()) return false; + if (hasTraits()) { + if (!getTraits() + 
.equals(other.getTraits())) return false; + } + if (!getOutput() + .equals(other.getOutput())) return false; + if (!getError() + .equals(other.getError())) return false; + if (!getEnvironmentId() + .equals(other.getEnvironmentId())) return false; + if (!getPath() + .equals(other.getPath())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + FUNC_FIELD_NUMBER; + hash = (53 * hash) + getFunc().hashCode(); + hash = (37 * hash) + CALLSTATUS_FIELD_NUMBER; + hash = (53 * hash) + callStatus_; + hash = (37 * hash) + RETURN_FIELD_NUMBER; + hash = (53 * hash) + getReturn().hashCode(); + if (hasTraits()) { + hash = (37 * hash) + TRAITS_FIELD_NUMBER; + hash = (53 * hash) + getTraits().hashCode(); + } + hash = (37 * hash) + OUTPUT_FIELD_NUMBER; + hash = (53 * hash) + getOutput().hashCode(); + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + hash = (37 * hash) + ENVIRONMENTID_FIELD_NUMBER; + hash = (53 * hash) + getEnvironmentId().hashCode(); + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPath().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_CallEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_CallEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static 
alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_CallEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_CallEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_CallEvent) + alice.dip.kafka.events.Events.Ev_CallEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_CallEvent.class, 
alice.dip.kafka.events.Events.Ev_CallEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_CallEvent.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage + .alwaysUseFieldBuilders) { + internalGetTraitsFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + func_ = ""; + callStatus_ = 0; + return_ = ""; + traits_ = null; + if (traitsBuilder_ != null) { + traitsBuilder_.dispose(); + traitsBuilder_ = null; + } + output_ = ""; + error_ = ""; + environmentId_ = ""; + path_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEvent build() { + alice.dip.kafka.events.Events.Ev_CallEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_CallEvent result = new alice.dip.kafka.events.Events.Ev_CallEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_CallEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.func_ = func_; + } + if (((from_bitField0_ & 
0x00000002) != 0)) { + result.callStatus_ = callStatus_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.return_ = return_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000008) != 0)) { + result.traits_ = traitsBuilder_ == null + ? traits_ + : traitsBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.output_ = output_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.error_ = error_; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.environmentId_ = environmentId_; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.path_ = path_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_CallEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_CallEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_CallEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance()) return this; + if (!other.getFunc().isEmpty()) { + func_ = other.func_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.callStatus_ != 0) { + setCallStatusValue(other.getCallStatusValue()); + } + if (!other.getReturn().isEmpty()) { + return_ = other.return_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (other.hasTraits()) { + mergeTraits(other.getTraits()); + } + if (!other.getOutput().isEmpty()) { + output_ = other.output_; + bitField0_ |= 0x00000010; + onChanged(); + } + if (!other.getError().isEmpty()) { + error_ = other.error_; + bitField0_ |= 0x00000020; + onChanged(); + } + if (!other.getEnvironmentId().isEmpty()) { + environmentId_ = other.environmentId_; + bitField0_ |= 0x00000040; + onChanged(); + } + if (!other.getPath().isEmpty()) { + path_ = other.path_; + bitField0_ |= 0x00000080; + onChanged(); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + func_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + callStatus_ = input.readEnum(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: { + return_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + input.readMessage( + internalGetTraitsFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + output_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + error_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000020; + break; + } // case 50 + case 58: { + environmentId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 66: { + path_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000080; + break; + } // case 66 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object func_ = ""; + /** + *
+       * name of the function being called, within the workflow template context
+       * 
+ * + * string func = 1; + * @return The func. + */ + public java.lang.String getFunc() { + java.lang.Object ref = func_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + func_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * name of the function being called, within the workflow template context
+       * 
+ * + * string func = 1; + * @return The bytes for func. + */ + public com.google.protobuf.ByteString + getFuncBytes() { + java.lang.Object ref = func_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + func_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * name of the function being called, within the workflow template context
+       * 
+ * + * string func = 1; + * @param value The func to set. + * @return This builder for chaining. + */ + public Builder setFunc( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + func_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * name of the function being called, within the workflow template context
+       * 
+ * + * string func = 1; + * @return This builder for chaining. + */ + public Builder clearFunc() { + func_ = getDefaultInstance().getFunc(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * name of the function being called, within the workflow template context
+       * 
+ * + * string func = 1; + * @param value The bytes for func to set. + * @return This builder for chaining. + */ + public Builder setFuncBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + func_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int callStatus_ = 0; + /** + *
+       * progress or success/failure state of the call
+       * 
+ * + * .events.OpStatus callStatus = 2; + * @return The enum numeric value on the wire for callStatus. + */ + @java.lang.Override public int getCallStatusValue() { + return callStatus_; + } + /** + *
+       * progress or success/failure state of the call
+       * 
+ * + * .events.OpStatus callStatus = 2; + * @param value The enum numeric value on the wire for callStatus to set. + * @return This builder for chaining. + */ + public Builder setCallStatusValue(int value) { + callStatus_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * progress or success/failure state of the call
+       * 
+ * + * .events.OpStatus callStatus = 2; + * @return The callStatus. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.OpStatus getCallStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(callStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + /** + *
+       * progress or success/failure state of the call
+       * 
+ * + * .events.OpStatus callStatus = 2; + * @param value The callStatus to set. + * @return This builder for chaining. + */ + public Builder setCallStatus(alice.dip.kafka.events.Events.OpStatus value) { + if (value == null) { throw new NullPointerException(); } + bitField0_ |= 0x00000002; + callStatus_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * progress or success/failure state of the call
+       * 
+ * + * .events.OpStatus callStatus = 2; + * @return This builder for chaining. + */ + public Builder clearCallStatus() { + bitField0_ = (bitField0_ & ~0x00000002); + callStatus_ = 0; + onChanged(); + return this; + } + + private java.lang.Object return_ = ""; + /** + *
+       * return value of the function
+       * 
+ * + * string return = 3; + * @return The return. + */ + public java.lang.String getReturn() { + java.lang.Object ref = return_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + return_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * return value of the function
+       * 
+ * + * string return = 3; + * @return The bytes for return. + */ + public com.google.protobuf.ByteString + getReturnBytes() { + java.lang.Object ref = return_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + return_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * return value of the function
+       * 
+ * + * string return = 3; + * @param value The return to set. + * @return This builder for chaining. + */ + public Builder setReturn( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + return_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * return value of the function
+       * 
+ * + * string return = 3; + * @return This builder for chaining. + */ + public Builder clearReturn() { + return_ = getDefaultInstance().getReturn(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * return value of the function
+       * 
+ * + * string return = 3; + * @param value The bytes for return to set. + * @return This builder for chaining. + */ + public Builder setReturnBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + return_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private alice.dip.kafka.events.Events.Traits traits_; + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> traitsBuilder_; + /** + * .events.Traits traits = 4; + * @return Whether the traits field is set. + */ + public boolean hasTraits() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * .events.Traits traits = 4; + * @return The traits. + */ + public alice.dip.kafka.events.Events.Traits getTraits() { + if (traitsBuilder_ == null) { + return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } else { + return traitsBuilder_.getMessage(); + } + } + /** + * .events.Traits traits = 4; + */ + public Builder setTraits(alice.dip.kafka.events.Events.Traits value) { + if (traitsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + traits_ = value; + } else { + traitsBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * .events.Traits traits = 4; + */ + public Builder setTraits( + alice.dip.kafka.events.Events.Traits.Builder builderForValue) { + if (traitsBuilder_ == null) { + traits_ = builderForValue.build(); + } else { + traitsBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * .events.Traits traits = 4; + */ + public Builder mergeTraits(alice.dip.kafka.events.Events.Traits value) { + if (traitsBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) && + traits_ != null && + traits_ != 
alice.dip.kafka.events.Events.Traits.getDefaultInstance()) { + getTraitsBuilder().mergeFrom(value); + } else { + traits_ = value; + } + } else { + traitsBuilder_.mergeFrom(value); + } + if (traits_ != null) { + bitField0_ |= 0x00000008; + onChanged(); + } + return this; + } + /** + * .events.Traits traits = 4; + */ + public Builder clearTraits() { + bitField0_ = (bitField0_ & ~0x00000008); + traits_ = null; + if (traitsBuilder_ != null) { + traitsBuilder_.dispose(); + traitsBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .events.Traits traits = 4; + */ + public alice.dip.kafka.events.Events.Traits.Builder getTraitsBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return internalGetTraitsFieldBuilder().getBuilder(); + } + /** + * .events.Traits traits = 4; + */ + public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() { + if (traitsBuilder_ != null) { + return traitsBuilder_.getMessageOrBuilder(); + } else { + return traits_ == null ? + alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_; + } + } + /** + * .events.Traits traits = 4; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> + internalGetTraitsFieldBuilder() { + if (traitsBuilder_ == null) { + traitsBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder>( + getTraits(), + getParentForChildren(), + isClean()); + traits_ = null; + } + return traitsBuilder_; + } + + private java.lang.Object output_ = ""; + /** + *
+       * any additional output of the function
+       * 
+ * + * string output = 5; + * @return The output. + */ + public java.lang.String getOutput() { + java.lang.Object ref = output_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + output_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * any additional output of the function
+       * 
+ * + * string output = 5; + * @return The bytes for output. + */ + public com.google.protobuf.ByteString + getOutputBytes() { + java.lang.Object ref = output_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + output_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * any additional output of the function
+       * 
+ * + * string output = 5; + * @param value The output to set. + * @return This builder for chaining. + */ + public Builder setOutput( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + output_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       * any additional output of the function
+       * 
+ * + * string output = 5; + * @return This builder for chaining. + */ + public Builder clearOutput() { + output_ = getDefaultInstance().getOutput(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + *
+       * any additional output of the function
+       * 
+ * + * string output = 5; + * @param value The bytes for output to set. + * @return This builder for chaining. + */ + public Builder setOutputBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + output_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + private java.lang.Object error_ = ""; + /** + *
+       * error value, if returned
+       * 
+ * + * string error = 6; + * @return The error. + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * error value, if returned
+       * 
+ * + * string error = 6; + * @return The bytes for error. + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * error value, if returned
+       * 
+ * + * string error = 6; + * @param value The error to set. + * @return This builder for chaining. + */ + public Builder setError( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + error_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * error value, if returned
+       * 
+ * + * string error = 6; + * @return This builder for chaining. + */ + public Builder clearError() { + error_ = getDefaultInstance().getError(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + return this; + } + /** + *
+       * error value, if returned
+       * 
+ * + * string error = 6; + * @param value The bytes for error to set. + * @return This builder for chaining. + */ + public Builder setErrorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + error_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + + private java.lang.Object environmentId_ = ""; + /** + * string environmentId = 7; + * @return The environmentId. + */ + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string environmentId = 7; + * @return The bytes for environmentId. + */ + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string environmentId = 7; + * @param value The environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + environmentId_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * string environmentId = 7; + * @return This builder for chaining. + */ + public Builder clearEnvironmentId() { + environmentId_ = getDefaultInstance().getEnvironmentId(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + * string environmentId = 7; + * @param value The bytes for environmentId to set. + * @return This builder for chaining. 
+ */ + public Builder setEnvironmentIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + environmentId_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + + private java.lang.Object path_ = ""; + /** + *
+       * path to the parent callRole of this call within the environment
+       * 
+ * + * string path = 8; + * @return The path. + */ + public java.lang.String getPath() { + java.lang.Object ref = path_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + path_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * path to the parent callRole of this call within the environment
+       * 
+ * + * string path = 8; + * @return The bytes for path. + */ + public com.google.protobuf.ByteString + getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * path to the parent callRole of this call within the environment
+       * 
+ * + * string path = 8; + * @param value The path to set. + * @return This builder for chaining. + */ + public Builder setPath( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + path_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + *
+       * path to the parent callRole of this call within the environment
+       * 
+ * + * string path = 8; + * @return This builder for chaining. + */ + public Builder clearPath() { + path_ = getDefaultInstance().getPath(); + bitField0_ = (bitField0_ & ~0x00000080); + onChanged(); + return this; + } + /** + *
+       * path to the parent callRole of this call within the environment
+       * 
+ * + * string path = 8; + * @param value The bytes for path to set. + * @return This builder for chaining. + */ + public Builder setPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + path_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_CallEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_CallEvent) + private static final alice.dip.kafka.events.Events.Ev_CallEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_CallEvent(); + } + + public static alice.dip.kafka.events.Events.Ev_CallEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_CallEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + 
+ } + + public interface Ev_RoleEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_RoleEvent) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * role name
+     * 
+ * + * string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * role name
+     * 
+ * + * string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * 
+ * + * string status = 2; + * @return The status. + */ + java.lang.String getStatus(); + /** + *
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * 
+ * + * string status = 2; + * @return The bytes for status. + */ + com.google.protobuf.ByteString + getStatusBytes(); + + /** + *
+     * state machine state for this role
+     * 
+ * + * string state = 3; + * @return The state. + */ + java.lang.String getState(); + /** + *
+     * state machine state for this role
+     * 
+ * + * string state = 3; + * @return The bytes for state. + */ + com.google.protobuf.ByteString + getStateBytes(); + + /** + *
+     * path to this role within the environment
+     * 
+ * + * string rolePath = 4; + * @return The rolePath. + */ + java.lang.String getRolePath(); + /** + *
+     * path to this role within the environment
+     * 
+ * + * string rolePath = 4; + * @return The bytes for rolePath. + */ + com.google.protobuf.ByteString + getRolePathBytes(); + + /** + * string environmentId = 5; + * @return The environmentId. + */ + java.lang.String getEnvironmentId(); + /** + * string environmentId = 5; + * @return The bytes for environmentId. + */ + com.google.protobuf.ByteString + getEnvironmentIdBytes(); + } + /** + * Protobuf type {@code events.Ev_RoleEvent} + */ + public static final class Ev_RoleEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_RoleEvent) + Ev_RoleEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_RoleEvent.class.getName()); + } + // Use Ev_RoleEvent.newBuilder() to construct. + private Ev_RoleEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_RoleEvent() { + name_ = ""; + status_ = ""; + state_ = ""; + rolePath_ = ""; + environmentId_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_RoleEvent.class, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + *
+     * role name
+     * 
+ * + * string name = 1; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * role name
+     * 
+ * + * string name = 1; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATUS_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object status_ = ""; + /** + *
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * 
+ * + * string status = 2; + * @return The status. + */ + @java.lang.Override + public java.lang.String getStatus() { + java.lang.Object ref = status_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + status_ = s; + return s; + } + } + /** + *
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * 
+ * + * string status = 2; + * @return The bytes for status. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getStatusBytes() { + java.lang.Object ref = status_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + status_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int STATE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object state_ = ""; + /** + *
+     * state machine state for this role
+     * 
+ * + * string state = 3; + * @return The state. + */ + @java.lang.Override + public java.lang.String getState() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } + } + /** + *
+     * state machine state for this role
+     * 
+ * + * string state = 3; + * @return The bytes for state. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ROLEPATH_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private volatile java.lang.Object rolePath_ = ""; + /** + *
+     * path to this role within the environment
+     * 
+ * + * string rolePath = 4; + * @return The rolePath. + */ + @java.lang.Override + public java.lang.String getRolePath() { + java.lang.Object ref = rolePath_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + rolePath_ = s; + return s; + } + } + /** + *
+     * path to this role within the environment
+     * 
+ * + * string rolePath = 4; + * @return The bytes for rolePath. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getRolePathBytes() { + java.lang.Object ref = rolePath_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + rolePath_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ENVIRONMENTID_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object environmentId_ = ""; + /** + * string environmentId = 5; + * @return The environmentId. + */ + @java.lang.Override + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } + } + /** + * string environmentId = 5; + * @return The bytes for environmentId. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(status_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, status_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, state_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(rolePath_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 4, rolePath_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, environmentId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(status_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, 
status_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, state_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(rolePath_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(4, rolePath_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, environmentId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_RoleEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_RoleEvent other = (alice.dip.kafka.events.Events.Ev_RoleEvent) obj; + + if (!getName() + .equals(other.getName())) return false; + if (!getStatus() + .equals(other.getStatus())) return false; + if (!getState() + .equals(other.getState())) return false; + if (!getRolePath() + .equals(other.getRolePath())) return false; + if (!getEnvironmentId() + .equals(other.getEnvironmentId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + getStatus().hashCode(); + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + getState().hashCode(); + hash = (37 * hash) + ROLEPATH_FIELD_NUMBER; + hash = (53 * hash) + getRolePath().hashCode(); + hash = (37 * hash) + ENVIRONMENTID_FIELD_NUMBER; + hash = (53 * hash) + getEnvironmentId().hashCode(); + hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_RoleEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_RoleEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_RoleEvent) + alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_RoleEvent.class, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_RoleEvent.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + status_ = ""; + state_ = ""; + rolePath_ = ""; + environmentId_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEvent build() { + alice.dip.kafka.events.Events.Ev_RoleEvent 
result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_RoleEvent result = new alice.dip.kafka.events.Events.Ev_RoleEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_RoleEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.status_ = status_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.state_ = state_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.rolePath_ = rolePath_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.environmentId_ = environmentId_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_RoleEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_RoleEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_RoleEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getStatus().isEmpty()) { + status_ = other.status_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getState().isEmpty()) { + state_ = other.state_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (!other.getRolePath().isEmpty()) { + rolePath_ = other.rolePath_; + bitField0_ |= 0x00000008; + onChanged(); + } + if (!other.getEnvironmentId().isEmpty()) { + environmentId_ = other.environmentId_; + bitField0_ |= 0x00000010; + onChanged(); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + status_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + state_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + rolePath_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + environmentId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+       * role name
+       * 
+ * + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * role name
+       * 
+ * + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * role name
+       * 
+ * + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * role name
+       * 
+ * + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * role name
+       * 
+ * + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object status_ = ""; + /** + *
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * 
+ * + * string status = 2; + * @return The status. + */ + public java.lang.String getStatus() { + java.lang.Object ref = status_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + status_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * 
+ * + * string status = 2; + * @return The bytes for status. + */ + public com.google.protobuf.ByteString + getStatusBytes() { + java.lang.Object ref = status_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + status_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * 
+ * + * string status = 2; + * @param value The status to set. + * @return This builder for chaining. + */ + public Builder setStatus( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + status_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * 
+ * + * string status = 2; + * @return This builder for chaining. + */ + public Builder clearStatus() { + status_ = getDefaultInstance().getStatus(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * 
+ * + * string status = 2; + * @param value The bytes for status to set. + * @return This builder for chaining. + */ + public Builder setStatusBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + status_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object state_ = ""; + /** + *
+       * state machine state for this role
+       * 
+ * + * string state = 3; + * @return The state. + */ + public java.lang.String getState() { + java.lang.Object ref = state_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * state machine state for this role
+       * 
+ * + * string state = 3; + * @return The bytes for state. + */ + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * state machine state for this role
+       * 
+ * + * string state = 3; + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + state_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * state machine state for this role
+       * 
+ * + * string state = 3; + * @return This builder for chaining. + */ + public Builder clearState() { + state_ = getDefaultInstance().getState(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * state machine state for this role
+       * 
+ * + * string state = 3; + * @param value The bytes for state to set. + * @return This builder for chaining. + */ + public Builder setStateBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + state_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private java.lang.Object rolePath_ = ""; + /** + *
+       * path to this role within the environment
+       * 
+ * + * string rolePath = 4; + * @return The rolePath. + */ + public java.lang.String getRolePath() { + java.lang.Object ref = rolePath_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + rolePath_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * path to this role within the environment
+       * 
+ * + * string rolePath = 4; + * @return The bytes for rolePath. + */ + public com.google.protobuf.ByteString + getRolePathBytes() { + java.lang.Object ref = rolePath_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + rolePath_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * path to this role within the environment
+       * 
+ * + * string rolePath = 4; + * @param value The rolePath to set. + * @return This builder for chaining. + */ + public Builder setRolePath( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + rolePath_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * path to this role within the environment
+       * 
+ * + * string rolePath = 4; + * @return This builder for chaining. + */ + public Builder clearRolePath() { + rolePath_ = getDefaultInstance().getRolePath(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+       * path to this role within the environment
+       * 
+ * + * string rolePath = 4; + * @param value The bytes for rolePath to set. + * @return This builder for chaining. + */ + public Builder setRolePathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + rolePath_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private java.lang.Object environmentId_ = ""; + /** + * string environmentId = 5; + * @return The environmentId. + */ + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string environmentId = 5; + * @return The bytes for environmentId. + */ + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string environmentId = 5; + * @param value The environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + environmentId_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * string environmentId = 5; + * @return This builder for chaining. + */ + public Builder clearEnvironmentId() { + environmentId_ = getDefaultInstance().getEnvironmentId(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + * string environmentId = 5; + * @param value The bytes for environmentId to set. + * @return This builder for chaining. 
+ */ + public Builder setEnvironmentIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + environmentId_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_RoleEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_RoleEvent) + private static final alice.dip.kafka.events.Events.Ev_RoleEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_RoleEvent(); + } + + public static alice.dip.kafka.events.Events.Ev_RoleEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_RoleEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_IntegratedServiceEventOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:events.Ev_IntegratedServiceEvent) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+     * 
+ * + * string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+     * 
+ * + * string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * error message, if any
+     * 
+ * + * string error = 2; + * @return The error. + */ + java.lang.String getError(); + /** + *
+     * error message, if any
+     * 
+ * + * string error = 2; + * @return The bytes for error. + */ + com.google.protobuf.ByteString + getErrorBytes(); + + /** + *
+     * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()
+     * 
+ * + * string operationName = 3; + * @return The operationName. + */ + java.lang.String getOperationName(); + /** + *
+     * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()
+     * 
+ * + * string operationName = 3; + * @return The bytes for operationName. + */ + com.google.protobuf.ByteString + getOperationNameBytes(); + + /** + *
+     * progress or success/failure state of the operation
+     * 
+ * + * .events.OpStatus operationStatus = 4; + * @return The enum numeric value on the wire for operationStatus. + */ + int getOperationStatusValue(); + /** + *
+     * progress or success/failure state of the operation
+     * 
+ * + * .events.OpStatus operationStatus = 4; + * @return The operationStatus. + */ + alice.dip.kafka.events.Events.OpStatus getOperationStatus(); + + /** + *
+     * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+     * 
+ * + * string operationStep = 5; + * @return The operationStep. + */ + java.lang.String getOperationStep(); + /** + *
+     * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+     * 
+ * + * string operationStep = 5; + * @return The bytes for operationStep. + */ + com.google.protobuf.ByteString + getOperationStepBytes(); + + /** + *
+     * progress or success/failure state of the current substep
+     * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return The enum numeric value on the wire for operationStepStatus. + */ + int getOperationStepStatusValue(); + /** + *
+     * progress or success/failure state of the current substep
+     * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return The operationStepStatus. + */ + alice.dip.kafka.events.Events.OpStatus getOperationStepStatus(); + + /** + * string environmentId = 7; + * @return The environmentId. + */ + java.lang.String getEnvironmentId(); + /** + * string environmentId = 7; + * @return The bytes for environmentId. + */ + com.google.protobuf.ByteString + getEnvironmentIdBytes(); + + /** + *
+     * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+     * 
+ * + * string payload = 8; + * @return The payload. + */ + java.lang.String getPayload(); + /** + *
+     * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+     * 
+ * + * string payload = 8; + * @return The bytes for payload. + */ + com.google.protobuf.ByteString + getPayloadBytes(); + } + /** + * Protobuf type {@code events.Ev_IntegratedServiceEvent} + */ + public static final class Ev_IntegratedServiceEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_IntegratedServiceEvent) + Ev_IntegratedServiceEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_IntegratedServiceEvent.class.getName()); + } + // Use Ev_IntegratedServiceEvent.newBuilder() to construct. + private Ev_IntegratedServiceEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_IntegratedServiceEvent() { + name_ = ""; + error_ = ""; + operationName_ = ""; + operationStatus_ = 0; + operationStep_ = ""; + operationStepStatus_ = 0; + environmentId_ = ""; + payload_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.class, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder.class); + } + + public static final int NAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + *
+     * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+     * 
+ * + * string name = 1; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+     * 
+ * + * string name = 1; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ERROR_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object error_ = ""; + /** + *
+     * error message, if any
+     * 
+ * + * string error = 2; + * @return The error. + */ + @java.lang.Override + public java.lang.String getError() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } + } + /** + *
+     * error message, if any
+     * 
+ * + * string error = 2; + * @return The bytes for error. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OPERATIONNAME_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object operationName_ = ""; + /** + *
+     * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()
+     * 
+ * + * string operationName = 3; + * @return The operationName. + */ + @java.lang.Override + public java.lang.String getOperationName() { + java.lang.Object ref = operationName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operationName_ = s; + return s; + } + } + /** + *
+     * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()
+     * 
+ * + * string operationName = 3; + * @return The bytes for operationName. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getOperationNameBytes() { + java.lang.Object ref = operationName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operationName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OPERATIONSTATUS_FIELD_NUMBER = 4; + private int operationStatus_ = 0; + /** + *
+     * progress or success/failure state of the operation
+     * 
+ * + * .events.OpStatus operationStatus = 4; + * @return The enum numeric value on the wire for operationStatus. + */ + @java.lang.Override public int getOperationStatusValue() { + return operationStatus_; + } + /** + *
+     * progress or success/failure state of the operation
+     * 
+ * + * .events.OpStatus operationStatus = 4; + * @return The operationStatus. + */ + @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getOperationStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + + public static final int OPERATIONSTEP_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object operationStep_ = ""; + /** + *
+     * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+     * 
+ * + * string operationStep = 5; + * @return The operationStep. + */ + @java.lang.Override + public java.lang.String getOperationStep() { + java.lang.Object ref = operationStep_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operationStep_ = s; + return s; + } + } + /** + *
+     * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+     * 
+ * + * string operationStep = 5; + * @return The bytes for operationStep. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getOperationStepBytes() { + java.lang.Object ref = operationStep_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operationStep_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int OPERATIONSTEPSTATUS_FIELD_NUMBER = 6; + private int operationStepStatus_ = 0; + /** + *
+     * progress or success/failure state of the current substep
+     * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return The enum numeric value on the wire for operationStepStatus. + */ + @java.lang.Override public int getOperationStepStatusValue() { + return operationStepStatus_; + } + /** + *
+     * progress or success/failure state of the current substep
+     * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return The operationStepStatus. + */ + @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getOperationStepStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStepStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + + public static final int ENVIRONMENTID_FIELD_NUMBER = 7; + @SuppressWarnings("serial") + private volatile java.lang.Object environmentId_ = ""; + /** + * string environmentId = 7; + * @return The environmentId. + */ + @java.lang.Override + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } + } + /** + * string environmentId = 7; + * @return The bytes for environmentId. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PAYLOAD_FIELD_NUMBER = 8; + @SuppressWarnings("serial") + private volatile java.lang.Object payload_ = ""; + /** + *
+     * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+     * 
+ * + * string payload = 8; + * @return The payload. + */ + @java.lang.Override + public java.lang.String getPayload() { + java.lang.Object ref = payload_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + payload_ = s; + return s; + } + } + /** + *
+     * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+     * 
+ * + * string payload = 8; + * @return The bytes for payload. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPayloadBytes() { + java.lang.Object ref = payload_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + payload_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationName_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, operationName_); + } + if (operationStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + output.writeEnum(4, operationStatus_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationStep_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, operationStep_); + } + if (operationStepStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + output.writeEnum(6, operationStepStatus_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 7, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(payload_)) { + 
com.google.protobuf.GeneratedMessage.writeString(output, 8, payload_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationName_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, operationName_); + } + if (operationStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, operationStatus_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationStep_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, operationStep_); + } + if (operationStepStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(6, operationStepStatus_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(7, environmentId_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(payload_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(8, payload_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent other = 
(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) obj; + + if (!getName() + .equals(other.getName())) return false; + if (!getError() + .equals(other.getError())) return false; + if (!getOperationName() + .equals(other.getOperationName())) return false; + if (operationStatus_ != other.operationStatus_) return false; + if (!getOperationStep() + .equals(other.getOperationStep())) return false; + if (operationStepStatus_ != other.operationStepStatus_) return false; + if (!getEnvironmentId() + .equals(other.getEnvironmentId())) return false; + if (!getPayload() + .equals(other.getPayload())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + hash = (37 * hash) + OPERATIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getOperationName().hashCode(); + hash = (37 * hash) + OPERATIONSTATUS_FIELD_NUMBER; + hash = (53 * hash) + operationStatus_; + hash = (37 * hash) + OPERATIONSTEP_FIELD_NUMBER; + hash = (53 * hash) + getOperationStep().hashCode(); + hash = (37 * hash) + OPERATIONSTEPSTATUS_FIELD_NUMBER; + hash = (53 * hash) + operationStepStatus_; + hash = (37 * hash) + ENVIRONMENTID_FIELD_NUMBER; + hash = (53 * hash) + getEnvironmentId().hashCode(); + hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; + hash = (53 * hash) + getPayload().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_IntegratedServiceEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_IntegratedServiceEvent) + alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.class, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_IntegratedServiceEvent.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + error_ = ""; + operationName_ = ""; + operationStatus_ = 0; + operationStep_ = ""; + operationStepStatus_ = 0; + environmentId_ = ""; + payload_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getDefaultInstanceForType() { + 
return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent build() { + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent result = new alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.error_ = error_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.operationName_ = operationName_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.operationStatus_ = operationStatus_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.operationStep_ = operationStep_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.operationStepStatus_ = operationStepStatus_; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.environmentId_ = environmentId_; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.payload_ = payload_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent other) { + if (other == 
alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance()) return this; + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getError().isEmpty()) { + error_ = other.error_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getOperationName().isEmpty()) { + operationName_ = other.operationName_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (other.operationStatus_ != 0) { + setOperationStatusValue(other.getOperationStatusValue()); + } + if (!other.getOperationStep().isEmpty()) { + operationStep_ = other.operationStep_; + bitField0_ |= 0x00000010; + onChanged(); + } + if (other.operationStepStatus_ != 0) { + setOperationStepStatusValue(other.getOperationStepStatusValue()); + } + if (!other.getEnvironmentId().isEmpty()) { + environmentId_ = other.environmentId_; + bitField0_ |= 0x00000040; + onChanged(); + } + if (!other.getPayload().isEmpty()) { + payload_ = other.payload_; + bitField0_ |= 0x00000080; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + error_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + operationName_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 32: { + operationStatus_ = input.readEnum(); + 
bitField0_ |= 0x00000008; + break; + } // case 32 + case 42: { + operationStep_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 48: { + operationStepStatus_ = input.readEnum(); + bitField0_ |= 0x00000020; + break; + } // case 48 + case 58: { + environmentId_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 66: { + payload_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000080; + break; + } // case 66 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+       * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+       * 
+ * + * string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+       * 
+ * + * string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+       * 
+ * + * string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+       * 
+ * + * string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * name of the context, usually the path of the callRole that calls a given integrated service function e.g. readout-dataflow.dd-scheduler.terminate
+       * 
+ * + * string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object error_ = ""; + /** + *
+       * error message, if any
+       * 
+ * + * string error = 2; + * @return The error. + */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * error message, if any
+       * 
+ * + * string error = 2; + * @return The bytes for error. + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * error message, if any
+       * 
+ * + * string error = 2; + * @param value The error to set. + * @return This builder for chaining. + */ + public Builder setError( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + error_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * error message, if any
+       * 
+ * + * string error = 2; + * @return This builder for chaining. + */ + public Builder clearError() { + error_ = getDefaultInstance().getError(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * error message, if any
+       * 
+ * + * string error = 2; + * @param value The bytes for error to set. + * @return This builder for chaining. + */ + public Builder setErrorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + error_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object operationName_ = ""; + /** + *
+       * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()"
+       * 
+ * + * string operationName = 3; + * @return The operationName. + */ + public java.lang.String getOperationName() { + java.lang.Object ref = operationName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operationName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()"
+       * 
+ * + * string operationName = 3; + * @return The bytes for operationName. + */ + public com.google.protobuf.ByteString + getOperationNameBytes() { + java.lang.Object ref = operationName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operationName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()"
+       * 
+ * + * string operationName = 3; + * @param value The operationName to set. + * @return This builder for chaining. + */ + public Builder setOperationName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + operationName_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()"
+       * 
+ * + * string operationName = 3; + * @return This builder for chaining. + */ + public Builder clearOperationName() { + operationName_ = getDefaultInstance().getOperationName(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * name of the operation, usually the name of the integrated service function being called e.g. ddsched.PartitionTerminate()"
+       * 
+ * + * string operationName = 3; + * @param value The bytes for operationName to set. + * @return This builder for chaining. + */ + public Builder setOperationNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + operationName_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private int operationStatus_ = 0; + /** + *
+       * progress or success/failure state of the operation
+       * 
+ * + * .events.OpStatus operationStatus = 4; + * @return The enum numeric value on the wire for operationStatus. + */ + @java.lang.Override public int getOperationStatusValue() { + return operationStatus_; + } + /** + *
+       * progress or success/failure state of the operation
+       * 
+ * + * .events.OpStatus operationStatus = 4; + * @param value The enum numeric value on the wire for operationStatus to set. + * @return This builder for chaining. + */ + public Builder setOperationStatusValue(int value) { + operationStatus_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * progress or success/failure state of the operation
+       * 
+ * + * .events.OpStatus operationStatus = 4; + * @return The operationStatus. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.OpStatus getOperationStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + /** + *
+       * progress or success/failure state of the operation
+       * 
+ * + * .events.OpStatus operationStatus = 4; + * @param value The operationStatus to set. + * @return This builder for chaining. + */ + public Builder setOperationStatus(alice.dip.kafka.events.Events.OpStatus value) { + if (value == null) { throw new NullPointerException(); } + bitField0_ |= 0x00000008; + operationStatus_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * progress or success/failure state of the operation
+       * 
+ * + * .events.OpStatus operationStatus = 4; + * @return This builder for chaining. + */ + public Builder clearOperationStatus() { + bitField0_ = (bitField0_ & ~0x00000008); + operationStatus_ = 0; + onChanged(); + return this; + } + + private java.lang.Object operationStep_ = ""; + /** + *
+       * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+       * 
+ * + * string operationStep = 5; + * @return The operationStep. + */ + public java.lang.String getOperationStep() { + java.lang.Object ref = operationStep_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + operationStep_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+       * 
+ * + * string operationStep = 5; + * @return The bytes for operationStep. + */ + public com.google.protobuf.ByteString + getOperationStepBytes() { + java.lang.Object ref = operationStep_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + operationStep_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+       * 
+ * + * string operationStep = 5; + * @param value The operationStep to set. + * @return This builder for chaining. + */ + public Builder setOperationStep( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + operationStep_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+       * 
+ * + * string operationStep = 5; + * @return This builder for chaining. + */ + public Builder clearOperationStep() { + operationStep_ = getDefaultInstance().getOperationStep(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + *
+       * if the operation has substeps, this is the name of the current substep, like an API call or polling phase
+       * 
+ * + * string operationStep = 5; + * @param value The bytes for operationStep to set. + * @return This builder for chaining. + */ + public Builder setOperationStepBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + operationStep_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + private int operationStepStatus_ = 0; + /** + *
+       * progress or success/failure state of the current substep
+       * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return The enum numeric value on the wire for operationStepStatus. + */ + @java.lang.Override public int getOperationStepStatusValue() { + return operationStepStatus_; + } + /** + *
+       * progress or success/failure state of the current substep
+       * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @param value The enum numeric value on the wire for operationStepStatus to set. + * @return This builder for chaining. + */ + public Builder setOperationStepStatusValue(int value) { + operationStepStatus_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * progress or success/failure state of the current substep
+       * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return The operationStepStatus. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.OpStatus getOperationStepStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStepStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + /** + *
+       * progress or success/failure state of the current substep
+       * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @param value The operationStepStatus to set. + * @return This builder for chaining. + */ + public Builder setOperationStepStatus(alice.dip.kafka.events.Events.OpStatus value) { + if (value == null) { throw new NullPointerException(); } + bitField0_ |= 0x00000020; + operationStepStatus_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * progress or success/failure state of the current substep
+       * 
+ * + * .events.OpStatus operationStepStatus = 6; + * @return This builder for chaining. + */ + public Builder clearOperationStepStatus() { + bitField0_ = (bitField0_ & ~0x00000020); + operationStepStatus_ = 0; + onChanged(); + return this; + } + + private java.lang.Object environmentId_ = ""; + /** + * string environmentId = 7; + * @return The environmentId. + */ + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string environmentId = 7; + * @return The bytes for environmentId. + */ + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string environmentId = 7; + * @param value The environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + environmentId_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * string environmentId = 7; + * @return This builder for chaining. + */ + public Builder clearEnvironmentId() { + environmentId_ = getDefaultInstance().getEnvironmentId(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + * string environmentId = 7; + * @param value The bytes for environmentId to set. + * @return This builder for chaining. 
+ */ + public Builder setEnvironmentIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + environmentId_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + + private java.lang.Object payload_ = ""; + /** + *
+       * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+       * 
+ * + * string payload = 8; + * @return The payload. + */ + public java.lang.String getPayload() { + java.lang.Object ref = payload_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + payload_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+       * 
+ * + * string payload = 8; + * @return The bytes for payload. + */ + public com.google.protobuf.ByteString + getPayloadBytes() { + java.lang.Object ref = payload_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + payload_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+       * 
+ * + * string payload = 8; + * @param value The payload to set. + * @return This builder for chaining. + */ + public Builder setPayload( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + payload_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + *
+       * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+       * 
+ * + * string payload = 8; + * @return This builder for chaining. + */ + public Builder clearPayload() { + payload_ = getDefaultInstance().getPayload(); + bitField0_ = (bitField0_ & ~0x00000080); + onChanged(); + return this; + } + /** + *
+       * any additional payload, depending on the integrated service; there is no schema, it can even be the raw return structure of a remote API call
+       * 
+ * + * string payload = 8; + * @param value The bytes for payload to set. + * @return This builder for chaining. + */ + public Builder setPayloadBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + payload_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_IntegratedServiceEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_IntegratedServiceEvent) + private static final alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent(); + } + + public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_IntegratedServiceEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public 
alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_RunEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_RunEvent) + com.google.protobuf.MessageOrBuilder { + + /** + * string environmentId = 1; + * @return The environmentId. + */ + java.lang.String getEnvironmentId(); + /** + * string environmentId = 1; + * @return The bytes for environmentId. + */ + com.google.protobuf.ByteString + getEnvironmentIdBytes(); + + /** + * uint32 runNumber = 2; + * @return The runNumber. + */ + int getRunNumber(); + + /** + * string state = 3; + * @return The state. + */ + java.lang.String getState(); + /** + * string state = 3; + * @return The bytes for state. + */ + com.google.protobuf.ByteString + getStateBytes(); + + /** + * string error = 4; + * @return The error. + */ + java.lang.String getError(); + /** + * string error = 4; + * @return The bytes for error. + */ + com.google.protobuf.ByteString + getErrorBytes(); + + /** + * string transition = 5; + * @return The transition. + */ + java.lang.String getTransition(); + /** + * string transition = 5; + * @return The bytes for transition. + */ + com.google.protobuf.ByteString + getTransitionBytes(); + + /** + * .events.OpStatus transitionStatus = 6; + * @return The enum numeric value on the wire for transitionStatus. + */ + int getTransitionStatusValue(); + /** + * .events.OpStatus transitionStatus = 6; + * @return The transitionStatus. + */ + alice.dip.kafka.events.Events.OpStatus getTransitionStatus(); + + /** + * .common.User lastRequestUser = 8; + * @return Whether the lastRequestUser field is set. + */ + boolean hasLastRequestUser(); + /** + * .common.User lastRequestUser = 8; + * @return The lastRequestUser. 
+ */ + alice.dip.kafka.events.Common.User getLastRequestUser(); + /** + * .common.User lastRequestUser = 8; + */ + alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder(); + } + /** + * Protobuf type {@code events.Ev_RunEvent} + */ + public static final class Ev_RunEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_RunEvent) + Ev_RunEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_RunEvent.class.getName()); + } + // Use Ev_RunEvent.newBuilder() to construct. + private Ev_RunEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_RunEvent() { + environmentId_ = ""; + state_ = ""; + error_ = ""; + transition_ = ""; + transitionStatus_ = 0; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_RunEvent.class, alice.dip.kafka.events.Events.Ev_RunEvent.Builder.class); + } + + private int bitField0_; + public static final int ENVIRONMENTID_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object environmentId_ = ""; + /** + * string environmentId = 1; + * @return The environmentId. 
+ */ + @java.lang.Override + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } + } + /** + * string environmentId = 1; + * @return The bytes for environmentId. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int RUNNUMBER_FIELD_NUMBER = 2; + private int runNumber_ = 0; + /** + * uint32 runNumber = 2; + * @return The runNumber. + */ + @java.lang.Override + public int getRunNumber() { + return runNumber_; + } + + public static final int STATE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object state_ = ""; + /** + * string state = 3; + * @return The state. + */ + @java.lang.Override + public java.lang.String getState() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } + } + /** + * string state = 3; + * @return The bytes for state. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int ERROR_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private volatile java.lang.Object error_ = ""; + /** + * string error = 4; + * @return The error. + */ + @java.lang.Override + public java.lang.String getError() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } + } + /** + * string error = 4; + * @return The bytes for error. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSITION_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object transition_ = ""; + /** + * string transition = 5; + * @return The transition. + */ + @java.lang.Override + public java.lang.String getTransition() { + java.lang.Object ref = transition_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + transition_ = s; + return s; + } + } + /** + * string transition = 5; + * @return The bytes for transition. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getTransitionBytes() { + java.lang.Object ref = transition_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + transition_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSITIONSTATUS_FIELD_NUMBER = 6; + private int transitionStatus_ = 0; + /** + * .events.OpStatus transitionStatus = 6; + * @return The enum numeric value on the wire for transitionStatus. + */ + @java.lang.Override public int getTransitionStatusValue() { + return transitionStatus_; + } + /** + * .events.OpStatus transitionStatus = 6; + * @return The transitionStatus. + */ + @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + + public static final int LASTREQUESTUSER_FIELD_NUMBER = 8; + private alice.dip.kafka.events.Common.User lastRequestUser_; + /** + * .common.User lastRequestUser = 8; + * @return Whether the lastRequestUser field is set. + */ + @java.lang.Override + public boolean hasLastRequestUser() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .common.User lastRequestUser = 8; + * @return The lastRequestUser. + */ + @java.lang.Override + public alice.dip.kafka.events.Common.User getLastRequestUser() { + return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } + /** + * .common.User lastRequestUser = 8; + */ + @java.lang.Override + public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() { + return lastRequestUser_ == null ? 
alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 1, environmentId_); + } + if (runNumber_ != 0) { + output.writeUInt32(2, runNumber_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 3, state_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 4, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transition_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 5, transition_); + } + if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + output.writeEnum(6, transitionStatus_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(8, getLastRequestUser()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(1, environmentId_); + } + if (runNumber_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, runNumber_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(state_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(3, state_); + } + if 
(!com.google.protobuf.GeneratedMessage.isStringEmpty(error_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(4, error_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transition_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(5, transition_); + } + if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(6, transitionStatus_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, getLastRequestUser()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_RunEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_RunEvent other = (alice.dip.kafka.events.Events.Ev_RunEvent) obj; + + if (!getEnvironmentId() + .equals(other.getEnvironmentId())) return false; + if (getRunNumber() + != other.getRunNumber()) return false; + if (!getState() + .equals(other.getState())) return false; + if (!getError() + .equals(other.getError())) return false; + if (!getTransition() + .equals(other.getTransition())) return false; + if (transitionStatus_ != other.transitionStatus_) return false; + if (hasLastRequestUser() != other.hasLastRequestUser()) return false; + if (hasLastRequestUser()) { + if (!getLastRequestUser() + .equals(other.getLastRequestUser())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ENVIRONMENTID_FIELD_NUMBER; + hash = (53 * hash) + 
getEnvironmentId().hashCode(); + hash = (37 * hash) + RUNNUMBER_FIELD_NUMBER; + hash = (53 * hash) + getRunNumber(); + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + getState().hashCode(); + hash = (37 * hash) + ERROR_FIELD_NUMBER; + hash = (53 * hash) + getError().hashCode(); + hash = (37 * hash) + TRANSITION_FIELD_NUMBER; + hash = (53 * hash) + getTransition().hashCode(); + hash = (37 * hash) + TRANSITIONSTATUS_FIELD_NUMBER; + hash = (53 * hash) + transitionStatus_; + if (hasLastRequestUser()) { + hash = (37 * hash) + LASTREQUESTUSER_FIELD_NUMBER; + hash = (53 * hash) + getLastRequestUser().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_RunEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_RunEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(alice.dip.kafka.events.Events.Ev_RunEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Ev_RunEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_RunEvent) + alice.dip.kafka.events.Events.Ev_RunEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_RunEvent.class, alice.dip.kafka.events.Events.Ev_RunEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_RunEvent.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage + .alwaysUseFieldBuilders) { + internalGetLastRequestUserFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + environmentId_ = ""; + runNumber_ = 0; + state_ = ""; + error_ = ""; + transition_ = ""; + transitionStatus_ = 0; + lastRequestUser_ = null; + if 
(lastRequestUserBuilder_ != null) { + lastRequestUserBuilder_.dispose(); + lastRequestUserBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEvent build() { + alice.dip.kafka.events.Events.Ev_RunEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_RunEvent result = new alice.dip.kafka.events.Events.Ev_RunEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_RunEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.environmentId_ = environmentId_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.runNumber_ = runNumber_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.state_ = state_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.error_ = error_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.transition_ = transition_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.transitionStatus_ = transitionStatus_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000040) != 0)) { + result.lastRequestUser_ = lastRequestUserBuilder_ == null + ? 
lastRequestUser_ + : lastRequestUserBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_RunEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_RunEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_RunEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance()) return this; + if (!other.getEnvironmentId().isEmpty()) { + environmentId_ = other.environmentId_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.getRunNumber() != 0) { + setRunNumber(other.getRunNumber()); + } + if (!other.getState().isEmpty()) { + state_ = other.state_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (!other.getError().isEmpty()) { + error_ = other.error_; + bitField0_ |= 0x00000008; + onChanged(); + } + if (!other.getTransition().isEmpty()) { + transition_ = other.transition_; + bitField0_ |= 0x00000010; + onChanged(); + } + if (other.transitionStatus_ != 0) { + setTransitionStatusValue(other.getTransitionStatusValue()); + } + if (other.hasLastRequestUser()) { + mergeLastRequestUser(other.getLastRequestUser()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + environmentId_ = input.readStringRequireUtf8(); + bitField0_ |= 
0x00000001; + break; + } // case 10 + case 16: { + runNumber_ = input.readUInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 26: { + state_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + error_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + transition_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 48: { + transitionStatus_ = input.readEnum(); + bitField0_ |= 0x00000020; + break; + } // case 48 + case 66: { + input.readMessage( + internalGetLastRequestUserFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000040; + break; + } // case 66 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object environmentId_ = ""; + /** + * string environmentId = 1; + * @return The environmentId. + */ + public java.lang.String getEnvironmentId() { + java.lang.Object ref = environmentId_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + environmentId_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string environmentId = 1; + * @return The bytes for environmentId. 
+ */ + public com.google.protobuf.ByteString + getEnvironmentIdBytes() { + java.lang.Object ref = environmentId_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + environmentId_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string environmentId = 1; + * @param value The environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentId( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + environmentId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string environmentId = 1; + * @return This builder for chaining. + */ + public Builder clearEnvironmentId() { + environmentId_ = getDefaultInstance().getEnvironmentId(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string environmentId = 1; + * @param value The bytes for environmentId to set. + * @return This builder for chaining. + */ + public Builder setEnvironmentIdBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + environmentId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int runNumber_ ; + /** + * uint32 runNumber = 2; + * @return The runNumber. + */ + @java.lang.Override + public int getRunNumber() { + return runNumber_; + } + /** + * uint32 runNumber = 2; + * @param value The runNumber to set. + * @return This builder for chaining. + */ + public Builder setRunNumber(int value) { + + runNumber_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * uint32 runNumber = 2; + * @return This builder for chaining. 
+ */ + public Builder clearRunNumber() { + bitField0_ = (bitField0_ & ~0x00000002); + runNumber_ = 0; + onChanged(); + return this; + } + + private java.lang.Object state_ = ""; + /** + * string state = 3; + * @return The state. + */ + public java.lang.String getState() { + java.lang.Object ref = state_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + state_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string state = 3; + * @return The bytes for state. + */ + public com.google.protobuf.ByteString + getStateBytes() { + java.lang.Object ref = state_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + state_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string state = 3; + * @param value The state to set. + * @return This builder for chaining. + */ + public Builder setState( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + state_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * string state = 3; + * @return This builder for chaining. + */ + public Builder clearState() { + state_ = getDefaultInstance().getState(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + * string state = 3; + * @param value The bytes for state to set. + * @return This builder for chaining. + */ + public Builder setStateBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + state_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private java.lang.Object error_ = ""; + /** + * string error = 4; + * @return The error. 
+ */ + public java.lang.String getError() { + java.lang.Object ref = error_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + error_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string error = 4; + * @return The bytes for error. + */ + public com.google.protobuf.ByteString + getErrorBytes() { + java.lang.Object ref = error_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + error_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string error = 4; + * @param value The error to set. + * @return This builder for chaining. + */ + public Builder setError( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + error_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * string error = 4; + * @return This builder for chaining. + */ + public Builder clearError() { + error_ = getDefaultInstance().getError(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + * string error = 4; + * @param value The bytes for error to set. + * @return This builder for chaining. + */ + public Builder setErrorBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + error_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private java.lang.Object transition_ = ""; + /** + * string transition = 5; + * @return The transition. 
+ */ + public java.lang.String getTransition() { + java.lang.Object ref = transition_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + transition_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string transition = 5; + * @return The bytes for transition. + */ + public com.google.protobuf.ByteString + getTransitionBytes() { + java.lang.Object ref = transition_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + transition_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string transition = 5; + * @param value The transition to set. + * @return This builder for chaining. + */ + public Builder setTransition( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + transition_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * string transition = 5; + * @return This builder for chaining. + */ + public Builder clearTransition() { + transition_ = getDefaultInstance().getTransition(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + * string transition = 5; + * @param value The bytes for transition to set. + * @return This builder for chaining. + */ + public Builder setTransitionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + transition_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + private int transitionStatus_ = 0; + /** + * .events.OpStatus transitionStatus = 6; + * @return The enum numeric value on the wire for transitionStatus. 
+ */ + @java.lang.Override public int getTransitionStatusValue() { + return transitionStatus_; + } + /** + * .events.OpStatus transitionStatus = 6; + * @param value The enum numeric value on the wire for transitionStatus to set. + * @return This builder for chaining. + */ + public Builder setTransitionStatusValue(int value) { + transitionStatus_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * .events.OpStatus transitionStatus = 6; + * @return The transitionStatus. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() { + alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_); + return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result; + } + /** + * .events.OpStatus transitionStatus = 6; + * @param value The transitionStatus to set. + * @return This builder for chaining. + */ + public Builder setTransitionStatus(alice.dip.kafka.events.Events.OpStatus value) { + if (value == null) { throw new NullPointerException(); } + bitField0_ |= 0x00000020; + transitionStatus_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .events.OpStatus transitionStatus = 6; + * @return This builder for chaining. + */ + public Builder clearTransitionStatus() { + bitField0_ = (bitField0_ & ~0x00000020); + transitionStatus_ = 0; + onChanged(); + return this; + } + + private alice.dip.kafka.events.Common.User lastRequestUser_; + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> lastRequestUserBuilder_; + /** + * .common.User lastRequestUser = 8; + * @return Whether the lastRequestUser field is set. + */ + public boolean hasLastRequestUser() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + * .common.User lastRequestUser = 8; + * @return The lastRequestUser. 
+ */ + public alice.dip.kafka.events.Common.User getLastRequestUser() { + if (lastRequestUserBuilder_ == null) { + return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } else { + return lastRequestUserBuilder_.getMessage(); + } + } + /** + * .common.User lastRequestUser = 8; + */ + public Builder setLastRequestUser(alice.dip.kafka.events.Common.User value) { + if (lastRequestUserBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + lastRequestUser_ = value; + } else { + lastRequestUserBuilder_.setMessage(value); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * .common.User lastRequestUser = 8; + */ + public Builder setLastRequestUser( + alice.dip.kafka.events.Common.User.Builder builderForValue) { + if (lastRequestUserBuilder_ == null) { + lastRequestUser_ = builderForValue.build(); + } else { + lastRequestUserBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * .common.User lastRequestUser = 8; + */ + public Builder mergeLastRequestUser(alice.dip.kafka.events.Common.User value) { + if (lastRequestUserBuilder_ == null) { + if (((bitField0_ & 0x00000040) != 0) && + lastRequestUser_ != null && + lastRequestUser_ != alice.dip.kafka.events.Common.User.getDefaultInstance()) { + getLastRequestUserBuilder().mergeFrom(value); + } else { + lastRequestUser_ = value; + } + } else { + lastRequestUserBuilder_.mergeFrom(value); + } + if (lastRequestUser_ != null) { + bitField0_ |= 0x00000040; + onChanged(); + } + return this; + } + /** + * .common.User lastRequestUser = 8; + */ + public Builder clearLastRequestUser() { + bitField0_ = (bitField0_ & ~0x00000040); + lastRequestUser_ = null; + if (lastRequestUserBuilder_ != null) { + lastRequestUserBuilder_.dispose(); + lastRequestUserBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .common.User lastRequestUser = 8; + */ + public 
alice.dip.kafka.events.Common.User.Builder getLastRequestUserBuilder() { + bitField0_ |= 0x00000040; + onChanged(); + return internalGetLastRequestUserFieldBuilder().getBuilder(); + } + /** + * .common.User lastRequestUser = 8; + */ + public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() { + if (lastRequestUserBuilder_ != null) { + return lastRequestUserBuilder_.getMessageOrBuilder(); + } else { + return lastRequestUser_ == null ? + alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_; + } + } + /** + * .common.User lastRequestUser = 8; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> + internalGetLastRequestUserFieldBuilder() { + if (lastRequestUserBuilder_ == null) { + lastRequestUserBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder>( + getLastRequestUser(), + getParentForChildren(), + isClean()); + lastRequestUser_ = null; + } + return lastRequestUserBuilder_; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_RunEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_RunEvent) + private static final alice.dip.kafka.events.Events.Ev_RunEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_RunEvent(); + } + + public static alice.dip.kafka.events.Events.Ev_RunEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_RunEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + 
builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface Ev_BeamModeEventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Ev_BeamModeEvent) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * milliseconds since epoch when the beam mode change happened
+     * 
+ * + * int64 timestamp = 1; + * @return The timestamp. + */ + long getTimestamp(); + + /** + * .common.BeamInfo beamInfo = 2; + * @return Whether the beamInfo field is set. + */ + boolean hasBeamInfo(); + /** + * .common.BeamInfo beamInfo = 2; + * @return The beamInfo. + */ + alice.dip.kafka.events.Common.BeamInfo getBeamInfo(); + /** + * .common.BeamInfo beamInfo = 2; + */ + alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder(); + } + /** + *
+   * *
+   * Beam mode changes are propagated as Kafka events and to be sent by the BKP-LHC-Client on a dedicated topic
+   * e.g. dip.lhc.beam_mode
+   * 
+ * + * Protobuf type {@code events.Ev_BeamModeEvent} + */ + public static final class Ev_BeamModeEvent extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Ev_BeamModeEvent) + Ev_BeamModeEventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Ev_BeamModeEvent.class.getName()); + } + // Use Ev_BeamModeEvent.newBuilder() to construct. + private Ev_BeamModeEvent(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Ev_BeamModeEvent() { + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_BeamModeEvent.class, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder.class); + } + + private int bitField0_; + public static final int TIMESTAMP_FIELD_NUMBER = 1; + private long timestamp_ = 0L; + /** + *
+     * milliseconds since epoch when the beam mode change happened
+     * 
+ * + * int64 timestamp = 1; + * @return The timestamp. + */ + @java.lang.Override + public long getTimestamp() { + return timestamp_; + } + + public static final int BEAMINFO_FIELD_NUMBER = 2; + private alice.dip.kafka.events.Common.BeamInfo beamInfo_; + /** + * .common.BeamInfo beamInfo = 2; + * @return Whether the beamInfo field is set. + */ + @java.lang.Override + public boolean hasBeamInfo() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .common.BeamInfo beamInfo = 2; + * @return The beamInfo. + */ + @java.lang.Override + public alice.dip.kafka.events.Common.BeamInfo getBeamInfo() { + return beamInfo_ == null ? alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_; + } + /** + * .common.BeamInfo beamInfo = 2; + */ + @java.lang.Override + public alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() { + return beamInfo_ == null ? alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (timestamp_ != 0L) { + output.writeInt64(1, timestamp_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(2, getBeamInfo()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (timestamp_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, timestamp_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getBeamInfo()); + } + size += getUnknownFields().getSerializedSize(); 
+ memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Ev_BeamModeEvent)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Ev_BeamModeEvent other = (alice.dip.kafka.events.Events.Ev_BeamModeEvent) obj; + + if (getTimestamp() + != other.getTimestamp()) return false; + if (hasBeamInfo() != other.hasBeamInfo()) return false; + if (hasBeamInfo()) { + if (!getBeamInfo() + .equals(other.getBeamInfo())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestamp()); + if (hasBeamInfo()) { + hash = (37 * hash) + BEAMINFO_FIELD_NUMBER; + hash = (53 * hash) + getBeamInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, 
input); + } + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_BeamModeEvent prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * *
+     * Beam mode changes are propagated as Kafka events and to be sent by the BKP-LHC-Client on a dedicated topic
+     * e.g. dip.lhc.beam_mode
+     * 
+ * + * Protobuf type {@code events.Ev_BeamModeEvent} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Ev_BeamModeEvent) + alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Ev_BeamModeEvent.class, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Ev_BeamModeEvent.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage + .alwaysUseFieldBuilders) { + internalGetBeamInfoFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + timestamp_ = 0L; + beamInfo_ = null; + if (beamInfoBuilder_ != null) { + beamInfoBuilder_.dispose(); + beamInfoBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEvent getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } + + @java.lang.Override + public 
alice.dip.kafka.events.Events.Ev_BeamModeEvent build() { + alice.dip.kafka.events.Events.Ev_BeamModeEvent result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEvent buildPartial() { + alice.dip.kafka.events.Events.Ev_BeamModeEvent result = new alice.dip.kafka.events.Events.Ev_BeamModeEvent(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Ev_BeamModeEvent result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.timestamp_ = timestamp_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.beamInfo_ = beamInfoBuilder_ == null + ? beamInfo_ + : beamInfoBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Ev_BeamModeEvent) { + return mergeFrom((alice.dip.kafka.events.Events.Ev_BeamModeEvent)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_BeamModeEvent other) { + if (other == alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance()) return this; + if (other.getTimestamp() != 0L) { + setTimestamp(other.getTimestamp()); + } + if (other.hasBeamInfo()) { + mergeBeamInfo(other.getBeamInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry 
== null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + timestamp_ = input.readInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + input.readMessage( + internalGetBeamInfoFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private long timestamp_ ; + /** + *
+       * milliseconds since epoch when the beam mode change happened
+       * 
+ * + * int64 timestamp = 1; + * @return The timestamp. + */ + @java.lang.Override + public long getTimestamp() { + return timestamp_; + } + /** + *
+       * milliseconds since epoch when the beam mode change happened
+       * 
+ * + * int64 timestamp = 1; + * @param value The timestamp to set. + * @return This builder for chaining. + */ + public Builder setTimestamp(long value) { + + timestamp_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * milliseconds since epoch when the beam mode change happened
+       * 
+ * + * int64 timestamp = 1; + * @return This builder for chaining. + */ + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000001); + timestamp_ = 0L; + onChanged(); + return this; + } + + private alice.dip.kafka.events.Common.BeamInfo beamInfo_; + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.BeamInfo, alice.dip.kafka.events.Common.BeamInfo.Builder, alice.dip.kafka.events.Common.BeamInfoOrBuilder> beamInfoBuilder_; + /** + * .common.BeamInfo beamInfo = 2; + * @return Whether the beamInfo field is set. + */ + public boolean hasBeamInfo() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * .common.BeamInfo beamInfo = 2; + * @return The beamInfo. + */ + public alice.dip.kafka.events.Common.BeamInfo getBeamInfo() { + if (beamInfoBuilder_ == null) { + return beamInfo_ == null ? alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_; + } else { + return beamInfoBuilder_.getMessage(); + } + } + /** + * .common.BeamInfo beamInfo = 2; + */ + public Builder setBeamInfo(alice.dip.kafka.events.Common.BeamInfo value) { + if (beamInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + beamInfo_ = value; + } else { + beamInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * .common.BeamInfo beamInfo = 2; + */ + public Builder setBeamInfo( + alice.dip.kafka.events.Common.BeamInfo.Builder builderForValue) { + if (beamInfoBuilder_ == null) { + beamInfo_ = builderForValue.build(); + } else { + beamInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * .common.BeamInfo beamInfo = 2; + */ + public Builder mergeBeamInfo(alice.dip.kafka.events.Common.BeamInfo value) { + if (beamInfoBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + beamInfo_ != null && + beamInfo_ != alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance()) { + 
getBeamInfoBuilder().mergeFrom(value); + } else { + beamInfo_ = value; + } + } else { + beamInfoBuilder_.mergeFrom(value); + } + if (beamInfo_ != null) { + bitField0_ |= 0x00000002; + onChanged(); + } + return this; + } + /** + * .common.BeamInfo beamInfo = 2; + */ + public Builder clearBeamInfo() { + bitField0_ = (bitField0_ & ~0x00000002); + beamInfo_ = null; + if (beamInfoBuilder_ != null) { + beamInfoBuilder_.dispose(); + beamInfoBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .common.BeamInfo beamInfo = 2; + */ + public alice.dip.kafka.events.Common.BeamInfo.Builder getBeamInfoBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return internalGetBeamInfoFieldBuilder().getBuilder(); + } + /** + * .common.BeamInfo beamInfo = 2; + */ + public alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() { + if (beamInfoBuilder_ != null) { + return beamInfoBuilder_.getMessageOrBuilder(); + } else { + return beamInfo_ == null ? + alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_; + } + } + /** + * .common.BeamInfo beamInfo = 2; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.BeamInfo, alice.dip.kafka.events.Common.BeamInfo.Builder, alice.dip.kafka.events.Common.BeamInfoOrBuilder> + internalGetBeamInfoFieldBuilder() { + if (beamInfoBuilder_ == null) { + beamInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Common.BeamInfo, alice.dip.kafka.events.Common.BeamInfo.Builder, alice.dip.kafka.events.Common.BeamInfoOrBuilder>( + getBeamInfo(), + getParentForChildren(), + isClean()); + beamInfo_ = null; + } + return beamInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:events.Ev_BeamModeEvent) + } + + // @@protoc_insertion_point(class_scope:events.Ev_BeamModeEvent) + private static final alice.dip.kafka.events.Events.Ev_BeamModeEvent DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_BeamModeEvent(); + 
} + + public static alice.dip.kafka.events.Events.Ev_BeamModeEvent getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ev_BeamModeEvent parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEvent getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface EventOrBuilder extends + // @@protoc_insertion_point(interface_extends:events.Event) + com.google.protobuf.MessageOrBuilder { + + /** + * int64 timestamp = 1; + * @return The timestamp. + */ + long getTimestamp(); + + /** + * int64 timestampNano = 2; + * @return The timestampNano. + */ + long getTimestampNano(); + + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + * @return Whether the environmentEvent field is set. + */ + boolean hasEnvironmentEvent(); + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + * @return The environmentEvent. 
+ */ + alice.dip.kafka.events.Events.Ev_EnvironmentEvent getEnvironmentEvent(); + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder(); + + /** + * .events.Ev_TaskEvent taskEvent = 12; + * @return Whether the taskEvent field is set. + */ + boolean hasTaskEvent(); + /** + * .events.Ev_TaskEvent taskEvent = 12; + * @return The taskEvent. + */ + alice.dip.kafka.events.Events.Ev_TaskEvent getTaskEvent(); + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder(); + + /** + * .events.Ev_RoleEvent roleEvent = 13; + * @return Whether the roleEvent field is set. + */ + boolean hasRoleEvent(); + /** + * .events.Ev_RoleEvent roleEvent = 13; + * @return The roleEvent. + */ + alice.dip.kafka.events.Events.Ev_RoleEvent getRoleEvent(); + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder(); + + /** + * .events.Ev_CallEvent callEvent = 14; + * @return Whether the callEvent field is set. + */ + boolean hasCallEvent(); + /** + * .events.Ev_CallEvent callEvent = 14; + * @return The callEvent. + */ + alice.dip.kafka.events.Events.Ev_CallEvent getCallEvent(); + /** + * .events.Ev_CallEvent callEvent = 14; + */ + alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder(); + + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + * @return Whether the integratedServiceEvent field is set. + */ + boolean hasIntegratedServiceEvent(); + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + * @return The integratedServiceEvent. 
+ */ + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent(); + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder(); + + /** + * .events.Ev_RunEvent runEvent = 16; + * @return Whether the runEvent field is set. + */ + boolean hasRunEvent(); + /** + * .events.Ev_RunEvent runEvent = 16; + * @return The runEvent. + */ + alice.dip.kafka.events.Events.Ev_RunEvent getRunEvent(); + /** + * .events.Ev_RunEvent runEvent = 16; + */ + alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder(); + + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + * @return Whether the frameworkEvent field is set. + */ + boolean hasFrameworkEvent(); + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + * @return The frameworkEvent. + */ + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent(); + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder(); + + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + * @return Whether the mesosHeartbeatEvent field is set. + */ + boolean hasMesosHeartbeatEvent(); + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + * @return The mesosHeartbeatEvent. + */ + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent(); + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder(); + + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + * @return Whether the coreStartEvent field is set. + */ + boolean hasCoreStartEvent(); + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + * @return The coreStartEvent. 
+ */ + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getCoreStartEvent(); + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder(); + + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + * @return Whether the beamModeEvent field is set. + */ + boolean hasBeamModeEvent(); + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + * @return The beamModeEvent. + */ + alice.dip.kafka.events.Events.Ev_BeamModeEvent getBeamModeEvent(); + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder(); + + alice.dip.kafka.events.Events.Event.PayloadCase getPayloadCase(); + } + /** + * Protobuf type {@code events.Event} + */ + public static final class Event extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:events.Event) + EventOrBuilder { + private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 32, + /* patch= */ 1, + /* suffix= */ "", + Event.class.getName()); + } + // Use Event.newBuilder() to construct. 
+ private Event(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private Event() { + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Event_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Event_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Event.class, alice.dip.kafka.events.Events.Event.Builder.class); + } + + private int payloadCase_ = 0; + @SuppressWarnings("serial") + private java.lang.Object payload_; + public enum PayloadCase + implements com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + ENVIRONMENTEVENT(11), + TASKEVENT(12), + ROLEEVENT(13), + CALLEVENT(14), + INTEGRATEDSERVICEEVENT(15), + RUNEVENT(16), + FRAMEWORKEVENT(101), + MESOSHEARTBEATEVENT(102), + CORESTARTEVENT(103), + BEAMMODEEVENT(110), + PAYLOAD_NOT_SET(0); + private final int value; + private PayloadCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static PayloadCase valueOf(int value) { + return forNumber(value); + } + + public static PayloadCase forNumber(int value) { + switch (value) { + case 11: return ENVIRONMENTEVENT; + case 12: return TASKEVENT; + case 13: return ROLEEVENT; + case 14: return CALLEVENT; + case 15: return INTEGRATEDSERVICEEVENT; + case 16: return RUNEVENT; + case 101: return FRAMEWORKEVENT; + case 102: return MESOSHEARTBEATEVENT; + case 103: return CORESTARTEVENT; + case 110: return BEAMMODEEVENT; + case 0: return PAYLOAD_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public PayloadCase + getPayloadCase() { + return PayloadCase.forNumber( + payloadCase_); + } + + public static final int TIMESTAMP_FIELD_NUMBER = 1; + private long timestamp_ = 0L; + /** + * int64 timestamp = 1; + * @return The timestamp. + */ + @java.lang.Override + public long getTimestamp() { + return timestamp_; + } + + public static final int TIMESTAMPNANO_FIELD_NUMBER = 2; + private long timestampNano_ = 0L; + /** + * int64 timestampNano = 2; + * @return The timestampNano. + */ + @java.lang.Override + public long getTimestampNano() { + return timestampNano_; + } + + public static final int ENVIRONMENTEVENT_FIELD_NUMBER = 11; + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + * @return Whether the environmentEvent field is set. + */ + @java.lang.Override + public boolean hasEnvironmentEvent() { + return payloadCase_ == 11; + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + * @return The environmentEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getEnvironmentEvent() { + if (payloadCase_ == 11) { + return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder() { + if (payloadCase_ == 11) { + return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } + + public static final int TASKEVENT_FIELD_NUMBER = 12; + /** + * .events.Ev_TaskEvent taskEvent = 12; + * @return Whether the taskEvent field is set. + */ + @java.lang.Override + public boolean hasTaskEvent() { + return payloadCase_ == 12; + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + * @return The taskEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEvent getTaskEvent() { + if (payloadCase_ == 12) { + return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder() { + if (payloadCase_ == 12) { + return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } + + public static final int ROLEEVENT_FIELD_NUMBER = 13; + /** + * .events.Ev_RoleEvent roleEvent = 13; + * @return Whether the roleEvent field is set. + */ + @java.lang.Override + public boolean hasRoleEvent() { + return payloadCase_ == 13; + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + * @return The roleEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEvent getRoleEvent() { + if (payloadCase_ == 13) { + return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder() { + if (payloadCase_ == 13) { + return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } + + public static final int CALLEVENT_FIELD_NUMBER = 14; + /** + * .events.Ev_CallEvent callEvent = 14; + * @return Whether the callEvent field is set. + */ + @java.lang.Override + public boolean hasCallEvent() { + return payloadCase_ == 14; + } + /** + * .events.Ev_CallEvent callEvent = 14; + * @return The callEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEvent getCallEvent() { + if (payloadCase_ == 14) { + return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder() { + if (payloadCase_ == 14) { + return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } + + public static final int INTEGRATEDSERVICEEVENT_FIELD_NUMBER = 15; + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + * @return Whether the integratedServiceEvent field is set. + */ + @java.lang.Override + public boolean hasIntegratedServiceEvent() { + return payloadCase_ == 15; + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + * @return The integratedServiceEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent() { + if (payloadCase_ == 15) { + return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder() { + if (payloadCase_ == 15) { + return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } + + public static final int RUNEVENT_FIELD_NUMBER = 16; + /** + * .events.Ev_RunEvent runEvent = 16; + * @return Whether the runEvent field is set. + */ + @java.lang.Override + public boolean hasRunEvent() { + return payloadCase_ == 16; + } + /** + * .events.Ev_RunEvent runEvent = 16; + * @return The runEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEvent getRunEvent() { + if (payloadCase_ == 16) { + return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder() { + if (payloadCase_ == 16) { + return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } + + public static final int FRAMEWORKEVENT_FIELD_NUMBER = 101; + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + * @return Whether the frameworkEvent field is set. 
+ */ + @java.lang.Override + public boolean hasFrameworkEvent() { + return payloadCase_ == 101; + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + * @return The frameworkEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent() { + if (payloadCase_ == 101) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder() { + if (payloadCase_ == 101) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } + + public static final int MESOSHEARTBEATEVENT_FIELD_NUMBER = 102; + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + * @return Whether the mesosHeartbeatEvent field is set. + */ + @java.lang.Override + public boolean hasMesosHeartbeatEvent() { + return payloadCase_ == 102; + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + * @return The mesosHeartbeatEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent() { + if (payloadCase_ == 102) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder() { + if (payloadCase_ == 102) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } + + public static final int CORESTARTEVENT_FIELD_NUMBER = 103; + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + * @return Whether the coreStartEvent field is set. + */ + @java.lang.Override + public boolean hasCoreStartEvent() { + return payloadCase_ == 103; + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + * @return The coreStartEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getCoreStartEvent() { + if (payloadCase_ == 103) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder() { + if (payloadCase_ == 103) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } + + public static final int BEAMMODEEVENT_FIELD_NUMBER = 110; + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + * @return Whether the beamModeEvent field is set. 
+ */ + @java.lang.Override + public boolean hasBeamModeEvent() { + return payloadCase_ == 110; + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + * @return The beamModeEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEvent getBeamModeEvent() { + if (payloadCase_ == 110) { + return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder() { + if (payloadCase_ == 110) { + return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (timestamp_ != 0L) { + output.writeInt64(1, timestamp_); + } + if (timestampNano_ != 0L) { + output.writeInt64(2, timestampNano_); + } + if (payloadCase_ == 11) { + output.writeMessage(11, (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_); + } + if (payloadCase_ == 12) { + output.writeMessage(12, (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_); + } + if (payloadCase_ == 13) { + output.writeMessage(13, (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_); + } + if (payloadCase_ == 14) { + output.writeMessage(14, (alice.dip.kafka.events.Events.Ev_CallEvent) payload_); + } + if (payloadCase_ == 15) { + output.writeMessage(15, (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_); + } + if (payloadCase_ == 16) { + 
output.writeMessage(16, (alice.dip.kafka.events.Events.Ev_RunEvent) payload_); + } + if (payloadCase_ == 101) { + output.writeMessage(101, (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_); + } + if (payloadCase_ == 102) { + output.writeMessage(102, (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_); + } + if (payloadCase_ == 103) { + output.writeMessage(103, (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_); + } + if (payloadCase_ == 110) { + output.writeMessage(110, (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (timestamp_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, timestamp_); + } + if (timestampNano_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, timestampNano_); + } + if (payloadCase_ == 11) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(11, (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_); + } + if (payloadCase_ == 12) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(12, (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_); + } + if (payloadCase_ == 13) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(13, (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_); + } + if (payloadCase_ == 14) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(14, (alice.dip.kafka.events.Events.Ev_CallEvent) payload_); + } + if (payloadCase_ == 15) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(15, (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_); + } + if (payloadCase_ == 16) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(16, (alice.dip.kafka.events.Events.Ev_RunEvent) payload_); + } + if 
(payloadCase_ == 101) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(101, (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_); + } + if (payloadCase_ == 102) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(102, (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_); + } + if (payloadCase_ == 103) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(103, (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_); + } + if (payloadCase_ == 110) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(110, (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof alice.dip.kafka.events.Events.Event)) { + return super.equals(obj); + } + alice.dip.kafka.events.Events.Event other = (alice.dip.kafka.events.Events.Event) obj; + + if (getTimestamp() + != other.getTimestamp()) return false; + if (getTimestampNano() + != other.getTimestampNano()) return false; + if (!getPayloadCase().equals(other.getPayloadCase())) return false; + switch (payloadCase_) { + case 11: + if (!getEnvironmentEvent() + .equals(other.getEnvironmentEvent())) return false; + break; + case 12: + if (!getTaskEvent() + .equals(other.getTaskEvent())) return false; + break; + case 13: + if (!getRoleEvent() + .equals(other.getRoleEvent())) return false; + break; + case 14: + if (!getCallEvent() + .equals(other.getCallEvent())) return false; + break; + case 15: + if (!getIntegratedServiceEvent() + .equals(other.getIntegratedServiceEvent())) return false; + break; + case 16: + if (!getRunEvent() + .equals(other.getRunEvent())) return false; + break; + case 101: + if (!getFrameworkEvent() + .equals(other.getFrameworkEvent())) return false; + break; + case 
102: + if (!getMesosHeartbeatEvent() + .equals(other.getMesosHeartbeatEvent())) return false; + break; + case 103: + if (!getCoreStartEvent() + .equals(other.getCoreStartEvent())) return false; + break; + case 110: + if (!getBeamModeEvent() + .equals(other.getBeamModeEvent())) return false; + break; + case 0: + default: + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestamp()); + hash = (37 * hash) + TIMESTAMPNANO_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTimestampNano()); + switch (payloadCase_) { + case 11: + hash = (37 * hash) + ENVIRONMENTEVENT_FIELD_NUMBER; + hash = (53 * hash) + getEnvironmentEvent().hashCode(); + break; + case 12: + hash = (37 * hash) + TASKEVENT_FIELD_NUMBER; + hash = (53 * hash) + getTaskEvent().hashCode(); + break; + case 13: + hash = (37 * hash) + ROLEEVENT_FIELD_NUMBER; + hash = (53 * hash) + getRoleEvent().hashCode(); + break; + case 14: + hash = (37 * hash) + CALLEVENT_FIELD_NUMBER; + hash = (53 * hash) + getCallEvent().hashCode(); + break; + case 15: + hash = (37 * hash) + INTEGRATEDSERVICEEVENT_FIELD_NUMBER; + hash = (53 * hash) + getIntegratedServiceEvent().hashCode(); + break; + case 16: + hash = (37 * hash) + RUNEVENT_FIELD_NUMBER; + hash = (53 * hash) + getRunEvent().hashCode(); + break; + case 101: + hash = (37 * hash) + FRAMEWORKEVENT_FIELD_NUMBER; + hash = (53 * hash) + getFrameworkEvent().hashCode(); + break; + case 102: + hash = (37 * hash) + MESOSHEARTBEATEVENT_FIELD_NUMBER; + hash = (53 * hash) + getMesosHeartbeatEvent().hashCode(); + break; + case 103: + hash = (37 * hash) + CORESTARTEVENT_FIELD_NUMBER; + hash = (53 * hash) + 
getCoreStartEvent().hashCode(); + break; + case 110: + hash = (37 * hash) + BEAMMODEEVENT_FIELD_NUMBER; + hash = (53 * hash) + getBeamModeEvent().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static alice.dip.kafka.events.Events.Event parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Event parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Event parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static alice.dip.kafka.events.Events.Event parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static alice.dip.kafka.events.Events.Event parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static alice.dip.kafka.events.Events.Event parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(alice.dip.kafka.events.Events.Event prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code events.Event} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:events.Event) + alice.dip.kafka.events.Events.EventOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return alice.dip.kafka.events.Events.internal_static_events_Event_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return alice.dip.kafka.events.Events.internal_static_events_Event_fieldAccessorTable + .ensureFieldAccessorsInitialized( + alice.dip.kafka.events.Events.Event.class, alice.dip.kafka.events.Events.Event.Builder.class); + } + + // Construct using ch.cern.alice.o2.control.events.Events.Event.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + timestamp_ = 0L; + timestampNano_ = 0L; + if (environmentEventBuilder_ != null) { + environmentEventBuilder_.clear(); + } + if (taskEventBuilder_ != null) { + taskEventBuilder_.clear(); + } + if (roleEventBuilder_ != null) { + roleEventBuilder_.clear(); + } + if (callEventBuilder_ != null) { + callEventBuilder_.clear(); + } + if (integratedServiceEventBuilder_ != null) { + integratedServiceEventBuilder_.clear(); + } + if (runEventBuilder_ != null) { + runEventBuilder_.clear(); + } + if (frameworkEventBuilder_ != null) { + frameworkEventBuilder_.clear(); + } + if (mesosHeartbeatEventBuilder_ != null) { + mesosHeartbeatEventBuilder_.clear(); + } + if 
(coreStartEventBuilder_ != null) { + coreStartEventBuilder_.clear(); + } + if (beamModeEventBuilder_ != null) { + beamModeEventBuilder_.clear(); + } + payloadCase_ = 0; + payload_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return alice.dip.kafka.events.Events.internal_static_events_Event_descriptor; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Event getDefaultInstanceForType() { + return alice.dip.kafka.events.Events.Event.getDefaultInstance(); + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Event build() { + alice.dip.kafka.events.Events.Event result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Event buildPartial() { + alice.dip.kafka.events.Events.Event result = new alice.dip.kafka.events.Events.Event(this); + if (bitField0_ != 0) { buildPartial0(result); } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartial0(alice.dip.kafka.events.Events.Event result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.timestamp_ = timestamp_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.timestampNano_ = timestampNano_; + } + } + + private void buildPartialOneofs(alice.dip.kafka.events.Events.Event result) { + result.payloadCase_ = payloadCase_; + result.payload_ = this.payload_; + if (payloadCase_ == 11 && + environmentEventBuilder_ != null) { + result.payload_ = environmentEventBuilder_.build(); + } + if (payloadCase_ == 12 && + taskEventBuilder_ != null) { + result.payload_ = taskEventBuilder_.build(); + } + if (payloadCase_ == 13 && + roleEventBuilder_ != null) { + result.payload_ = roleEventBuilder_.build(); + } + if (payloadCase_ == 14 && + callEventBuilder_ != null) { + result.payload_ = 
callEventBuilder_.build(); + } + if (payloadCase_ == 15 && + integratedServiceEventBuilder_ != null) { + result.payload_ = integratedServiceEventBuilder_.build(); + } + if (payloadCase_ == 16 && + runEventBuilder_ != null) { + result.payload_ = runEventBuilder_.build(); + } + if (payloadCase_ == 101 && + frameworkEventBuilder_ != null) { + result.payload_ = frameworkEventBuilder_.build(); + } + if (payloadCase_ == 102 && + mesosHeartbeatEventBuilder_ != null) { + result.payload_ = mesosHeartbeatEventBuilder_.build(); + } + if (payloadCase_ == 103 && + coreStartEventBuilder_ != null) { + result.payload_ = coreStartEventBuilder_.build(); + } + if (payloadCase_ == 110 && + beamModeEventBuilder_ != null) { + result.payload_ = beamModeEventBuilder_.build(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof alice.dip.kafka.events.Events.Event) { + return mergeFrom((alice.dip.kafka.events.Events.Event)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(alice.dip.kafka.events.Events.Event other) { + if (other == alice.dip.kafka.events.Events.Event.getDefaultInstance()) return this; + if (other.getTimestamp() != 0L) { + setTimestamp(other.getTimestamp()); + } + if (other.getTimestampNano() != 0L) { + setTimestampNano(other.getTimestampNano()); + } + switch (other.getPayloadCase()) { + case ENVIRONMENTEVENT: { + mergeEnvironmentEvent(other.getEnvironmentEvent()); + break; + } + case TASKEVENT: { + mergeTaskEvent(other.getTaskEvent()); + break; + } + case ROLEEVENT: { + mergeRoleEvent(other.getRoleEvent()); + break; + } + case CALLEVENT: { + mergeCallEvent(other.getCallEvent()); + break; + } + case INTEGRATEDSERVICEEVENT: { + mergeIntegratedServiceEvent(other.getIntegratedServiceEvent()); + break; + } + case RUNEVENT: { + mergeRunEvent(other.getRunEvent()); + break; + } + case FRAMEWORKEVENT: { + mergeFrameworkEvent(other.getFrameworkEvent()); + break; + } + case 
MESOSHEARTBEATEVENT: { + mergeMesosHeartbeatEvent(other.getMesosHeartbeatEvent()); + break; + } + case CORESTARTEVENT: { + mergeCoreStartEvent(other.getCoreStartEvent()); + break; + } + case BEAMMODEEVENT: { + mergeBeamModeEvent(other.getBeamModeEvent()); + break; + } + case PAYLOAD_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + timestamp_ = input.readInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 16: { + timestampNano_ = input.readInt64(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 90: { + input.readMessage( + internalGetEnvironmentEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 11; + break; + } // case 90 + case 98: { + input.readMessage( + internalGetTaskEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 12; + break; + } // case 98 + case 106: { + input.readMessage( + internalGetRoleEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 13; + break; + } // case 106 + case 114: { + input.readMessage( + internalGetCallEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 14; + break; + } // case 114 + case 122: { + input.readMessage( + internalGetIntegratedServiceEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 15; + break; + } // case 122 + case 130: { + input.readMessage( + internalGetRunEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 
16; + break; + } // case 130 + case 810: { + input.readMessage( + internalGetFrameworkEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 101; + break; + } // case 810 + case 818: { + input.readMessage( + internalGetMesosHeartbeatEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 102; + break; + } // case 818 + case 826: { + input.readMessage( + internalGetCoreStartEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 103; + break; + } // case 826 + case 882: { + input.readMessage( + internalGetBeamModeEventFieldBuilder().getBuilder(), + extensionRegistry); + payloadCase_ = 110; + break; + } // case 882 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int payloadCase_ = 0; + private java.lang.Object payload_; + public PayloadCase + getPayloadCase() { + return PayloadCase.forNumber( + payloadCase_); + } + + public Builder clearPayload() { + payloadCase_ = 0; + payload_ = null; + onChanged(); + return this; + } + + private int bitField0_; + + private long timestamp_ ; + /** + * int64 timestamp = 1; + * @return The timestamp. + */ + @java.lang.Override + public long getTimestamp() { + return timestamp_; + } + /** + * int64 timestamp = 1; + * @param value The timestamp to set. + * @return This builder for chaining. + */ + public Builder setTimestamp(long value) { + + timestamp_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * int64 timestamp = 1; + * @return This builder for chaining. 
+ */ + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000001); + timestamp_ = 0L; + onChanged(); + return this; + } + + private long timestampNano_ ; + /** + * int64 timestampNano = 2; + * @return The timestampNano. + */ + @java.lang.Override + public long getTimestampNano() { + return timestampNano_; + } + /** + * int64 timestampNano = 2; + * @param value The timestampNano to set. + * @return This builder for chaining. + */ + public Builder setTimestampNano(long value) { + + timestampNano_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * int64 timestampNano = 2; + * @return This builder for chaining. + */ + public Builder clearTimestampNano() { + bitField0_ = (bitField0_ & ~0x00000002); + timestampNano_ = 0L; + onChanged(); + return this; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_EnvironmentEvent, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder> environmentEventBuilder_; + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + * @return Whether the environmentEvent field is set. + */ + @java.lang.Override + public boolean hasEnvironmentEvent() { + return payloadCase_ == 11; + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + * @return The environmentEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getEnvironmentEvent() { + if (environmentEventBuilder_ == null) { + if (payloadCase_ == 11) { + return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 11) { + return environmentEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + public Builder setEnvironmentEvent(alice.dip.kafka.events.Events.Ev_EnvironmentEvent value) { + if (environmentEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + environmentEventBuilder_.setMessage(value); + } + payloadCase_ = 11; + return this; + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + public Builder setEnvironmentEvent( + alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder builderForValue) { + if (environmentEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + environmentEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 11; + return this; + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + public Builder mergeEnvironmentEvent(alice.dip.kafka.events.Events.Ev_EnvironmentEvent value) { + if (environmentEventBuilder_ == null) { + if (payloadCase_ == 11 && + payload_ != alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_EnvironmentEvent.newBuilder((alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 11) { + environmentEventBuilder_.mergeFrom(value); + } else { + 
environmentEventBuilder_.setMessage(value); + } + } + payloadCase_ = 11; + return this; + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + public Builder clearEnvironmentEvent() { + if (environmentEventBuilder_ == null) { + if (payloadCase_ == 11) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 11) { + payloadCase_ = 0; + payload_ = null; + } + environmentEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + public alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder getEnvironmentEventBuilder() { + return internalGetEnvironmentEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder() { + if ((payloadCase_ == 11) && (environmentEventBuilder_ != null)) { + return environmentEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 11) { + return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_EnvironmentEvent environmentEvent = 11; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_EnvironmentEvent, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder> + internalGetEnvironmentEventFieldBuilder() { + if (environmentEventBuilder_ == null) { + if (!(payloadCase_ == 11)) { + payload_ = alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance(); + } + environmentEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_EnvironmentEvent, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder>( + 
(alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 11; + onChanged(); + return environmentEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_TaskEvent, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder, alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder> taskEventBuilder_; + /** + * .events.Ev_TaskEvent taskEvent = 12; + * @return Whether the taskEvent field is set. + */ + @java.lang.Override + public boolean hasTaskEvent() { + return payloadCase_ == 12; + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + * @return The taskEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEvent getTaskEvent() { + if (taskEventBuilder_ == null) { + if (payloadCase_ == 12) { + return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 12) { + return taskEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + public Builder setTaskEvent(alice.dip.kafka.events.Events.Ev_TaskEvent value) { + if (taskEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + taskEventBuilder_.setMessage(value); + } + payloadCase_ = 12; + return this; + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + public Builder setTaskEvent( + alice.dip.kafka.events.Events.Ev_TaskEvent.Builder builderForValue) { + if (taskEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + taskEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 12; + return this; + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + public Builder mergeTaskEvent(alice.dip.kafka.events.Events.Ev_TaskEvent 
value) { + if (taskEventBuilder_ == null) { + if (payloadCase_ == 12 && + payload_ != alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_TaskEvent.newBuilder((alice.dip.kafka.events.Events.Ev_TaskEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 12) { + taskEventBuilder_.mergeFrom(value); + } else { + taskEventBuilder_.setMessage(value); + } + } + payloadCase_ = 12; + return this; + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + public Builder clearTaskEvent() { + if (taskEventBuilder_ == null) { + if (payloadCase_ == 12) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 12) { + payloadCase_ = 0; + payload_ = null; + } + taskEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + public alice.dip.kafka.events.Events.Ev_TaskEvent.Builder getTaskEventBuilder() { + return internalGetTaskEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder() { + if ((payloadCase_ == 12) && (taskEventBuilder_ != null)) { + return taskEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 12) { + return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_TaskEvent taskEvent = 12; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_TaskEvent, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder, alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder> + internalGetTaskEventFieldBuilder() { + if (taskEventBuilder_ == null) { + if (!(payloadCase_ == 12)) { + payload_ = alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance(); + } + taskEventBuilder_ = 
new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_TaskEvent, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder, alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 12; + onChanged(); + return taskEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_RoleEvent, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder, alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder> roleEventBuilder_; + /** + * .events.Ev_RoleEvent roleEvent = 13; + * @return Whether the roleEvent field is set. + */ + @java.lang.Override + public boolean hasRoleEvent() { + return payloadCase_ == 13; + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + * @return The roleEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEvent getRoleEvent() { + if (roleEventBuilder_ == null) { + if (payloadCase_ == 13) { + return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 13) { + return roleEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + public Builder setRoleEvent(alice.dip.kafka.events.Events.Ev_RoleEvent value) { + if (roleEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + roleEventBuilder_.setMessage(value); + } + payloadCase_ = 13; + return this; + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + public Builder setRoleEvent( + alice.dip.kafka.events.Events.Ev_RoleEvent.Builder builderForValue) { + if (roleEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + 
roleEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 13; + return this; + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + public Builder mergeRoleEvent(alice.dip.kafka.events.Events.Ev_RoleEvent value) { + if (roleEventBuilder_ == null) { + if (payloadCase_ == 13 && + payload_ != alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_RoleEvent.newBuilder((alice.dip.kafka.events.Events.Ev_RoleEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 13) { + roleEventBuilder_.mergeFrom(value); + } else { + roleEventBuilder_.setMessage(value); + } + } + payloadCase_ = 13; + return this; + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + public Builder clearRoleEvent() { + if (roleEventBuilder_ == null) { + if (payloadCase_ == 13) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 13) { + payloadCase_ = 0; + payload_ = null; + } + roleEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + public alice.dip.kafka.events.Events.Ev_RoleEvent.Builder getRoleEventBuilder() { + return internalGetRoleEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder() { + if ((payloadCase_ == 13) && (roleEventBuilder_ != null)) { + return roleEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 13) { + return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_RoleEvent roleEvent = 13; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_RoleEvent, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder, 
alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder> + internalGetRoleEventFieldBuilder() { + if (roleEventBuilder_ == null) { + if (!(payloadCase_ == 13)) { + payload_ = alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance(); + } + roleEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_RoleEvent, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder, alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 13; + onChanged(); + return roleEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_CallEvent, alice.dip.kafka.events.Events.Ev_CallEvent.Builder, alice.dip.kafka.events.Events.Ev_CallEventOrBuilder> callEventBuilder_; + /** + * .events.Ev_CallEvent callEvent = 14; + * @return Whether the callEvent field is set. + */ + @java.lang.Override + public boolean hasCallEvent() { + return payloadCase_ == 14; + } + /** + * .events.Ev_CallEvent callEvent = 14; + * @return The callEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEvent getCallEvent() { + if (callEventBuilder_ == null) { + if (payloadCase_ == 14) { + return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 14) { + return callEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + public Builder setCallEvent(alice.dip.kafka.events.Events.Ev_CallEvent value) { + if (callEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + callEventBuilder_.setMessage(value); + } + payloadCase_ = 14; + return this; + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + public Builder setCallEvent( + alice.dip.kafka.events.Events.Ev_CallEvent.Builder builderForValue) { + if (callEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + callEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 14; + return this; + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + public Builder mergeCallEvent(alice.dip.kafka.events.Events.Ev_CallEvent value) { + if (callEventBuilder_ == null) { + if (payloadCase_ == 14 && + payload_ != alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_CallEvent.newBuilder((alice.dip.kafka.events.Events.Ev_CallEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 14) { + callEventBuilder_.mergeFrom(value); + } else { + callEventBuilder_.setMessage(value); + } + } + payloadCase_ = 14; + return this; + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + public Builder clearCallEvent() { + if (callEventBuilder_ == null) { + if (payloadCase_ == 14) { + 
payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 14) { + payloadCase_ = 0; + payload_ = null; + } + callEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + public alice.dip.kafka.events.Events.Ev_CallEvent.Builder getCallEventBuilder() { + return internalGetCallEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder() { + if ((payloadCase_ == 14) && (callEventBuilder_ != null)) { + return callEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 14) { + return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_CallEvent callEvent = 14; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_CallEvent, alice.dip.kafka.events.Events.Ev_CallEvent.Builder, alice.dip.kafka.events.Events.Ev_CallEventOrBuilder> + internalGetCallEventFieldBuilder() { + if (callEventBuilder_ == null) { + if (!(payloadCase_ == 14)) { + payload_ = alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance(); + } + callEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_CallEvent, alice.dip.kafka.events.Events.Ev_CallEvent.Builder, alice.dip.kafka.events.Events.Ev_CallEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_CallEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 14; + onChanged(); + return callEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder> integratedServiceEventBuilder_; + /** + * 
.events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + * @return Whether the integratedServiceEvent field is set. + */ + @java.lang.Override + public boolean hasIntegratedServiceEvent() { + return payloadCase_ == 15; + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + * @return The integratedServiceEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent() { + if (integratedServiceEventBuilder_ == null) { + if (payloadCase_ == 15) { + return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 15) { + return integratedServiceEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + public Builder setIntegratedServiceEvent(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent value) { + if (integratedServiceEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + integratedServiceEventBuilder_.setMessage(value); + } + payloadCase_ = 15; + return this; + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + public Builder setIntegratedServiceEvent( + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder builderForValue) { + if (integratedServiceEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + integratedServiceEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 15; + return this; + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + public Builder mergeIntegratedServiceEvent(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent value) { + if (integratedServiceEventBuilder_ 
== null) { + if (payloadCase_ == 15 && + payload_ != alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.newBuilder((alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 15) { + integratedServiceEventBuilder_.mergeFrom(value); + } else { + integratedServiceEventBuilder_.setMessage(value); + } + } + payloadCase_ = 15; + return this; + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + public Builder clearIntegratedServiceEvent() { + if (integratedServiceEventBuilder_ == null) { + if (payloadCase_ == 15) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 15) { + payloadCase_ = 0; + payload_ = null; + } + integratedServiceEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder getIntegratedServiceEventBuilder() { + return internalGetIntegratedServiceEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder() { + if ((payloadCase_ == 15) && (integratedServiceEventBuilder_ != null)) { + return integratedServiceEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 15) { + return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent, 
alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder> + internalGetIntegratedServiceEventFieldBuilder() { + if (integratedServiceEventBuilder_ == null) { + if (!(payloadCase_ == 15)) { + payload_ = alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance(); + } + integratedServiceEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 15; + onChanged(); + return integratedServiceEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_RunEvent, alice.dip.kafka.events.Events.Ev_RunEvent.Builder, alice.dip.kafka.events.Events.Ev_RunEventOrBuilder> runEventBuilder_; + /** + * .events.Ev_RunEvent runEvent = 16; + * @return Whether the runEvent field is set. + */ + @java.lang.Override + public boolean hasRunEvent() { + return payloadCase_ == 16; + } + /** + * .events.Ev_RunEvent runEvent = 16; + * @return The runEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEvent getRunEvent() { + if (runEventBuilder_ == null) { + if (payloadCase_ == 16) { + return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 16) { + return runEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + public Builder setRunEvent(alice.dip.kafka.events.Events.Ev_RunEvent value) { + if (runEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + runEventBuilder_.setMessage(value); + } + payloadCase_ = 16; + return this; + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + public Builder setRunEvent( + alice.dip.kafka.events.Events.Ev_RunEvent.Builder builderForValue) { + if (runEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + runEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 16; + return this; + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + public Builder mergeRunEvent(alice.dip.kafka.events.Events.Ev_RunEvent value) { + if (runEventBuilder_ == null) { + if (payloadCase_ == 16 && + payload_ != alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_RunEvent.newBuilder((alice.dip.kafka.events.Events.Ev_RunEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 16) { + runEventBuilder_.mergeFrom(value); + } else { + runEventBuilder_.setMessage(value); + } + } + payloadCase_ = 16; + return this; + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + public Builder clearRunEvent() { + if (runEventBuilder_ == null) { + if (payloadCase_ == 16) { + payloadCase_ = 0; + payload_ = 
null; + onChanged(); + } + } else { + if (payloadCase_ == 16) { + payloadCase_ = 0; + payload_ = null; + } + runEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + public alice.dip.kafka.events.Events.Ev_RunEvent.Builder getRunEventBuilder() { + return internalGetRunEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder() { + if ((payloadCase_ == 16) && (runEventBuilder_ != null)) { + return runEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 16) { + return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_RunEvent runEvent = 16; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_RunEvent, alice.dip.kafka.events.Events.Ev_RunEvent.Builder, alice.dip.kafka.events.Events.Ev_RunEventOrBuilder> + internalGetRunEventFieldBuilder() { + if (runEventBuilder_ == null) { + if (!(payloadCase_ == 16)) { + payload_ = alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance(); + } + runEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_RunEvent, alice.dip.kafka.events.Events.Ev_RunEvent.Builder, alice.dip.kafka.events.Events.Ev_RunEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_RunEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 16; + onChanged(); + return runEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder> frameworkEventBuilder_; + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + * @return Whether the 
frameworkEvent field is set. + */ + @java.lang.Override + public boolean hasFrameworkEvent() { + return payloadCase_ == 101; + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + * @return The frameworkEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent() { + if (frameworkEventBuilder_ == null) { + if (payloadCase_ == 101) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 101) { + return frameworkEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + public Builder setFrameworkEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent value) { + if (frameworkEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + frameworkEventBuilder_.setMessage(value); + } + payloadCase_ = 101; + return this; + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + public Builder setFrameworkEvent( + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder builderForValue) { + if (frameworkEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + frameworkEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 101; + return this; + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + public Builder mergeFrameworkEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent value) { + if (frameworkEventBuilder_ == null) { + if (payloadCase_ == 101 && + payload_ != alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance()) { + payload_ = 
alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.newBuilder((alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 101) { + frameworkEventBuilder_.mergeFrom(value); + } else { + frameworkEventBuilder_.setMessage(value); + } + } + payloadCase_ = 101; + return this; + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + public Builder clearFrameworkEvent() { + if (frameworkEventBuilder_ == null) { + if (payloadCase_ == 101) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 101) { + payloadCase_ = 0; + payload_ = null; + } + frameworkEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder getFrameworkEventBuilder() { + return internalGetFrameworkEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder() { + if ((payloadCase_ == 101) && (frameworkEventBuilder_ != null)) { + return frameworkEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 101) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder> + internalGetFrameworkEventFieldBuilder() { + if (frameworkEventBuilder_ == null) { + if 
(!(payloadCase_ == 101)) { + payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance(); + } + frameworkEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 101; + onChanged(); + return frameworkEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder> mesosHeartbeatEventBuilder_; + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + * @return Whether the mesosHeartbeatEvent field is set. + */ + @java.lang.Override + public boolean hasMesosHeartbeatEvent() { + return payloadCase_ == 102; + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + * @return The mesosHeartbeatEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent() { + if (mesosHeartbeatEventBuilder_ == null) { + if (payloadCase_ == 102) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } else { + if (payloadCase_ == 102) { + return mesosHeartbeatEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + public Builder setMesosHeartbeatEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat value) { + if (mesosHeartbeatEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + mesosHeartbeatEventBuilder_.setMessage(value); + } + payloadCase_ = 102; + return this; + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + public Builder setMesosHeartbeatEvent( + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder builderForValue) { + if (mesosHeartbeatEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + mesosHeartbeatEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 102; + return this; + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + public Builder mergeMesosHeartbeatEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat value) { + if (mesosHeartbeatEventBuilder_ == null) { + if (payloadCase_ == 102 && + payload_ != alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.newBuilder((alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } 
+ onChanged(); + } else { + if (payloadCase_ == 102) { + mesosHeartbeatEventBuilder_.mergeFrom(value); + } else { + mesosHeartbeatEventBuilder_.setMessage(value); + } + } + payloadCase_ = 102; + return this; + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + public Builder clearMesosHeartbeatEvent() { + if (mesosHeartbeatEventBuilder_ == null) { + if (payloadCase_ == 102) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 102) { + payloadCase_ = 0; + payload_ = null; + } + mesosHeartbeatEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder getMesosHeartbeatEventBuilder() { + return internalGetMesosHeartbeatEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder() { + if ((payloadCase_ == 102) && (mesosHeartbeatEventBuilder_ != null)) { + return mesosHeartbeatEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 102) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } + } + /** + * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder> + internalGetMesosHeartbeatEventFieldBuilder() { + if (mesosHeartbeatEventBuilder_ == null) { + if (!(payloadCase_ == 102)) { + payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance(); + } + 
mesosHeartbeatEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder>( + (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 102; + onChanged(); + return mesosHeartbeatEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder> coreStartEventBuilder_; + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + * @return Whether the coreStartEvent field is set. + */ + @java.lang.Override + public boolean hasCoreStartEvent() { + return payloadCase_ == 103; + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + * @return The coreStartEvent. 
+ */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getCoreStartEvent() { + if (coreStartEventBuilder_ == null) { + if (payloadCase_ == 103) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } else { + if (payloadCase_ == 103) { + return coreStartEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + public Builder setCoreStartEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart value) { + if (coreStartEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + coreStartEventBuilder_.setMessage(value); + } + payloadCase_ = 103; + return this; + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + public Builder setCoreStartEvent( + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder builderForValue) { + if (coreStartEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + coreStartEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 103; + return this; + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + public Builder mergeCoreStartEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart value) { + if (coreStartEventBuilder_ == null) { + if (payloadCase_ == 103 && + payload_ != alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.newBuilder((alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 103) { + coreStartEventBuilder_.mergeFrom(value); + } else { + 
coreStartEventBuilder_.setMessage(value); + } + } + payloadCase_ = 103; + return this; + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + public Builder clearCoreStartEvent() { + if (coreStartEventBuilder_ == null) { + if (payloadCase_ == 103) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 103) { + payloadCase_ = 0; + payload_ = null; + } + coreStartEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder getCoreStartEventBuilder() { + return internalGetCoreStartEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder() { + if ((payloadCase_ == 103) && (coreStartEventBuilder_ != null)) { + return coreStartEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 103) { + return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_; + } + return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } + } + /** + * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder> + internalGetCoreStartEventFieldBuilder() { + if (coreStartEventBuilder_ == null) { + if (!(payloadCase_ == 103)) { + payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance(); + } + coreStartEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder>( + 
(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 103; + onChanged(); + return coreStartEventBuilder_; + } + + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_BeamModeEvent, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder> beamModeEventBuilder_; + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + * @return Whether the beamModeEvent field is set. + */ + @java.lang.Override + public boolean hasBeamModeEvent() { + return payloadCase_ == 110; + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + * @return The beamModeEvent. + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEvent getBeamModeEvent() { + if (beamModeEventBuilder_ == null) { + if (payloadCase_ == 110) { + return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } else { + if (payloadCase_ == 110) { + return beamModeEventBuilder_.getMessage(); + } + return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + public Builder setBeamModeEvent(alice.dip.kafka.events.Events.Ev_BeamModeEvent value) { + if (beamModeEventBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + payload_ = value; + onChanged(); + } else { + beamModeEventBuilder_.setMessage(value); + } + payloadCase_ = 110; + return this; + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + public Builder setBeamModeEvent( + alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder builderForValue) { + if (beamModeEventBuilder_ == null) { + payload_ = builderForValue.build(); + onChanged(); + } else { + beamModeEventBuilder_.setMessage(builderForValue.build()); + } + payloadCase_ = 110; + return this; + } 
+ /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + public Builder mergeBeamModeEvent(alice.dip.kafka.events.Events.Ev_BeamModeEvent value) { + if (beamModeEventBuilder_ == null) { + if (payloadCase_ == 110 && + payload_ != alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance()) { + payload_ = alice.dip.kafka.events.Events.Ev_BeamModeEvent.newBuilder((alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_) + .mergeFrom(value).buildPartial(); + } else { + payload_ = value; + } + onChanged(); + } else { + if (payloadCase_ == 110) { + beamModeEventBuilder_.mergeFrom(value); + } else { + beamModeEventBuilder_.setMessage(value); + } + } + payloadCase_ = 110; + return this; + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + public Builder clearBeamModeEvent() { + if (beamModeEventBuilder_ == null) { + if (payloadCase_ == 110) { + payloadCase_ = 0; + payload_ = null; + onChanged(); + } + } else { + if (payloadCase_ == 110) { + payloadCase_ = 0; + payload_ = null; + } + beamModeEventBuilder_.clear(); + } + return this; + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + public alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder getBeamModeEventBuilder() { + return internalGetBeamModeEventFieldBuilder().getBuilder(); + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + @java.lang.Override + public alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder() { + if ((payloadCase_ == 110) && (beamModeEventBuilder_ != null)) { + return beamModeEventBuilder_.getMessageOrBuilder(); + } else { + if (payloadCase_ == 110) { + return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_; + } + return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } + } + /** + * .events.Ev_BeamModeEvent beamModeEvent = 110; + */ + private com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_BeamModeEvent, 
alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder> + internalGetBeamModeEventFieldBuilder() { + if (beamModeEventBuilder_ == null) { + if (!(payloadCase_ == 110)) { + payload_ = alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance(); + } + beamModeEventBuilder_ = new com.google.protobuf.SingleFieldBuilder< + alice.dip.kafka.events.Events.Ev_BeamModeEvent, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder>( + (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_, + getParentForChildren(), + isClean()); + payload_ = null; + } + payloadCase_ = 110; + onChanged(); + return beamModeEventBuilder_; + } + + // @@protoc_insertion_point(builder_scope:events.Event) + } + + // @@protoc_insertion_point(class_scope:events.Event) + private static final alice.dip.kafka.events.Events.Event DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Event(); + } + + public static alice.dip.kafka.events.Events.Event getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Event parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return 
builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public alice.dip.kafka.events.Events.Event getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_MetaEvent_CoreStart_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_EnvironmentEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_EnvironmentEvent_VarsEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_EnvironmentEvent_VarsEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Traits_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Traits_fieldAccessorTable; + 
private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_TaskEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_TaskEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_CallEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_CallEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_RoleEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_RoleEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_IntegratedServiceEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_RunEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_RunEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Ev_BeamModeEvent_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Ev_BeamModeEvent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_events_Event_descriptor; + private static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_events_Event_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { 
+ java.lang.String[] descriptorData = { + "\n\014events.proto\022\006events\032\023protos/common.pr" + + "oto\"\035\n\033Ev_MetaEvent_MesosHeartbeat\"-\n\026Ev" + + "_MetaEvent_CoreStart\022\023\n\013frameworkId\030\001 \001(" + + "\t\"C\n\033Ev_MetaEvent_FrameworkEvent\022\023\n\013fram" + + "eworkId\030\001 \001(\t\022\017\n\007message\030\002 \001(\t\"\213\003\n\023Ev_En" + + "vironmentEvent\022\025\n\renvironmentId\030\001 \001(\t\022\r\n" + + "\005state\030\002 \001(\t\022\021\n\trunNumber\030\003 \001(\r\022\r\n\005error" + + "\030\004 \001(\t\022\017\n\007message\030\005 \001(\t\022\022\n\ntransition\030\006 " + + "\001(\t\022\026\n\016transitionStep\030\007 \001(\t\022*\n\020transitio" + + "nStatus\030\010 \001(\0162\020.events.OpStatus\0223\n\004vars\030" + + "\t \003(\0132%.events.Ev_EnvironmentEvent.VarsE" + + "ntry\022%\n\017lastRequestUser\030\n \001(\0132\014.common.U" + + "ser\022:\n\024workflowTemplateInfo\030\013 \001(\0132\034.comm" + + "on.WorkflowTemplateInfo\032+\n\tVarsEntry\022\013\n\003" + + "key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"K\n\006Traits\022\017" + + "\n\007trigger\030\001 \001(\t\022\r\n\005await\030\002 \001(\t\022\017\n\007timeou" + + "t\030\003 \001(\t\022\020\n\010critical\030\004 \001(\010\"\265\001\n\014Ev_TaskEve" + + "nt\022\014\n\004name\030\001 \001(\t\022\016\n\006taskid\030\002 \001(\t\022\r\n\005stat" + + "e\030\003 \001(\t\022\016\n\006status\030\004 \001(\t\022\020\n\010hostname\030\005 \001(" + + "\t\022\021\n\tclassName\030\006 \001(\t\022\036\n\006traits\030\007 \001(\0132\016.e" + + "vents.Traits\022\025\n\renvironmentId\030\010 \001(\t\022\014\n\004p" + + "ath\030\t \001(\t\"\266\001\n\014Ev_CallEvent\022\014\n\004func\030\001 \001(\t" + + "\022$\n\ncallStatus\030\002 \001(\0162\020.events.OpStatus\022\016" + + "\n\006return\030\003 \001(\t\022\036\n\006traits\030\004 \001(\0132\016.events." 
+ + "Traits\022\016\n\006output\030\005 \001(\t\022\r\n\005error\030\006 \001(\t\022\025\n" + + "\renvironmentId\030\007 \001(\t\022\014\n\004path\030\010 \001(\t\"d\n\014Ev" + + "_RoleEvent\022\014\n\004name\030\001 \001(\t\022\016\n\006status\030\002 \001(\t" + + "\022\r\n\005state\030\003 \001(\t\022\020\n\010rolePath\030\004 \001(\t\022\025\n\renv" + + "ironmentId\030\005 \001(\t\"\350\001\n\031Ev_IntegratedServic" + + "eEvent\022\014\n\004name\030\001 \001(\t\022\r\n\005error\030\002 \001(\t\022\025\n\ro" + + "perationName\030\003 \001(\t\022)\n\017operationStatus\030\004 " + + "\001(\0162\020.events.OpStatus\022\025\n\roperationStep\030\005" + + " \001(\t\022-\n\023operationStepStatus\030\006 \001(\0162\020.even" + + "ts.OpStatus\022\025\n\renvironmentId\030\007 \001(\t\022\017\n\007pa" + + "yload\030\010 \001(\t\"\302\001\n\013Ev_RunEvent\022\025\n\renvironme" + + "ntId\030\001 \001(\t\022\021\n\trunNumber\030\002 \001(\r\022\r\n\005state\030\003" + + " \001(\t\022\r\n\005error\030\004 \001(\t\022\022\n\ntransition\030\005 \001(\t\022" + + "*\n\020transitionStatus\030\006 \001(\0162\020.events.OpSta" + + "tus\022%\n\017lastRequestUser\030\010 \001(\0132\014.common.Us" + + "erJ\004\010\007\020\010\"I\n\020Ev_BeamModeEvent\022\021\n\ttimestam" + + "p\030\001 \001(\003\022\"\n\010beamInfo\030\002 \001(\0132\020.common.BeamI" + + "nfo\"\340\004\n\005Event\022\021\n\ttimestamp\030\001 \001(\003\022\025\n\rtime" + + "stampNano\030\002 \001(\003\0227\n\020environmentEvent\030\013 \001(" + + "\0132\033.events.Ev_EnvironmentEventH\000\022)\n\ttask" + + "Event\030\014 \001(\0132\024.events.Ev_TaskEventH\000\022)\n\tr" + + "oleEvent\030\r \001(\0132\024.events.Ev_RoleEventH\000\022)" + + "\n\tcallEvent\030\016 \001(\0132\024.events.Ev_CallEventH" + + "\000\022C\n\026integratedServiceEvent\030\017 \001(\0132!.even" + + "ts.Ev_IntegratedServiceEventH\000\022\'\n\010runEve" + + "nt\030\020 \001(\0132\023.events.Ev_RunEventH\000\022=\n\016frame" + + "workEvent\030e 
\001(\0132#.events.Ev_MetaEvent_Fr" + + "ameworkEventH\000\022B\n\023mesosHeartbeatEvent\030f " + + "\001(\0132#.events.Ev_MetaEvent_MesosHeartbeat" + + "H\000\0228\n\016coreStartEvent\030g \001(\0132\036.events.Ev_M" + + "etaEvent_CoreStartH\000\0221\n\rbeamModeEvent\030n " + + "\001(\0132\030.events.Ev_BeamModeEventH\000B\t\n\007Paylo" + + "adJ\004\010\003\020\013J\004\010\021\020e*]\n\010OpStatus\022\010\n\004NULL\020\000\022\013\n\007" + + "STARTED\020\001\022\013\n\007ONGOING\020\002\022\013\n\007DONE_OK\020\003\022\016\n\nD" + + "ONE_ERROR\020\004\022\020\n\014DONE_TIMEOUT\020\005BS\n\037ch.cern" + + ".alice.o2.control.eventsZ0github.com/Ali" + + "ceO2Group/Control/common/protos;pbP\000b\006pr" + + "oto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + alice.dip.kafka.events.Common.getDescriptor(), + }); + internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor, + new java.lang.String[] { }); + internal_static_events_Ev_MetaEvent_CoreStart_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_MetaEvent_CoreStart_descriptor, + new java.lang.String[] { "FrameworkId", }); + internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor, + new java.lang.String[] { "FrameworkId", 
"Message", }); + internal_static_events_Ev_EnvironmentEvent_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_EnvironmentEvent_descriptor, + new java.lang.String[] { "EnvironmentId", "State", "RunNumber", "Error", "Message", "Transition", "TransitionStep", "TransitionStatus", "Vars", "LastRequestUser", "WorkflowTemplateInfo", }); + internal_static_events_Ev_EnvironmentEvent_VarsEntry_descriptor = + internal_static_events_Ev_EnvironmentEvent_descriptor.getNestedTypes().get(0); + internal_static_events_Ev_EnvironmentEvent_VarsEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_EnvironmentEvent_VarsEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_events_Traits_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_events_Traits_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Traits_descriptor, + new java.lang.String[] { "Trigger", "Await", "Timeout", "Critical", }); + internal_static_events_Ev_TaskEvent_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_events_Ev_TaskEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_TaskEvent_descriptor, + new java.lang.String[] { "Name", "Taskid", "State", "Status", "Hostname", "ClassName", "Traits", "EnvironmentId", "Path", }); + internal_static_events_Ev_CallEvent_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_events_Ev_CallEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_CallEvent_descriptor, + new java.lang.String[] { "Func", "CallStatus", "Return", "Traits", "Output", "Error", "EnvironmentId", "Path", }); 
+ internal_static_events_Ev_RoleEvent_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_events_Ev_RoleEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_RoleEvent_descriptor, + new java.lang.String[] { "Name", "Status", "State", "RolePath", "EnvironmentId", }); + internal_static_events_Ev_IntegratedServiceEvent_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_IntegratedServiceEvent_descriptor, + new java.lang.String[] { "Name", "Error", "OperationName", "OperationStatus", "OperationStep", "OperationStepStatus", "EnvironmentId", "Payload", }); + internal_static_events_Ev_RunEvent_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_events_Ev_RunEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_RunEvent_descriptor, + new java.lang.String[] { "EnvironmentId", "RunNumber", "State", "Error", "Transition", "TransitionStatus", "LastRequestUser", }); + internal_static_events_Ev_BeamModeEvent_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_events_Ev_BeamModeEvent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Ev_BeamModeEvent_descriptor, + new java.lang.String[] { "Timestamp", "BeamInfo", }); + internal_static_events_Event_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_events_Event_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_events_Event_descriptor, + new java.lang.String[] { "Timestamp", "TimestampNano", "EnvironmentEvent", "TaskEvent", "RoleEvent", "CallEvent", "IntegratedServiceEvent", "RunEvent", "FrameworkEvent", "MesosHeartbeatEvent", 
"CoreStartEvent", "BeamModeEvent", "Payload", }); + descriptor.resolveAllFeaturesImmutable(); + alice.dip.kafka.events.Common.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} From b37733410919bc78a76e463f25446a29e3acf438 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:38:38 +0200 Subject: [PATCH 07/16] Mark beam producer as volatile as it is updated for a different thread --- src/alice/dip/DipMessagesProcessor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/alice/dip/DipMessagesProcessor.java b/src/alice/dip/DipMessagesProcessor.java index 4fe346b..795ae5f 100644 --- a/src/alice/dip/DipMessagesProcessor.java +++ b/src/alice/dip/DipMessagesProcessor.java @@ -56,7 +56,7 @@ public class DipMessagesProcessor implements Runnable { private BlockingQueue outputQueue = new ArrayBlockingQueue(100); private final LuminosityManager luminosityManager; - private BeamModeEventsKafkaProducer beamModeEventsKafkaProducer; + private volatile BeamModeEventsKafkaProducer beamModeEventsKafkaProducer; public DipMessagesProcessor(BookkeepingClient bookkeepingClient, LuminosityManager luminosityManager) { From 5d27220a8a6385f4a44e5861bfe10c37e747bd3d Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:40:07 +0200 Subject: [PATCH 08/16] Map Beam Mode from LHC to enum of protobuf --- src/alice/dip/LhcInfoObj.java | 13 +++++++++++++ .../dip/kafka/BeamModeEventsKafkaProducer.java | 6 +++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/alice/dip/LhcInfoObj.java b/src/alice/dip/LhcInfoObj.java index b7bb8c7..1965600 100644 --- a/src/alice/dip/LhcInfoObj.java +++ b/src/alice/dip/LhcInfoObj.java @@ -317,6 +317,19 @@ public String getBeamMode() { return last.value; } + /** + * Get the beam mode as a key suitable for enum conversion usage in protobuf + * @return Beam mode string with spaces replaced by underscores, or "UNKNOWN" if beam mode is null + */ + public String 
getBeamModeAsKey() { + String bm = getBeamMode(); + if (bm == null) { + return "UNKNOWN"; + } + bm = bm.replace(" ", "_"); + return bm; + } + public String getStableBeamStartStr() { long t = getStableBeamStart(); diff --git a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java index c8a50ae..8f3576f 100644 --- a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java +++ b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java @@ -18,8 +18,8 @@ import alice.dip.AliDip2BK; import alice.dip.LhcInfoObj; -import alice.dip.kafka.events.Events; -import alice.dip.kafka.events.Common; +import alice.dip.kafka.dto.Common; +import alice.dip.kafka.dto.Events; /** * Kafka producer for LHC Beam Mode events, serialized using Protocol Buffers. @@ -49,7 +49,7 @@ public void sendEvent(Integer fillNumber, LhcInfoObj fill, long timestamp) { .setStableBeamsEnd(fill.getStableBeamStop()) .setFillNumber(fill.fillNo) .setFillingSchemeName(fill.LHCFillingSchemeName) - .setBeamMode(Common.BeamMode.valueOf(fill.getBeamMode())) + .setBeamMode(Common.BeamMode.valueOf(fill.getBeamModeAsKey())) .setBeamType(fill.beamType) .build(); From 1b48e6887fdc907b5df9f2ee74ad2c2e344b17f5 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:40:16 +0200 Subject: [PATCH 09/16] Use future for kafka return --- src/alice/dip/kafka/KafkaProducerInterface.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/alice/dip/kafka/KafkaProducerInterface.java b/src/alice/dip/kafka/KafkaProducerInterface.java index d79d3a7..117706b 100644 --- a/src/alice/dip/kafka/KafkaProducerInterface.java +++ b/src/alice/dip/kafka/KafkaProducerInterface.java @@ -49,9 +49,9 @@ public KafkaProducerInterface(String bootstrapServers, String topic, Serializer< * @param key - message key for partitioning * @param value - message value (payload) */ - public void send(K key, V value) { + public java.util.concurrent.Future send(K key, V value) { 
ProducerRecord record = new ProducerRecord<>(topic, key, value); - producer.send(record); + return producer.send(record); } /** From 99c461cb054f43151c2df06144b7e594dedc090f Mon Sep 17 00:00:00 2001 From: George Raduta Date: Tue, 7 Oct 2025 18:40:32 +0200 Subject: [PATCH 10/16] Update project structure namings --- .../dip/kafka/{events => dto}/Common.java | 252 +-- .../dip/kafka/{events => dto}/Events.java | 1824 ++++++++--------- 2 files changed, 1038 insertions(+), 1038 deletions(-) rename src/alice/dip/kafka/{events => dto}/Common.java (90%) rename src/alice/dip/kafka/{events => dto}/Events.java (85%) diff --git a/src/alice/dip/kafka/events/Common.java b/src/alice/dip/kafka/dto/Common.java similarity index 90% rename from src/alice/dip/kafka/events/Common.java rename to src/alice/dip/kafka/dto/Common.java index eb325ca..f0cca3e 100644 --- a/src/alice/dip/kafka/events/Common.java +++ b/src/alice/dip/kafka/dto/Common.java @@ -3,7 +3,7 @@ // source: common.proto // Protobuf Java Version: 4.32.1 -package alice.dip.kafka.events; +package alice.dip.kafka.dto; @com.google.protobuf.Generated public final class Common extends com.google.protobuf.GeneratedFile { @@ -321,7 +321,7 @@ public BeamMode findValueByNumber(int number) { } public static com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return alice.dip.kafka.events.Common.getDescriptor().getEnumTypes().get(0); + return alice.dip.kafka.dto.Common.getDescriptor().getEnumTypes().get(0); } private static final BeamMode[] VALUES = values(); @@ -436,15 +436,15 @@ private User() { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return alice.dip.kafka.events.Common.internal_static_common_User_descriptor; + return alice.dip.kafka.dto.Common.internal_static_common_User_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
alice.dip.kafka.events.Common.internal_static_common_User_fieldAccessorTable + return alice.dip.kafka.dto.Common.internal_static_common_User_fieldAccessorTable .ensureFieldAccessorsInitialized( - alice.dip.kafka.events.Common.User.class, alice.dip.kafka.events.Common.User.Builder.class); + alice.dip.kafka.dto.Common.User.class, alice.dip.kafka.dto.Common.User.Builder.class); } private int bitField0_; @@ -602,10 +602,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof alice.dip.kafka.events.Common.User)) { + if (!(obj instanceof alice.dip.kafka.dto.Common.User)) { return super.equals(obj); } - alice.dip.kafka.events.Common.User other = (alice.dip.kafka.events.Common.User) obj; + alice.dip.kafka.dto.Common.User other = (alice.dip.kafka.dto.Common.User) obj; if (hasExternalId() != other.hasExternalId()) return false; if (hasExternalId()) { @@ -645,44 +645,44 @@ public int hashCode() { return hash; } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static alice.dip.kafka.events.Common.User parseFrom(byte[] data) + public static alice.dip.kafka.dto.Common.User parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static alice.dip.kafka.events.Common.User parseFrom(java.io.InputStream input) + public static alice.dip.kafka.dto.Common.User parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -690,26 +690,26 @@ public static alice.dip.kafka.events.Common.User parseFrom( .parseWithIOException(PARSER, input, extensionRegistry); } - public static alice.dip.kafka.events.Common.User parseDelimitedFrom(java.io.InputStream input) + public static alice.dip.kafka.dto.Common.User parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input); } - public static alice.dip.kafka.events.Common.User parseDelimitedFrom( + public static alice.dip.kafka.dto.Common.User parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } - public static alice.dip.kafka.events.Common.User parseFrom( + public static alice.dip.kafka.dto.Common.User parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -722,7 +722,7 @@ public static alice.dip.kafka.events.Common.User parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(alice.dip.kafka.events.Common.User prototype) { + public static Builder newBuilder(alice.dip.kafka.dto.Common.User prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -743,18 +743,18 @@ protected Builder newBuilderForType( public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:common.User) - alice.dip.kafka.events.Common.UserOrBuilder { + alice.dip.kafka.dto.Common.UserOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return alice.dip.kafka.events.Common.internal_static_common_User_descriptor; + return alice.dip.kafka.dto.Common.internal_static_common_User_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return alice.dip.kafka.events.Common.internal_static_common_User_fieldAccessorTable + return alice.dip.kafka.dto.Common.internal_static_common_User_fieldAccessorTable .ensureFieldAccessorsInitialized( - alice.dip.kafka.events.Common.User.class, alice.dip.kafka.events.Common.User.Builder.class); + alice.dip.kafka.dto.Common.User.class, 
alice.dip.kafka.dto.Common.User.Builder.class); } // Construct using ch.cern.alice.o2.control.common.Common.User.newBuilder() @@ -780,17 +780,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return alice.dip.kafka.events.Common.internal_static_common_User_descriptor; + return alice.dip.kafka.dto.Common.internal_static_common_User_descriptor; } @java.lang.Override - public alice.dip.kafka.events.Common.User getDefaultInstanceForType() { - return alice.dip.kafka.events.Common.User.getDefaultInstance(); + public alice.dip.kafka.dto.Common.User getDefaultInstanceForType() { + return alice.dip.kafka.dto.Common.User.getDefaultInstance(); } @java.lang.Override - public alice.dip.kafka.events.Common.User build() { - alice.dip.kafka.events.Common.User result = buildPartial(); + public alice.dip.kafka.dto.Common.User build() { + alice.dip.kafka.dto.Common.User result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -798,14 +798,14 @@ public alice.dip.kafka.events.Common.User build() { } @java.lang.Override - public alice.dip.kafka.events.Common.User buildPartial() { - alice.dip.kafka.events.Common.User result = new alice.dip.kafka.events.Common.User(this); + public alice.dip.kafka.dto.Common.User buildPartial() { + alice.dip.kafka.dto.Common.User result = new alice.dip.kafka.dto.Common.User(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } - private void buildPartial0(alice.dip.kafka.events.Common.User result) { + private void buildPartial0(alice.dip.kafka.dto.Common.User result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { @@ -824,16 +824,16 @@ private void buildPartial0(alice.dip.kafka.events.Common.User result) { @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof alice.dip.kafka.events.Common.User) 
{ - return mergeFrom((alice.dip.kafka.events.Common.User)other); + if (other instanceof alice.dip.kafka.dto.Common.User) { + return mergeFrom((alice.dip.kafka.dto.Common.User)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(alice.dip.kafka.events.Common.User other) { - if (other == alice.dip.kafka.events.Common.User.getDefaultInstance()) return this; + public Builder mergeFrom(alice.dip.kafka.dto.Common.User other) { + if (other == alice.dip.kafka.dto.Common.User.getDefaultInstance()) return this; if (other.hasExternalId()) { setExternalId(other.getExternalId()); } @@ -1111,12 +1111,12 @@ public Builder setNameBytes( } // @@protoc_insertion_point(class_scope:common.User) - private static final alice.dip.kafka.events.Common.User DEFAULT_INSTANCE; + private static final alice.dip.kafka.dto.Common.User DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new alice.dip.kafka.events.Common.User(); + DEFAULT_INSTANCE = new alice.dip.kafka.dto.Common.User(); } - public static alice.dip.kafka.events.Common.User getDefaultInstance() { + public static alice.dip.kafka.dto.Common.User getDefaultInstance() { return DEFAULT_INSTANCE; } @@ -1152,7 +1152,7 @@ public com.google.protobuf.Parser getParserForType() { } @java.lang.Override - public alice.dip.kafka.events.Common.User getDefaultInstanceForType() { + public alice.dip.kafka.dto.Common.User getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -1237,15 +1237,15 @@ private WorkflowTemplateInfo() { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_descriptor; + return alice.dip.kafka.dto.Common.internal_static_common_WorkflowTemplateInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_fieldAccessorTable + return 
alice.dip.kafka.dto.Common.internal_static_common_WorkflowTemplateInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( - alice.dip.kafka.events.Common.WorkflowTemplateInfo.class, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder.class); + alice.dip.kafka.dto.Common.WorkflowTemplateInfo.class, alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @@ -1438,10 +1438,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof alice.dip.kafka.events.Common.WorkflowTemplateInfo)) { + if (!(obj instanceof alice.dip.kafka.dto.Common.WorkflowTemplateInfo)) { return super.equals(obj); } - alice.dip.kafka.events.Common.WorkflowTemplateInfo other = (alice.dip.kafka.events.Common.WorkflowTemplateInfo) obj; + alice.dip.kafka.dto.Common.WorkflowTemplateInfo other = (alice.dip.kafka.dto.Common.WorkflowTemplateInfo) obj; if (!getName() .equals(other.getName())) return false; @@ -1476,44 +1476,44 @@ public int hashCode() { return hash; } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static 
alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom(byte[] data) + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom(java.io.InputStream input) + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1521,26 +1521,26 @@ public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( .parseWithIOException(PARSER, input, extensionRegistry); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseDelimitedFrom(java.io.InputStream input) + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage 
.parseDelimitedWithIOException(PARSER, input); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseDelimitedFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessage .parseWithIOException(PARSER, input); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1553,7 +1553,7 @@ public static alice.dip.kafka.events.Common.WorkflowTemplateInfo parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(alice.dip.kafka.events.Common.WorkflowTemplateInfo prototype) { + public static Builder newBuilder(alice.dip.kafka.dto.Common.WorkflowTemplateInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -1574,18 +1574,18 @@ protected Builder newBuilderForType( public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder implements // @@protoc_insertion_point(builder_implements:common.WorkflowTemplateInfo) - alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder { + alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return 
alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_descriptor; + return alice.dip.kafka.dto.Common.internal_static_common_WorkflowTemplateInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_fieldAccessorTable + return alice.dip.kafka.dto.Common.internal_static_common_WorkflowTemplateInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( - alice.dip.kafka.events.Common.WorkflowTemplateInfo.class, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder.class); + alice.dip.kafka.dto.Common.WorkflowTemplateInfo.class, alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder.class); } // Construct using ch.cern.alice.o2.control.common.Common.WorkflowTemplateInfo.newBuilder() @@ -1612,17 +1612,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return alice.dip.kafka.events.Common.internal_static_common_WorkflowTemplateInfo_descriptor; + return alice.dip.kafka.dto.Common.internal_static_common_WorkflowTemplateInfo_descriptor; } @java.lang.Override - public alice.dip.kafka.events.Common.WorkflowTemplateInfo getDefaultInstanceForType() { - return alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance(); + public alice.dip.kafka.dto.Common.WorkflowTemplateInfo getDefaultInstanceForType() { + return alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance(); } @java.lang.Override - public alice.dip.kafka.events.Common.WorkflowTemplateInfo build() { - alice.dip.kafka.events.Common.WorkflowTemplateInfo result = buildPartial(); + public alice.dip.kafka.dto.Common.WorkflowTemplateInfo build() { + alice.dip.kafka.dto.Common.WorkflowTemplateInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -1630,14 +1630,14 @@ public 
alice.dip.kafka.events.Common.WorkflowTemplateInfo build() { } @java.lang.Override - public alice.dip.kafka.events.Common.WorkflowTemplateInfo buildPartial() { - alice.dip.kafka.events.Common.WorkflowTemplateInfo result = new alice.dip.kafka.events.Common.WorkflowTemplateInfo(this); + public alice.dip.kafka.dto.Common.WorkflowTemplateInfo buildPartial() { + alice.dip.kafka.dto.Common.WorkflowTemplateInfo result = new alice.dip.kafka.dto.Common.WorkflowTemplateInfo(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } - private void buildPartial0(alice.dip.kafka.events.Common.WorkflowTemplateInfo result) { + private void buildPartial0(alice.dip.kafka.dto.Common.WorkflowTemplateInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; @@ -1655,16 +1655,16 @@ private void buildPartial0(alice.dip.kafka.events.Common.WorkflowTemplateInfo re @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof alice.dip.kafka.events.Common.WorkflowTemplateInfo) { - return mergeFrom((alice.dip.kafka.events.Common.WorkflowTemplateInfo)other); + if (other instanceof alice.dip.kafka.dto.Common.WorkflowTemplateInfo) { + return mergeFrom((alice.dip.kafka.dto.Common.WorkflowTemplateInfo)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(alice.dip.kafka.events.Common.WorkflowTemplateInfo other) { - if (other == alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance()) return this; + public Builder mergeFrom(alice.dip.kafka.dto.Common.WorkflowTemplateInfo other) { + if (other == alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; @@ -2010,12 +2010,12 @@ public Builder clearPublic() { } // @@protoc_insertion_point(class_scope:common.WorkflowTemplateInfo) - private static final 
alice.dip.kafka.events.Common.WorkflowTemplateInfo DEFAULT_INSTANCE; + private static final alice.dip.kafka.dto.Common.WorkflowTemplateInfo DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new alice.dip.kafka.events.Common.WorkflowTemplateInfo(); + DEFAULT_INSTANCE = new alice.dip.kafka.dto.Common.WorkflowTemplateInfo(); } - public static alice.dip.kafka.events.Common.WorkflowTemplateInfo getDefaultInstance() { + public static alice.dip.kafka.dto.Common.WorkflowTemplateInfo getDefaultInstance() { return DEFAULT_INSTANCE; } @@ -2051,7 +2051,7 @@ public com.google.protobuf.Parser getParserForType() { } @java.lang.Override - public alice.dip.kafka.events.Common.WorkflowTemplateInfo getDefaultInstanceForType() { + public alice.dip.kafka.dto.Common.WorkflowTemplateInfo getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -2160,7 +2160,7 @@ public interface BeamInfoOrBuilder extends * .common.BeamMode beamMode = 8; * @return The beamMode. */ - alice.dip.kafka.events.Common.BeamMode getBeamMode(); + alice.dip.kafka.dto.Common.BeamMode getBeamMode(); } /** *
@@ -2196,15 +2196,15 @@ private BeamInfo() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_descriptor;
+      return alice.dip.kafka.dto.Common.internal_static_common_BeamInfo_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_fieldAccessorTable
+      return alice.dip.kafka.dto.Common.internal_static_common_BeamInfo_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Common.BeamInfo.class, alice.dip.kafka.events.Common.BeamInfo.Builder.class);
+              alice.dip.kafka.dto.Common.BeamInfo.class, alice.dip.kafka.dto.Common.BeamInfo.Builder.class);
     }
 
     public static final int STABLEBEAMSSTART_FIELD_NUMBER = 1;
@@ -2389,9 +2389,9 @@ public java.lang.String getBeamType() {
      * .common.BeamMode beamMode = 8;
      * @return The beamMode.
      */
-    @java.lang.Override public alice.dip.kafka.events.Common.BeamMode getBeamMode() {
-      alice.dip.kafka.events.Common.BeamMode result = alice.dip.kafka.events.Common.BeamMode.forNumber(beamMode_);
-      return result == null ? alice.dip.kafka.events.Common.BeamMode.UNRECOGNIZED : result;
+    @java.lang.Override public alice.dip.kafka.dto.Common.BeamMode getBeamMode() {
+      alice.dip.kafka.dto.Common.BeamMode result = alice.dip.kafka.dto.Common.BeamMode.forNumber(beamMode_);
+      return result == null ? alice.dip.kafka.dto.Common.BeamMode.UNRECOGNIZED : result;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -2429,7 +2429,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(beamType_)) {
         com.google.protobuf.GeneratedMessage.writeString(output, 7, beamType_);
       }
-      if (beamMode_ != alice.dip.kafka.events.Common.BeamMode.UNKNOWN.getNumber()) {
+      if (beamMode_ != alice.dip.kafka.dto.Common.BeamMode.UNKNOWN.getNumber()) {
         output.writeEnum(8, beamMode_);
       }
       getUnknownFields().writeTo(output);
@@ -2467,7 +2467,7 @@ public int getSerializedSize() {
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(beamType_)) {
         size += com.google.protobuf.GeneratedMessage.computeStringSize(7, beamType_);
       }
-      if (beamMode_ != alice.dip.kafka.events.Common.BeamMode.UNKNOWN.getNumber()) {
+      if (beamMode_ != alice.dip.kafka.dto.Common.BeamMode.UNKNOWN.getNumber()) {
         size += com.google.protobuf.CodedOutputStream
           .computeEnumSize(8, beamMode_);
       }
@@ -2481,10 +2481,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Common.BeamInfo)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Common.BeamInfo)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Common.BeamInfo other = (alice.dip.kafka.events.Common.BeamInfo) obj;
+      alice.dip.kafka.dto.Common.BeamInfo other = (alice.dip.kafka.dto.Common.BeamInfo) obj;
 
       if (getStableBeamsStart()
           != other.getStableBeamsStart()) return false;
@@ -2539,44 +2539,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2584,26 +2584,26 @@ public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Common.BeamInfo parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Common.BeamInfo parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Common.BeamInfo parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
+    public static alice.dip.kafka.dto.Common.BeamInfo parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2616,7 +2616,7 @@ public static alice.dip.kafka.events.Common.BeamInfo parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Common.BeamInfo prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Common.BeamInfo prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -2642,18 +2642,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:common.BeamInfo)
-        alice.dip.kafka.events.Common.BeamInfoOrBuilder {
+        alice.dip.kafka.dto.Common.BeamInfoOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_descriptor;
+        return alice.dip.kafka.dto.Common.internal_static_common_BeamInfo_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_fieldAccessorTable
+        return alice.dip.kafka.dto.Common.internal_static_common_BeamInfo_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Common.BeamInfo.class, alice.dip.kafka.events.Common.BeamInfo.Builder.class);
+                alice.dip.kafka.dto.Common.BeamInfo.class, alice.dip.kafka.dto.Common.BeamInfo.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.common.Common.BeamInfo.newBuilder()
@@ -2684,17 +2684,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Common.internal_static_common_BeamInfo_descriptor;
+        return alice.dip.kafka.dto.Common.internal_static_common_BeamInfo_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Common.BeamInfo getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance();
+      public alice.dip.kafka.dto.Common.BeamInfo getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Common.BeamInfo build() {
-        alice.dip.kafka.events.Common.BeamInfo result = buildPartial();
+      public alice.dip.kafka.dto.Common.BeamInfo build() {
+        alice.dip.kafka.dto.Common.BeamInfo result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -2702,14 +2702,14 @@ public alice.dip.kafka.events.Common.BeamInfo build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Common.BeamInfo buildPartial() {
-        alice.dip.kafka.events.Common.BeamInfo result = new alice.dip.kafka.events.Common.BeamInfo(this);
+      public alice.dip.kafka.dto.Common.BeamInfo buildPartial() {
+        alice.dip.kafka.dto.Common.BeamInfo result = new alice.dip.kafka.dto.Common.BeamInfo(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Common.BeamInfo result) {
+      private void buildPartial0(alice.dip.kafka.dto.Common.BeamInfo result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.stableBeamsStart_ = stableBeamsStart_;
@@ -2739,16 +2739,16 @@ private void buildPartial0(alice.dip.kafka.events.Common.BeamInfo result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Common.BeamInfo) {
-          return mergeFrom((alice.dip.kafka.events.Common.BeamInfo)other);
+        if (other instanceof alice.dip.kafka.dto.Common.BeamInfo) {
+          return mergeFrom((alice.dip.kafka.dto.Common.BeamInfo)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Common.BeamInfo other) {
-        if (other == alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Common.BeamInfo other) {
+        if (other == alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance()) return this;
         if (other.getStableBeamsStart() != 0L) {
           setStableBeamsStart(other.getStableBeamsStart());
         }
@@ -3288,16 +3288,16 @@ public Builder setBeamModeValue(int value) {
        * @return The beamMode.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Common.BeamMode getBeamMode() {
-        alice.dip.kafka.events.Common.BeamMode result = alice.dip.kafka.events.Common.BeamMode.forNumber(beamMode_);
-        return result == null ? alice.dip.kafka.events.Common.BeamMode.UNRECOGNIZED : result;
+      public alice.dip.kafka.dto.Common.BeamMode getBeamMode() {
+        alice.dip.kafka.dto.Common.BeamMode result = alice.dip.kafka.dto.Common.BeamMode.forNumber(beamMode_);
+        return result == null ? alice.dip.kafka.dto.Common.BeamMode.UNRECOGNIZED : result;
       }
       /**
        * .common.BeamMode beamMode = 8;
        * @param value The beamMode to set.
        * @return This builder for chaining.
        */
-      public Builder setBeamMode(alice.dip.kafka.events.Common.BeamMode value) {
+      public Builder setBeamMode(alice.dip.kafka.dto.Common.BeamMode value) {
         if (value == null) { throw new NullPointerException(); }
         bitField0_ |= 0x00000080;
         beamMode_ = value.getNumber();
@@ -3319,12 +3319,12 @@ public Builder clearBeamMode() {
     }
 
     // @@protoc_insertion_point(class_scope:common.BeamInfo)
-    private static final alice.dip.kafka.events.Common.BeamInfo DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Common.BeamInfo DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Common.BeamInfo();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Common.BeamInfo();
     }
 
-    public static alice.dip.kafka.events.Common.BeamInfo getDefaultInstance() {
+    public static alice.dip.kafka.dto.Common.BeamInfo getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -3360,7 +3360,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Common.BeamInfo getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Common.BeamInfo getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
diff --git a/src/alice/dip/kafka/events/Events.java b/src/alice/dip/kafka/dto/Events.java
similarity index 85%
rename from src/alice/dip/kafka/events/Events.java
rename to src/alice/dip/kafka/dto/Events.java
index 57aa7bc..ea31d8b 100644
--- a/src/alice/dip/kafka/events/Events.java
+++ b/src/alice/dip/kafka/dto/Events.java
@@ -3,7 +3,7 @@
 // source: events.proto
 // Protobuf Java Version: 4.32.1
 
-package alice.dip.kafka.events;
+package alice.dip.kafka.dto;
 
 @com.google.protobuf.Generated
 public final class Events extends com.google.protobuf.GeneratedFile {
@@ -153,7 +153,7 @@ public OpStatus findValueByNumber(int number) {
     }
     public static com.google.protobuf.Descriptors.EnumDescriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.getDescriptor().getEnumTypes().get(0);
+      return alice.dip.kafka.dto.Events.getDescriptor().getEnumTypes().get(0);
     }
 
     private static final OpStatus[] VALUES = values();
@@ -209,15 +209,15 @@ private Ev_MetaEvent_MesosHeartbeat() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.class, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.class, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder.class);
     }
 
     private byte memoizedIsInitialized = -1;
@@ -253,10 +253,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat other = (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) obj;
+      alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat other = (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) obj;
 
       if (!getUnknownFields().equals(other.getUnknownFields())) return false;
       return true;
@@ -274,44 +274,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -319,26 +319,26 @@ public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFro
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -351,7 +351,7 @@ public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat parseFro
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -372,18 +372,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_MetaEvent_MesosHeartbeat)
-        alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.class, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.class, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_MetaEvent_MesosHeartbeat.newBuilder()
@@ -405,17 +405,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat build() {
-        alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat build() {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -423,24 +423,24 @@ public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat buildPartial() {
-        alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat result = new alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat(this);
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat result = new alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat(this);
         onBuilt();
         return result;
       }
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat other) {
-        if (other == alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance()) return this;
         this.mergeUnknownFields(other.getUnknownFields());
         onChanged();
         return this;
@@ -487,12 +487,12 @@ public Builder mergeFrom(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_MetaEvent_MesosHeartbeat)
-    private static final alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -528,7 +528,7 @@ public com.google.protobuf.Parser getParserForType(
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -577,15 +577,15 @@ private Ev_MetaEvent_CoreStart() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.class, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.class, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder.class);
     }
 
     public static final int FRAMEWORKID_FIELD_NUMBER = 1;
@@ -666,10 +666,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart other = (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) obj;
+      alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart other = (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) obj;
 
       if (!getFrameworkId()
           .equals(other.getFrameworkId())) return false;
@@ -691,44 +691,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -736,26 +736,26 @@ public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -768,7 +768,7 @@ public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -789,18 +789,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_MetaEvent_CoreStart)
-        alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_CoreStart_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.class, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.class, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_MetaEvent_CoreStart.newBuilder()
@@ -824,17 +824,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_CoreStart_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart build() {
-        alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart build() {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -842,14 +842,14 @@ public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart buildPartial() {
-        alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart result = new alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart(this);
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart result = new alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.frameworkId_ = frameworkId_;
@@ -858,16 +858,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart other) {
-        if (other == alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance()) return this;
         if (!other.getFrameworkId().isEmpty()) {
           frameworkId_ = other.frameworkId_;
           bitField0_ |= 0x00000001;
@@ -997,12 +997,12 @@ public Builder setFrameworkIdBytes(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_MetaEvent_CoreStart)
-    private static final alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -1038,7 +1038,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -1100,15 +1100,15 @@ private Ev_MetaEvent_FrameworkEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.class, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.class, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder.class);
     }
 
     public static final int FRAMEWORKID_FIELD_NUMBER = 1;
@@ -1234,10 +1234,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent other = (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent other = (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) obj;
 
       if (!getFrameworkId()
           .equals(other.getFrameworkId())) return false;
@@ -1263,44 +1263,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -1308,26 +1308,26 @@ public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFro
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -1340,7 +1340,7 @@ public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent parseFro
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -1361,18 +1361,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_MetaEvent_FrameworkEvent)
-        alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.class, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.class, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_MetaEvent_FrameworkEvent.newBuilder()
@@ -1397,17 +1397,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_MetaEvent_FrameworkEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent build() {
-        alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent build() {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -1415,14 +1415,14 @@ public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent result = new alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent result = new alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.frameworkId_ = frameworkId_;
@@ -1434,16 +1434,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkE
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance()) return this;
         if (!other.getFrameworkId().isEmpty()) {
           frameworkId_ = other.frameworkId_;
           bitField0_ |= 0x00000001;
@@ -1655,12 +1655,12 @@ public Builder setMessageBytes(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_MetaEvent_FrameworkEvent)
-    private static final alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -1696,7 +1696,7 @@ public com.google.protobuf.Parser getParserForType(
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -1805,7 +1805,7 @@ public interface Ev_EnvironmentEventOrBuilder extends
      * .events.OpStatus transitionStatus = 8;
      * @return The transitionStatus.
      */
-    alice.dip.kafka.events.Events.OpStatus getTransitionStatus();
+    alice.dip.kafka.dto.Events.OpStatus getTransitionStatus();
 
     /**
      * 
@@ -1870,11 +1870,11 @@ java.lang.String getVarsOrThrow(
      * .common.User lastRequestUser = 10;
      * @return The lastRequestUser.
      */
-    alice.dip.kafka.events.Common.User getLastRequestUser();
+    alice.dip.kafka.dto.Common.User getLastRequestUser();
     /**
      * .common.User lastRequestUser = 10;
      */
-    alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder();
+    alice.dip.kafka.dto.Common.UserOrBuilder getLastRequestUserOrBuilder();
 
     /**
      * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
@@ -1885,11 +1885,11 @@ java.lang.String getVarsOrThrow(
      * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
      * @return The workflowTemplateInfo.
      */
-    alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInfo();
+    alice.dip.kafka.dto.Common.WorkflowTemplateInfo getWorkflowTemplateInfo();
     /**
      * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
      */
-    alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder();
+    alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder();
   }
   /**
    * Protobuf type {@code events.Ev_EnvironmentEvent}
@@ -1924,7 +1924,7 @@ private Ev_EnvironmentEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_EnvironmentEvent_descriptor;
     }
 
     @SuppressWarnings({"rawtypes"})
@@ -1942,9 +1942,9 @@ protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldRefl
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_EnvironmentEvent.class, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.class, alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder.class);
     }
 
     private int bitField0_;
@@ -2218,9 +2218,9 @@ public java.lang.String getTransitionStep() {
      * .events.OpStatus transitionStatus = 8;
      * @return The transitionStatus.
      */
-    @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() {
-      alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_);
-      return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+    @java.lang.Override public alice.dip.kafka.dto.Events.OpStatus getTransitionStatus() {
+      alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(transitionStatus_);
+      return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
     }
 
     public static final int VARS_FIELD_NUMBER = 9;
@@ -2229,7 +2229,7 @@ private static final class VarsDefaultEntryHolder {
           java.lang.String, java.lang.String> defaultEntry =
               com.google.protobuf.MapEntry
               .newDefaultInstance(
-                  alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_VarsEntry_descriptor, 
+                  alice.dip.kafka.dto.Events.internal_static_events_Ev_EnvironmentEvent_VarsEntry_descriptor, 
                   com.google.protobuf.WireFormat.FieldType.STRING,
                   "",
                   com.google.protobuf.WireFormat.FieldType.STRING,
@@ -2319,7 +2319,7 @@ public java.lang.String getVarsOrThrow(
     }
 
     public static final int LASTREQUESTUSER_FIELD_NUMBER = 10;
-    private alice.dip.kafka.events.Common.User lastRequestUser_;
+    private alice.dip.kafka.dto.Common.User lastRequestUser_;
     /**
      * .common.User lastRequestUser = 10;
      * @return Whether the lastRequestUser field is set.
@@ -2333,19 +2333,19 @@ public boolean hasLastRequestUser() {
      * @return The lastRequestUser.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.User getLastRequestUser() {
-      return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+    public alice.dip.kafka.dto.Common.User getLastRequestUser() {
+      return lastRequestUser_ == null ? alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
     }
     /**
      * .common.User lastRequestUser = 10;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() {
-      return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+    public alice.dip.kafka.dto.Common.UserOrBuilder getLastRequestUserOrBuilder() {
+      return lastRequestUser_ == null ? alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
     }
 
     public static final int WORKFLOWTEMPLATEINFO_FIELD_NUMBER = 11;
-    private alice.dip.kafka.events.Common.WorkflowTemplateInfo workflowTemplateInfo_;
+    private alice.dip.kafka.dto.Common.WorkflowTemplateInfo workflowTemplateInfo_;
     /**
      * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
      * @return Whether the workflowTemplateInfo field is set.
@@ -2359,15 +2359,15 @@ public boolean hasWorkflowTemplateInfo() {
      * @return The workflowTemplateInfo.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInfo() {
-      return workflowTemplateInfo_ == null ? alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
+    public alice.dip.kafka.dto.Common.WorkflowTemplateInfo getWorkflowTemplateInfo() {
+      return workflowTemplateInfo_ == null ? alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
     }
     /**
      * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder() {
-      return workflowTemplateInfo_ == null ? alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
+    public alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder() {
+      return workflowTemplateInfo_ == null ? alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -2405,7 +2405,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transitionStep_)) {
         com.google.protobuf.GeneratedMessage.writeString(output, 7, transitionStep_);
       }
-      if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (transitionStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         output.writeEnum(8, transitionStatus_);
       }
       com.google.protobuf.GeneratedMessage
@@ -2451,7 +2451,7 @@ public int getSerializedSize() {
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transitionStep_)) {
         size += com.google.protobuf.GeneratedMessage.computeStringSize(7, transitionStep_);
       }
-      if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (transitionStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         size += com.google.protobuf.CodedOutputStream
           .computeEnumSize(8, transitionStatus_);
       }
@@ -2483,10 +2483,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_EnvironmentEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_EnvironmentEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_EnvironmentEvent other = (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_EnvironmentEvent other = (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) obj;
 
       if (!getEnvironmentId()
           .equals(other.getEnvironmentId())) return false;
@@ -2559,44 +2559,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2604,26 +2604,26 @@ public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2636,7 +2636,7 @@ public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_EnvironmentEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_EnvironmentEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -2657,10 +2657,10 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_EnvironmentEvent)
-        alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_EnvironmentEvent_descriptor;
       }
 
       @SuppressWarnings({"rawtypes"})
@@ -2688,9 +2688,9 @@ protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFi
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_EnvironmentEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_EnvironmentEvent.class, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.class, alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_EnvironmentEvent.newBuilder()
@@ -2739,17 +2739,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_EnvironmentEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_EnvironmentEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_EnvironmentEvent build() {
-        alice.dip.kafka.events.Events.Ev_EnvironmentEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent build() {
+        alice.dip.kafka.dto.Events.Ev_EnvironmentEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -2757,14 +2757,14 @@ public alice.dip.kafka.events.Events.Ev_EnvironmentEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_EnvironmentEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_EnvironmentEvent result = new alice.dip.kafka.events.Events.Ev_EnvironmentEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_EnvironmentEvent result = new alice.dip.kafka.dto.Events.Ev_EnvironmentEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_EnvironmentEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_EnvironmentEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.environmentId_ = environmentId_;
@@ -2812,16 +2812,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_EnvironmentEvent res
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_EnvironmentEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_EnvironmentEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_EnvironmentEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_EnvironmentEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_EnvironmentEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance()) return this;
         if (!other.getEnvironmentId().isEmpty()) {
           environmentId_ = other.environmentId_;
           bitField0_ |= 0x00000001;
@@ -3493,16 +3493,16 @@ public Builder setTransitionStatusValue(int value) {
        * @return The transitionStatus.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() {
-        alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_);
-        return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+      public alice.dip.kafka.dto.Events.OpStatus getTransitionStatus() {
+        alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(transitionStatus_);
+        return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
       }
       /**
        * .events.OpStatus transitionStatus = 8;
        * @param value The transitionStatus to set.
        * @return This builder for chaining.
        */
-      public Builder setTransitionStatus(alice.dip.kafka.events.Events.OpStatus value) {
+      public Builder setTransitionStatus(alice.dip.kafka.dto.Events.OpStatus value) {
         if (value == null) { throw new NullPointerException(); }
         bitField0_ |= 0x00000080;
         transitionStatus_ = value.getNumber();
@@ -3675,9 +3675,9 @@ public Builder putAllVars(
         return this;
       }
 
-      private alice.dip.kafka.events.Common.User lastRequestUser_;
+      private alice.dip.kafka.dto.Common.User lastRequestUser_;
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> lastRequestUserBuilder_;
+          alice.dip.kafka.dto.Common.User, alice.dip.kafka.dto.Common.User.Builder, alice.dip.kafka.dto.Common.UserOrBuilder> lastRequestUserBuilder_;
       /**
        * .common.User lastRequestUser = 10;
        * @return Whether the lastRequestUser field is set.
@@ -3689,9 +3689,9 @@ public boolean hasLastRequestUser() {
        * .common.User lastRequestUser = 10;
        * @return The lastRequestUser.
        */
-      public alice.dip.kafka.events.Common.User getLastRequestUser() {
+      public alice.dip.kafka.dto.Common.User getLastRequestUser() {
         if (lastRequestUserBuilder_ == null) {
-          return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+          return lastRequestUser_ == null ? alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
         } else {
           return lastRequestUserBuilder_.getMessage();
         }
@@ -3699,7 +3699,7 @@ public alice.dip.kafka.events.Common.User getLastRequestUser() {
       /**
        * .common.User lastRequestUser = 10;
        */
-      public Builder setLastRequestUser(alice.dip.kafka.events.Common.User value) {
+      public Builder setLastRequestUser(alice.dip.kafka.dto.Common.User value) {
         if (lastRequestUserBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -3716,7 +3716,7 @@ public Builder setLastRequestUser(alice.dip.kafka.events.Common.User value) {
        * .common.User lastRequestUser = 10;
        */
       public Builder setLastRequestUser(
-          alice.dip.kafka.events.Common.User.Builder builderForValue) {
+          alice.dip.kafka.dto.Common.User.Builder builderForValue) {
         if (lastRequestUserBuilder_ == null) {
           lastRequestUser_ = builderForValue.build();
         } else {
@@ -3729,11 +3729,11 @@ public Builder setLastRequestUser(
       /**
        * .common.User lastRequestUser = 10;
        */
-      public Builder mergeLastRequestUser(alice.dip.kafka.events.Common.User value) {
+      public Builder mergeLastRequestUser(alice.dip.kafka.dto.Common.User value) {
         if (lastRequestUserBuilder_ == null) {
           if (((bitField0_ & 0x00000200) != 0) &&
             lastRequestUser_ != null &&
-            lastRequestUser_ != alice.dip.kafka.events.Common.User.getDefaultInstance()) {
+            lastRequestUser_ != alice.dip.kafka.dto.Common.User.getDefaultInstance()) {
             getLastRequestUserBuilder().mergeFrom(value);
           } else {
             lastRequestUser_ = value;
@@ -3763,7 +3763,7 @@ public Builder clearLastRequestUser() {
       /**
        * .common.User lastRequestUser = 10;
        */
-      public alice.dip.kafka.events.Common.User.Builder getLastRequestUserBuilder() {
+      public alice.dip.kafka.dto.Common.User.Builder getLastRequestUserBuilder() {
         bitField0_ |= 0x00000200;
         onChanged();
         return internalGetLastRequestUserFieldBuilder().getBuilder();
@@ -3771,23 +3771,23 @@ public alice.dip.kafka.events.Common.User.Builder getLastRequestUserBuilder() {
       /**
        * .common.User lastRequestUser = 10;
        */
-      public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() {
+      public alice.dip.kafka.dto.Common.UserOrBuilder getLastRequestUserOrBuilder() {
         if (lastRequestUserBuilder_ != null) {
           return lastRequestUserBuilder_.getMessageOrBuilder();
         } else {
           return lastRequestUser_ == null ?
-              alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+              alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
         }
       }
       /**
        * .common.User lastRequestUser = 10;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> 
+          alice.dip.kafka.dto.Common.User, alice.dip.kafka.dto.Common.User.Builder, alice.dip.kafka.dto.Common.UserOrBuilder> 
           internalGetLastRequestUserFieldBuilder() {
         if (lastRequestUserBuilder_ == null) {
           lastRequestUserBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder>(
+              alice.dip.kafka.dto.Common.User, alice.dip.kafka.dto.Common.User.Builder, alice.dip.kafka.dto.Common.UserOrBuilder>(
                   getLastRequestUser(),
                   getParentForChildren(),
                   isClean());
@@ -3796,9 +3796,9 @@ public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder()
         return lastRequestUserBuilder_;
       }
 
-      private alice.dip.kafka.events.Common.WorkflowTemplateInfo workflowTemplateInfo_;
+      private alice.dip.kafka.dto.Common.WorkflowTemplateInfo workflowTemplateInfo_;
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.WorkflowTemplateInfo, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder> workflowTemplateInfoBuilder_;
+          alice.dip.kafka.dto.Common.WorkflowTemplateInfo, alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder> workflowTemplateInfoBuilder_;
       /**
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        * @return Whether the workflowTemplateInfo field is set.
@@ -3810,9 +3810,9 @@ public boolean hasWorkflowTemplateInfo() {
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        * @return The workflowTemplateInfo.
        */
-      public alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInfo() {
+      public alice.dip.kafka.dto.Common.WorkflowTemplateInfo getWorkflowTemplateInfo() {
         if (workflowTemplateInfoBuilder_ == null) {
-          return workflowTemplateInfo_ == null ? alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
+          return workflowTemplateInfo_ == null ? alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
         } else {
           return workflowTemplateInfoBuilder_.getMessage();
         }
@@ -3820,7 +3820,7 @@ public alice.dip.kafka.events.Common.WorkflowTemplateInfo getWorkflowTemplateInf
       /**
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        */
-      public Builder setWorkflowTemplateInfo(alice.dip.kafka.events.Common.WorkflowTemplateInfo value) {
+      public Builder setWorkflowTemplateInfo(alice.dip.kafka.dto.Common.WorkflowTemplateInfo value) {
         if (workflowTemplateInfoBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -3837,7 +3837,7 @@ public Builder setWorkflowTemplateInfo(alice.dip.kafka.events.Common.WorkflowTem
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        */
       public Builder setWorkflowTemplateInfo(
-          alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder builderForValue) {
+          alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder builderForValue) {
         if (workflowTemplateInfoBuilder_ == null) {
           workflowTemplateInfo_ = builderForValue.build();
         } else {
@@ -3850,11 +3850,11 @@ public Builder setWorkflowTemplateInfo(
       /**
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        */
-      public Builder mergeWorkflowTemplateInfo(alice.dip.kafka.events.Common.WorkflowTemplateInfo value) {
+      public Builder mergeWorkflowTemplateInfo(alice.dip.kafka.dto.Common.WorkflowTemplateInfo value) {
         if (workflowTemplateInfoBuilder_ == null) {
           if (((bitField0_ & 0x00000400) != 0) &&
             workflowTemplateInfo_ != null &&
-            workflowTemplateInfo_ != alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance()) {
+            workflowTemplateInfo_ != alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance()) {
             getWorkflowTemplateInfoBuilder().mergeFrom(value);
           } else {
             workflowTemplateInfo_ = value;
@@ -3884,7 +3884,7 @@ public Builder clearWorkflowTemplateInfo() {
       /**
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        */
-      public alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder getWorkflowTemplateInfoBuilder() {
+      public alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder getWorkflowTemplateInfoBuilder() {
         bitField0_ |= 0x00000400;
         onChanged();
         return internalGetWorkflowTemplateInfoFieldBuilder().getBuilder();
@@ -3892,23 +3892,23 @@ public alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder getWorkflowTem
       /**
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        */
-      public alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder() {
+      public alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder getWorkflowTemplateInfoOrBuilder() {
         if (workflowTemplateInfoBuilder_ != null) {
           return workflowTemplateInfoBuilder_.getMessageOrBuilder();
         } else {
           return workflowTemplateInfo_ == null ?
-              alice.dip.kafka.events.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
+              alice.dip.kafka.dto.Common.WorkflowTemplateInfo.getDefaultInstance() : workflowTemplateInfo_;
         }
       }
       /**
        * .common.WorkflowTemplateInfo workflowTemplateInfo = 11;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.WorkflowTemplateInfo, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder> 
+          alice.dip.kafka.dto.Common.WorkflowTemplateInfo, alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder> 
           internalGetWorkflowTemplateInfoFieldBuilder() {
         if (workflowTemplateInfoBuilder_ == null) {
           workflowTemplateInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Common.WorkflowTemplateInfo, alice.dip.kafka.events.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder>(
+              alice.dip.kafka.dto.Common.WorkflowTemplateInfo, alice.dip.kafka.dto.Common.WorkflowTemplateInfo.Builder, alice.dip.kafka.dto.Common.WorkflowTemplateInfoOrBuilder>(
                   getWorkflowTemplateInfo(),
                   getParentForChildren(),
                   isClean());
@@ -3921,12 +3921,12 @@ public alice.dip.kafka.events.Common.WorkflowTemplateInfoOrBuilder getWorkflowTe
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_EnvironmentEvent)
-    private static final alice.dip.kafka.events.Events.Ev_EnvironmentEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_EnvironmentEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_EnvironmentEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_EnvironmentEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_EnvironmentEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_EnvironmentEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -3962,7 +3962,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -4043,15 +4043,15 @@ private Traits() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Traits_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Traits_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Traits_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Traits_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Traits.class, alice.dip.kafka.events.Events.Traits.Builder.class);
+              alice.dip.kafka.dto.Events.Traits.class, alice.dip.kafka.dto.Events.Traits.Builder.class);
     }
 
     public static final int TRIGGER_FIELD_NUMBER = 1;
@@ -4240,10 +4240,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Traits)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Traits)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Traits other = (alice.dip.kafka.events.Events.Traits) obj;
+      alice.dip.kafka.dto.Events.Traits other = (alice.dip.kafka.dto.Events.Traits) obj;
 
       if (!getTrigger()
           .equals(other.getTrigger())) return false;
@@ -4278,44 +4278,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -4323,26 +4323,26 @@ public static alice.dip.kafka.events.Events.Traits parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Traits parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Traits parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Traits parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Traits parseFrom(
+    public static alice.dip.kafka.dto.Events.Traits parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -4355,7 +4355,7 @@ public static alice.dip.kafka.events.Events.Traits parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Traits prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Traits prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -4376,18 +4376,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Traits)
-        alice.dip.kafka.events.Events.TraitsOrBuilder {
+        alice.dip.kafka.dto.Events.TraitsOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Traits_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Traits_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Traits_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Traits_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Traits.class, alice.dip.kafka.events.Events.Traits.Builder.class);
+                alice.dip.kafka.dto.Events.Traits.class, alice.dip.kafka.dto.Events.Traits.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Traits.newBuilder()
@@ -4414,17 +4414,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Traits_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Traits_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Traits getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Traits.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Traits getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Traits.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Traits build() {
-        alice.dip.kafka.events.Events.Traits result = buildPartial();
+      public alice.dip.kafka.dto.Events.Traits build() {
+        alice.dip.kafka.dto.Events.Traits result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -4432,14 +4432,14 @@ public alice.dip.kafka.events.Events.Traits build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Traits buildPartial() {
-        alice.dip.kafka.events.Events.Traits result = new alice.dip.kafka.events.Events.Traits(this);
+      public alice.dip.kafka.dto.Events.Traits buildPartial() {
+        alice.dip.kafka.dto.Events.Traits result = new alice.dip.kafka.dto.Events.Traits(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Traits result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Traits result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.trigger_ = trigger_;
@@ -4457,16 +4457,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Traits result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Traits) {
-          return mergeFrom((alice.dip.kafka.events.Events.Traits)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Traits) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Traits)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Traits other) {
-        if (other == alice.dip.kafka.events.Events.Traits.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Traits other) {
+        if (other == alice.dip.kafka.dto.Events.Traits.getDefaultInstance()) return this;
         if (!other.getTrigger().isEmpty()) {
           trigger_ = other.trigger_;
           bitField0_ |= 0x00000001;
@@ -4800,12 +4800,12 @@ public Builder clearCritical() {
     }
 
     // @@protoc_insertion_point(class_scope:events.Traits)
-    private static final alice.dip.kafka.events.Events.Traits DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Traits DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Traits();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Traits();
     }
 
-    public static alice.dip.kafka.events.Events.Traits getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Traits getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -4841,7 +4841,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Traits getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Traits getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -4972,11 +4972,11 @@ public interface Ev_TaskEventOrBuilder extends
      * .events.Traits traits = 7;
      * @return The traits.
      */
-    alice.dip.kafka.events.Events.Traits getTraits();
+    alice.dip.kafka.dto.Events.Traits getTraits();
     /**
      * .events.Traits traits = 7;
      */
-    alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder();
+    alice.dip.kafka.dto.Events.TraitsOrBuilder getTraitsOrBuilder();
 
     /**
      * string environmentId = 8;
@@ -5044,15 +5044,15 @@ private Ev_TaskEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_TaskEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_TaskEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_TaskEvent.class, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_TaskEvent.class, alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder.class);
     }
 
     private int bitField0_;
@@ -5331,7 +5331,7 @@ public java.lang.String getClassName() {
     }
 
     public static final int TRAITS_FIELD_NUMBER = 7;
-    private alice.dip.kafka.events.Events.Traits traits_;
+    private alice.dip.kafka.dto.Events.Traits traits_;
     /**
      * .events.Traits traits = 7;
      * @return Whether the traits field is set.
@@ -5345,15 +5345,15 @@ public boolean hasTraits() {
      * @return The traits.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Traits getTraits() {
-      return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+    public alice.dip.kafka.dto.Events.Traits getTraits() {
+      return traits_ == null ? alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
     }
     /**
      * .events.Traits traits = 7;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() {
-      return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+    public alice.dip.kafka.dto.Events.TraitsOrBuilder getTraitsOrBuilder() {
+      return traits_ == null ? alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
     }
 
     public static final int ENVIRONMENTID_FIELD_NUMBER = 8;
@@ -5530,10 +5530,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_TaskEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_TaskEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_TaskEvent other = (alice.dip.kafka.events.Events.Ev_TaskEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_TaskEvent other = (alice.dip.kafka.dto.Events.Ev_TaskEvent) obj;
 
       if (!getName()
           .equals(other.getName())) return false;
@@ -5592,44 +5592,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -5637,26 +5637,26 @@ public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -5669,7 +5669,7 @@ public static alice.dip.kafka.events.Events.Ev_TaskEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_TaskEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_TaskEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -5690,18 +5690,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_TaskEvent)
-        alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_TaskEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_TaskEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_TaskEvent.class, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_TaskEvent.class, alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_TaskEvent.newBuilder()
@@ -5743,17 +5743,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_TaskEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_TaskEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_TaskEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_TaskEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_TaskEvent build() {
-        alice.dip.kafka.events.Events.Ev_TaskEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_TaskEvent build() {
+        alice.dip.kafka.dto.Events.Ev_TaskEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -5761,14 +5761,14 @@ public alice.dip.kafka.events.Events.Ev_TaskEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_TaskEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_TaskEvent result = new alice.dip.kafka.events.Events.Ev_TaskEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_TaskEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_TaskEvent result = new alice.dip.kafka.dto.Events.Ev_TaskEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_TaskEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_TaskEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.name_ = name_;
@@ -5806,16 +5806,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_TaskEvent result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_TaskEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_TaskEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_TaskEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_TaskEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_TaskEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_TaskEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance()) return this;
         if (!other.getName().isEmpty()) {
           name_ = other.name_;
           bitField0_ |= 0x00000001;
@@ -6481,9 +6481,9 @@ public Builder setClassNameBytes(
         return this;
       }
 
-      private alice.dip.kafka.events.Events.Traits traits_;
+      private alice.dip.kafka.dto.Events.Traits traits_;
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> traitsBuilder_;
+          alice.dip.kafka.dto.Events.Traits, alice.dip.kafka.dto.Events.Traits.Builder, alice.dip.kafka.dto.Events.TraitsOrBuilder> traitsBuilder_;
       /**
        * .events.Traits traits = 7;
        * @return Whether the traits field is set.
@@ -6495,9 +6495,9 @@ public boolean hasTraits() {
        * .events.Traits traits = 7;
        * @return The traits.
        */
-      public alice.dip.kafka.events.Events.Traits getTraits() {
+      public alice.dip.kafka.dto.Events.Traits getTraits() {
         if (traitsBuilder_ == null) {
-          return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+          return traits_ == null ? alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
         } else {
           return traitsBuilder_.getMessage();
         }
@@ -6505,7 +6505,7 @@ public alice.dip.kafka.events.Events.Traits getTraits() {
       /**
        * .events.Traits traits = 7;
        */
-      public Builder setTraits(alice.dip.kafka.events.Events.Traits value) {
+      public Builder setTraits(alice.dip.kafka.dto.Events.Traits value) {
         if (traitsBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -6522,7 +6522,7 @@ public Builder setTraits(alice.dip.kafka.events.Events.Traits value) {
        * .events.Traits traits = 7;
        */
       public Builder setTraits(
-          alice.dip.kafka.events.Events.Traits.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Traits.Builder builderForValue) {
         if (traitsBuilder_ == null) {
           traits_ = builderForValue.build();
         } else {
@@ -6535,11 +6535,11 @@ public Builder setTraits(
       /**
        * .events.Traits traits = 7;
        */
-      public Builder mergeTraits(alice.dip.kafka.events.Events.Traits value) {
+      public Builder mergeTraits(alice.dip.kafka.dto.Events.Traits value) {
         if (traitsBuilder_ == null) {
           if (((bitField0_ & 0x00000040) != 0) &&
             traits_ != null &&
-            traits_ != alice.dip.kafka.events.Events.Traits.getDefaultInstance()) {
+            traits_ != alice.dip.kafka.dto.Events.Traits.getDefaultInstance()) {
             getTraitsBuilder().mergeFrom(value);
           } else {
             traits_ = value;
@@ -6569,7 +6569,7 @@ public Builder clearTraits() {
       /**
        * .events.Traits traits = 7;
        */
-      public alice.dip.kafka.events.Events.Traits.Builder getTraitsBuilder() {
+      public alice.dip.kafka.dto.Events.Traits.Builder getTraitsBuilder() {
         bitField0_ |= 0x00000040;
         onChanged();
         return internalGetTraitsFieldBuilder().getBuilder();
@@ -6577,23 +6577,23 @@ public alice.dip.kafka.events.Events.Traits.Builder getTraitsBuilder() {
       /**
        * .events.Traits traits = 7;
        */
-      public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() {
+      public alice.dip.kafka.dto.Events.TraitsOrBuilder getTraitsOrBuilder() {
         if (traitsBuilder_ != null) {
           return traitsBuilder_.getMessageOrBuilder();
         } else {
           return traits_ == null ?
-              alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+              alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
         }
       }
       /**
        * .events.Traits traits = 7;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> 
+          alice.dip.kafka.dto.Events.Traits, alice.dip.kafka.dto.Events.Traits.Builder, alice.dip.kafka.dto.Events.TraitsOrBuilder> 
           internalGetTraitsFieldBuilder() {
         if (traitsBuilder_ == null) {
           traitsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder>(
+              alice.dip.kafka.dto.Events.Traits, alice.dip.kafka.dto.Events.Traits.Builder, alice.dip.kafka.dto.Events.TraitsOrBuilder>(
                   getTraits(),
                   getParentForChildren(),
                   isClean());
@@ -6770,12 +6770,12 @@ public Builder setPathBytes(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_TaskEvent)
-    private static final alice.dip.kafka.events.Events.Ev_TaskEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_TaskEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_TaskEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_TaskEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_TaskEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_TaskEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -6811,7 +6811,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_TaskEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_TaskEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -6858,7 +6858,7 @@ public interface Ev_CallEventOrBuilder extends
      * .events.OpStatus callStatus = 2;
      * @return The callStatus.
      */
-    alice.dip.kafka.events.Events.OpStatus getCallStatus();
+    alice.dip.kafka.dto.Events.OpStatus getCallStatus();
 
     /**
      * 
@@ -6889,11 +6889,11 @@ public interface Ev_CallEventOrBuilder extends
      * .events.Traits traits = 4;
      * @return The traits.
      */
-    alice.dip.kafka.events.Events.Traits getTraits();
+    alice.dip.kafka.dto.Events.Traits getTraits();
     /**
      * .events.Traits traits = 4;
      */
-    alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder();
+    alice.dip.kafka.dto.Events.TraitsOrBuilder getTraitsOrBuilder();
 
     /**
      * 
@@ -7000,15 +7000,15 @@ private Ev_CallEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_CallEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_CallEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_CallEvent.class, alice.dip.kafka.events.Events.Ev_CallEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_CallEvent.class, alice.dip.kafka.dto.Events.Ev_CallEvent.Builder.class);
     }
 
     private int bitField0_;
@@ -7080,9 +7080,9 @@ public java.lang.String getFunc() {
      * .events.OpStatus callStatus = 2;
      * @return The callStatus.
      */
-    @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getCallStatus() {
-      alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(callStatus_);
-      return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+    @java.lang.Override public alice.dip.kafka.dto.Events.OpStatus getCallStatus() {
+      alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(callStatus_);
+      return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
     }
 
     public static final int RETURN_FIELD_NUMBER = 3;
@@ -7133,7 +7133,7 @@ public java.lang.String getReturn() {
     }
 
     public static final int TRAITS_FIELD_NUMBER = 4;
-    private alice.dip.kafka.events.Events.Traits traits_;
+    private alice.dip.kafka.dto.Events.Traits traits_;
     /**
      * .events.Traits traits = 4;
      * @return Whether the traits field is set.
@@ -7147,15 +7147,15 @@ public boolean hasTraits() {
      * @return The traits.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Traits getTraits() {
-      return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+    public alice.dip.kafka.dto.Events.Traits getTraits() {
+      return traits_ == null ? alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
     }
     /**
      * .events.Traits traits = 4;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() {
-      return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+    public alice.dip.kafka.dto.Events.TraitsOrBuilder getTraitsOrBuilder() {
+      return traits_ == null ? alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
     }
 
     public static final int OUTPUT_FIELD_NUMBER = 5;
@@ -7355,7 +7355,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(func_)) {
         com.google.protobuf.GeneratedMessage.writeString(output, 1, func_);
       }
-      if (callStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (callStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         output.writeEnum(2, callStatus_);
       }
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(return_)) {
@@ -7388,7 +7388,7 @@ public int getSerializedSize() {
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(func_)) {
         size += com.google.protobuf.GeneratedMessage.computeStringSize(1, func_);
       }
-      if (callStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (callStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         size += com.google.protobuf.CodedOutputStream
           .computeEnumSize(2, callStatus_);
       }
@@ -7421,10 +7421,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_CallEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_CallEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_CallEvent other = (alice.dip.kafka.events.Events.Ev_CallEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_CallEvent other = (alice.dip.kafka.dto.Events.Ev_CallEvent) obj;
 
       if (!getFunc()
           .equals(other.getFunc())) return false;
@@ -7478,44 +7478,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -7523,26 +7523,26 @@ public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -7555,7 +7555,7 @@ public static alice.dip.kafka.events.Events.Ev_CallEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_CallEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_CallEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -7576,18 +7576,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_CallEvent)
-        alice.dip.kafka.events.Events.Ev_CallEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_CallEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_CallEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_CallEvent.class, alice.dip.kafka.events.Events.Ev_CallEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_CallEvent.class, alice.dip.kafka.dto.Events.Ev_CallEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_CallEvent.newBuilder()
@@ -7628,17 +7628,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_CallEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_CallEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_CallEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_CallEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_CallEvent build() {
-        alice.dip.kafka.events.Events.Ev_CallEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_CallEvent build() {
+        alice.dip.kafka.dto.Events.Ev_CallEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -7646,14 +7646,14 @@ public alice.dip.kafka.events.Events.Ev_CallEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_CallEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_CallEvent result = new alice.dip.kafka.events.Events.Ev_CallEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_CallEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_CallEvent result = new alice.dip.kafka.dto.Events.Ev_CallEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_CallEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_CallEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.func_ = func_;
@@ -7688,16 +7688,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_CallEvent result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_CallEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_CallEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_CallEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_CallEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_CallEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_CallEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance()) return this;
         if (!other.getFunc().isEmpty()) {
           func_ = other.func_;
           bitField0_ |= 0x00000001;
@@ -7947,9 +7947,9 @@ public Builder setCallStatusValue(int value) {
        * @return The callStatus.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.OpStatus getCallStatus() {
-        alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(callStatus_);
-        return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+      public alice.dip.kafka.dto.Events.OpStatus getCallStatus() {
+        alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(callStatus_);
+        return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
       }
       /**
        * 
@@ -7960,7 +7960,7 @@ public alice.dip.kafka.events.Events.OpStatus getCallStatus() {
        * @param value The callStatus to set.
        * @return This builder for chaining.
        */
-      public Builder setCallStatus(alice.dip.kafka.events.Events.OpStatus value) {
+      public Builder setCallStatus(alice.dip.kafka.dto.Events.OpStatus value) {
         if (value == null) { throw new NullPointerException(); }
         bitField0_ |= 0x00000002;
         callStatus_ = value.getNumber();
@@ -8074,9 +8074,9 @@ public Builder setReturnBytes(
         return this;
       }
 
-      private alice.dip.kafka.events.Events.Traits traits_;
+      private alice.dip.kafka.dto.Events.Traits traits_;
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> traitsBuilder_;
+          alice.dip.kafka.dto.Events.Traits, alice.dip.kafka.dto.Events.Traits.Builder, alice.dip.kafka.dto.Events.TraitsOrBuilder> traitsBuilder_;
       /**
        * .events.Traits traits = 4;
        * @return Whether the traits field is set.
@@ -8088,9 +8088,9 @@ public boolean hasTraits() {
        * .events.Traits traits = 4;
        * @return The traits.
        */
-      public alice.dip.kafka.events.Events.Traits getTraits() {
+      public alice.dip.kafka.dto.Events.Traits getTraits() {
         if (traitsBuilder_ == null) {
-          return traits_ == null ? alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+          return traits_ == null ? alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
         } else {
           return traitsBuilder_.getMessage();
         }
@@ -8098,7 +8098,7 @@ public alice.dip.kafka.events.Events.Traits getTraits() {
       /**
        * .events.Traits traits = 4;
        */
-      public Builder setTraits(alice.dip.kafka.events.Events.Traits value) {
+      public Builder setTraits(alice.dip.kafka.dto.Events.Traits value) {
         if (traitsBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -8115,7 +8115,7 @@ public Builder setTraits(alice.dip.kafka.events.Events.Traits value) {
        * .events.Traits traits = 4;
        */
       public Builder setTraits(
-          alice.dip.kafka.events.Events.Traits.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Traits.Builder builderForValue) {
         if (traitsBuilder_ == null) {
           traits_ = builderForValue.build();
         } else {
@@ -8128,11 +8128,11 @@ public Builder setTraits(
       /**
        * .events.Traits traits = 4;
        */
-      public Builder mergeTraits(alice.dip.kafka.events.Events.Traits value) {
+      public Builder mergeTraits(alice.dip.kafka.dto.Events.Traits value) {
         if (traitsBuilder_ == null) {
           if (((bitField0_ & 0x00000008) != 0) &&
             traits_ != null &&
-            traits_ != alice.dip.kafka.events.Events.Traits.getDefaultInstance()) {
+            traits_ != alice.dip.kafka.dto.Events.Traits.getDefaultInstance()) {
             getTraitsBuilder().mergeFrom(value);
           } else {
             traits_ = value;
@@ -8162,7 +8162,7 @@ public Builder clearTraits() {
       /**
        * .events.Traits traits = 4;
        */
-      public alice.dip.kafka.events.Events.Traits.Builder getTraitsBuilder() {
+      public alice.dip.kafka.dto.Events.Traits.Builder getTraitsBuilder() {
         bitField0_ |= 0x00000008;
         onChanged();
         return internalGetTraitsFieldBuilder().getBuilder();
@@ -8170,23 +8170,23 @@ public alice.dip.kafka.events.Events.Traits.Builder getTraitsBuilder() {
       /**
        * .events.Traits traits = 4;
        */
-      public alice.dip.kafka.events.Events.TraitsOrBuilder getTraitsOrBuilder() {
+      public alice.dip.kafka.dto.Events.TraitsOrBuilder getTraitsOrBuilder() {
         if (traitsBuilder_ != null) {
           return traitsBuilder_.getMessageOrBuilder();
         } else {
           return traits_ == null ?
-              alice.dip.kafka.events.Events.Traits.getDefaultInstance() : traits_;
+              alice.dip.kafka.dto.Events.Traits.getDefaultInstance() : traits_;
         }
       }
       /**
        * .events.Traits traits = 4;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder> 
+          alice.dip.kafka.dto.Events.Traits, alice.dip.kafka.dto.Events.Traits.Builder, alice.dip.kafka.dto.Events.TraitsOrBuilder> 
           internalGetTraitsFieldBuilder() {
         if (traitsBuilder_ == null) {
           traitsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Traits, alice.dip.kafka.events.Events.Traits.Builder, alice.dip.kafka.events.Events.TraitsOrBuilder>(
+              alice.dip.kafka.dto.Events.Traits, alice.dip.kafka.dto.Events.Traits.Builder, alice.dip.kafka.dto.Events.TraitsOrBuilder>(
                   getTraits(),
                   getParentForChildren(),
                   isClean());
@@ -8547,12 +8547,12 @@ public Builder setPathBytes(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_CallEvent)
-    private static final alice.dip.kafka.events.Events.Ev_CallEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_CallEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_CallEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_CallEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_CallEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_CallEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -8588,7 +8588,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_CallEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_CallEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -8721,15 +8721,15 @@ private Ev_RoleEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_RoleEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_RoleEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_RoleEvent.class, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_RoleEvent.class, alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder.class);
     }
 
     public static final int NAME_FIELD_NUMBER = 1;
@@ -9022,10 +9022,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_RoleEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_RoleEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_RoleEvent other = (alice.dip.kafka.events.Events.Ev_RoleEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_RoleEvent other = (alice.dip.kafka.dto.Events.Ev_RoleEvent) obj;
 
       if (!getName()
           .equals(other.getName())) return false;
@@ -9063,44 +9063,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -9108,26 +9108,26 @@ public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -9140,7 +9140,7 @@ public static alice.dip.kafka.events.Events.Ev_RoleEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_RoleEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_RoleEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -9161,18 +9161,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_RoleEvent)
-        alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_RoleEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_RoleEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_RoleEvent.class, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_RoleEvent.class, alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_RoleEvent.newBuilder()
@@ -9200,17 +9200,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_RoleEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_RoleEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RoleEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_RoleEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RoleEvent build() {
-        alice.dip.kafka.events.Events.Ev_RoleEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_RoleEvent build() {
+        alice.dip.kafka.dto.Events.Ev_RoleEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -9218,14 +9218,14 @@ public alice.dip.kafka.events.Events.Ev_RoleEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RoleEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_RoleEvent result = new alice.dip.kafka.events.Events.Ev_RoleEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_RoleEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_RoleEvent result = new alice.dip.kafka.dto.Events.Ev_RoleEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_RoleEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_RoleEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.name_ = name_;
@@ -9246,16 +9246,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_RoleEvent result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_RoleEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_RoleEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_RoleEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_RoleEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_RoleEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_RoleEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance()) return this;
         if (!other.getName().isEmpty()) {
           name_ = other.name_;
           bitField0_ |= 0x00000001;
@@ -9793,12 +9793,12 @@ public Builder setEnvironmentIdBytes(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_RoleEvent)
-    private static final alice.dip.kafka.events.Events.Ev_RoleEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_RoleEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_RoleEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_RoleEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RoleEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_RoleEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -9834,7 +9834,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_RoleEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_RoleEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -9921,7 +9921,7 @@ public interface Ev_IntegratedServiceEventOrBuilder extends
      * .events.OpStatus operationStatus = 4;
      * @return The operationStatus.
      */
-    alice.dip.kafka.events.Events.OpStatus getOperationStatus();
+    alice.dip.kafka.dto.Events.OpStatus getOperationStatus();
 
     /**
      * 
@@ -9960,7 +9960,7 @@ public interface Ev_IntegratedServiceEventOrBuilder extends
      * .events.OpStatus operationStepStatus = 6;
      * @return The operationStepStatus.
      */
-    alice.dip.kafka.events.Events.OpStatus getOperationStepStatus();
+    alice.dip.kafka.dto.Events.OpStatus getOperationStepStatus();
 
     /**
      * string environmentId = 7;
@@ -10028,15 +10028,15 @@ private Ev_IntegratedServiceEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.class, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.class, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder.class);
     }
 
     public static final int NAME_FIELD_NUMBER = 1;
@@ -10201,9 +10201,9 @@ public java.lang.String getOperationName() {
      * .events.OpStatus operationStatus = 4;
      * @return The operationStatus.
      */
-    @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getOperationStatus() {
-      alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStatus_);
-      return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+    @java.lang.Override public alice.dip.kafka.dto.Events.OpStatus getOperationStatus() {
+      alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(operationStatus_);
+      return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
     }
 
     public static final int OPERATIONSTEP_FIELD_NUMBER = 5;
@@ -10274,9 +10274,9 @@ public java.lang.String getOperationStep() {
      * .events.OpStatus operationStepStatus = 6;
      * @return The operationStepStatus.
      */
-    @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getOperationStepStatus() {
-      alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStepStatus_);
-      return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+    @java.lang.Override public alice.dip.kafka.dto.Events.OpStatus getOperationStepStatus() {
+      alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(operationStepStatus_);
+      return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
     }
 
     public static final int ENVIRONMENTID_FIELD_NUMBER = 7;
@@ -10388,13 +10388,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationName_)) {
         com.google.protobuf.GeneratedMessage.writeString(output, 3, operationName_);
       }
-      if (operationStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (operationStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         output.writeEnum(4, operationStatus_);
       }
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationStep_)) {
         com.google.protobuf.GeneratedMessage.writeString(output, 5, operationStep_);
       }
-      if (operationStepStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (operationStepStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         output.writeEnum(6, operationStepStatus_);
       }
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(environmentId_)) {
@@ -10421,14 +10421,14 @@ public int getSerializedSize() {
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationName_)) {
         size += com.google.protobuf.GeneratedMessage.computeStringSize(3, operationName_);
       }
-      if (operationStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (operationStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         size += com.google.protobuf.CodedOutputStream
           .computeEnumSize(4, operationStatus_);
       }
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(operationStep_)) {
         size += com.google.protobuf.GeneratedMessage.computeStringSize(5, operationStep_);
       }
-      if (operationStepStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (operationStepStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         size += com.google.protobuf.CodedOutputStream
           .computeEnumSize(6, operationStepStatus_);
       }
@@ -10448,10 +10448,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent other = (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent other = (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) obj;
 
       if (!getName()
           .equals(other.getName())) return false;
@@ -10499,44 +10499,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -10544,26 +10544,26 @@ public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -10576,7 +10576,7 @@ public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -10597,18 +10597,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_IntegratedServiceEvent)
-        alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_IntegratedServiceEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.class, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.class, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_IntegratedServiceEvent.newBuilder()
@@ -10639,17 +10639,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_IntegratedServiceEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent build() {
-        alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent build() {
+        alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -10657,14 +10657,14 @@ public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent result = new alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent result = new alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.name_ = name_;
@@ -10694,16 +10694,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_IntegratedServiceEve
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance()) return this;
         if (!other.getName().isEmpty()) {
           name_ = other.name_;
           bitField0_ |= 0x00000001;
@@ -11135,9 +11135,9 @@ public Builder setOperationStatusValue(int value) {
        * @return The operationStatus.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.OpStatus getOperationStatus() {
-        alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStatus_);
-        return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+      public alice.dip.kafka.dto.Events.OpStatus getOperationStatus() {
+        alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(operationStatus_);
+        return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
       }
       /**
        * 
@@ -11148,7 +11148,7 @@ public alice.dip.kafka.events.Events.OpStatus getOperationStatus() {
        * @param value The operationStatus to set.
        * @return This builder for chaining.
        */
-      public Builder setOperationStatus(alice.dip.kafka.events.Events.OpStatus value) {
+      public Builder setOperationStatus(alice.dip.kafka.dto.Events.OpStatus value) {
         if (value == null) { throw new NullPointerException(); }
         bitField0_ |= 0x00000008;
         operationStatus_ = value.getNumber();
@@ -11298,9 +11298,9 @@ public Builder setOperationStepStatusValue(int value) {
        * @return The operationStepStatus.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.OpStatus getOperationStepStatus() {
-        alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(operationStepStatus_);
-        return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+      public alice.dip.kafka.dto.Events.OpStatus getOperationStepStatus() {
+        alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(operationStepStatus_);
+        return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
       }
       /**
        * 
@@ -11311,7 +11311,7 @@ public alice.dip.kafka.events.Events.OpStatus getOperationStepStatus() {
        * @param value The operationStepStatus to set.
        * @return This builder for chaining.
        */
-      public Builder setOperationStepStatus(alice.dip.kafka.events.Events.OpStatus value) {
+      public Builder setOperationStepStatus(alice.dip.kafka.dto.Events.OpStatus value) {
         if (value == null) { throw new NullPointerException(); }
         bitField0_ |= 0x00000020;
         operationStepStatus_ = value.getNumber();
@@ -11501,12 +11501,12 @@ public Builder setPayloadBytes(
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_IntegratedServiceEvent)
-    private static final alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -11542,7 +11542,7 @@ public com.google.protobuf.Parser getParserForType()
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -11615,7 +11615,7 @@ public interface Ev_RunEventOrBuilder extends
      * .events.OpStatus transitionStatus = 6;
      * @return The transitionStatus.
      */
-    alice.dip.kafka.events.Events.OpStatus getTransitionStatus();
+    alice.dip.kafka.dto.Events.OpStatus getTransitionStatus();
 
     /**
      * .common.User lastRequestUser = 8;
@@ -11626,11 +11626,11 @@ public interface Ev_RunEventOrBuilder extends
      * .common.User lastRequestUser = 8;
      * @return The lastRequestUser.
      */
-    alice.dip.kafka.events.Common.User getLastRequestUser();
+    alice.dip.kafka.dto.Common.User getLastRequestUser();
     /**
      * .common.User lastRequestUser = 8;
      */
-    alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder();
+    alice.dip.kafka.dto.Common.UserOrBuilder getLastRequestUserOrBuilder();
   }
   /**
    * Protobuf type {@code events.Ev_RunEvent}
@@ -11663,15 +11663,15 @@ private Ev_RunEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_RunEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_RunEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_RunEvent.class, alice.dip.kafka.events.Events.Ev_RunEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_RunEvent.class, alice.dip.kafka.dto.Events.Ev_RunEvent.Builder.class);
     }
 
     private int bitField0_;
@@ -11855,13 +11855,13 @@ public java.lang.String getTransition() {
      * .events.OpStatus transitionStatus = 6;
      * @return The transitionStatus.
      */
-    @java.lang.Override public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() {
-      alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_);
-      return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+    @java.lang.Override public alice.dip.kafka.dto.Events.OpStatus getTransitionStatus() {
+      alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(transitionStatus_);
+      return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
     }
 
     public static final int LASTREQUESTUSER_FIELD_NUMBER = 8;
-    private alice.dip.kafka.events.Common.User lastRequestUser_;
+    private alice.dip.kafka.dto.Common.User lastRequestUser_;
     /**
      * .common.User lastRequestUser = 8;
      * @return Whether the lastRequestUser field is set.
@@ -11875,15 +11875,15 @@ public boolean hasLastRequestUser() {
      * @return The lastRequestUser.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.User getLastRequestUser() {
-      return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+    public alice.dip.kafka.dto.Common.User getLastRequestUser() {
+      return lastRequestUser_ == null ? alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
     }
     /**
      * .common.User lastRequestUser = 8;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() {
-      return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+    public alice.dip.kafka.dto.Common.UserOrBuilder getLastRequestUserOrBuilder() {
+      return lastRequestUser_ == null ? alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -11915,7 +11915,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transition_)) {
         com.google.protobuf.GeneratedMessage.writeString(output, 5, transition_);
       }
-      if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (transitionStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         output.writeEnum(6, transitionStatus_);
       }
       if (((bitField0_ & 0x00000001) != 0)) {
@@ -11946,7 +11946,7 @@ public int getSerializedSize() {
       if (!com.google.protobuf.GeneratedMessage.isStringEmpty(transition_)) {
         size += com.google.protobuf.GeneratedMessage.computeStringSize(5, transition_);
       }
-      if (transitionStatus_ != alice.dip.kafka.events.Events.OpStatus.NULL.getNumber()) {
+      if (transitionStatus_ != alice.dip.kafka.dto.Events.OpStatus.NULL.getNumber()) {
         size += com.google.protobuf.CodedOutputStream
           .computeEnumSize(6, transitionStatus_);
       }
@@ -11964,10 +11964,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_RunEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_RunEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_RunEvent other = (alice.dip.kafka.events.Events.Ev_RunEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_RunEvent other = (alice.dip.kafka.dto.Events.Ev_RunEvent) obj;
 
       if (!getEnvironmentId()
           .equals(other.getEnvironmentId())) return false;
@@ -12017,44 +12017,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -12062,26 +12062,26 @@ public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -12094,7 +12094,7 @@ public static alice.dip.kafka.events.Events.Ev_RunEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_RunEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_RunEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -12115,18 +12115,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_RunEvent)
-        alice.dip.kafka.events.Events.Ev_RunEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_RunEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_RunEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_RunEvent.class, alice.dip.kafka.events.Events.Ev_RunEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_RunEvent.class, alice.dip.kafka.dto.Events.Ev_RunEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_RunEvent.newBuilder()
@@ -12166,17 +12166,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_RunEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_RunEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RunEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_RunEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RunEvent build() {
-        alice.dip.kafka.events.Events.Ev_RunEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_RunEvent build() {
+        alice.dip.kafka.dto.Events.Ev_RunEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -12184,14 +12184,14 @@ public alice.dip.kafka.events.Events.Ev_RunEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RunEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_RunEvent result = new alice.dip.kafka.events.Events.Ev_RunEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_RunEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_RunEvent result = new alice.dip.kafka.dto.Events.Ev_RunEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_RunEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_RunEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.environmentId_ = environmentId_;
@@ -12223,16 +12223,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_RunEvent result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_RunEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_RunEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_RunEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_RunEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_RunEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_RunEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance()) return this;
         if (!other.getEnvironmentId().isEmpty()) {
           environmentId_ = other.environmentId_;
           bitField0_ |= 0x00000001;
@@ -12686,16 +12686,16 @@ public Builder setTransitionStatusValue(int value) {
        * @return The transitionStatus.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.OpStatus getTransitionStatus() {
-        alice.dip.kafka.events.Events.OpStatus result = alice.dip.kafka.events.Events.OpStatus.forNumber(transitionStatus_);
-        return result == null ? alice.dip.kafka.events.Events.OpStatus.UNRECOGNIZED : result;
+      public alice.dip.kafka.dto.Events.OpStatus getTransitionStatus() {
+        alice.dip.kafka.dto.Events.OpStatus result = alice.dip.kafka.dto.Events.OpStatus.forNumber(transitionStatus_);
+        return result == null ? alice.dip.kafka.dto.Events.OpStatus.UNRECOGNIZED : result;
       }
       /**
        * .events.OpStatus transitionStatus = 6;
        * @param value The transitionStatus to set.
        * @return This builder for chaining.
        */
-      public Builder setTransitionStatus(alice.dip.kafka.events.Events.OpStatus value) {
+      public Builder setTransitionStatus(alice.dip.kafka.dto.Events.OpStatus value) {
         if (value == null) { throw new NullPointerException(); }
         bitField0_ |= 0x00000020;
         transitionStatus_ = value.getNumber();
@@ -12713,9 +12713,9 @@ public Builder clearTransitionStatus() {
         return this;
       }
 
-      private alice.dip.kafka.events.Common.User lastRequestUser_;
+      private alice.dip.kafka.dto.Common.User lastRequestUser_;
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> lastRequestUserBuilder_;
+          alice.dip.kafka.dto.Common.User, alice.dip.kafka.dto.Common.User.Builder, alice.dip.kafka.dto.Common.UserOrBuilder> lastRequestUserBuilder_;
       /**
        * .common.User lastRequestUser = 8;
        * @return Whether the lastRequestUser field is set.
@@ -12727,9 +12727,9 @@ public boolean hasLastRequestUser() {
        * .common.User lastRequestUser = 8;
        * @return The lastRequestUser.
        */
-      public alice.dip.kafka.events.Common.User getLastRequestUser() {
+      public alice.dip.kafka.dto.Common.User getLastRequestUser() {
         if (lastRequestUserBuilder_ == null) {
-          return lastRequestUser_ == null ? alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+          return lastRequestUser_ == null ? alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
         } else {
           return lastRequestUserBuilder_.getMessage();
         }
@@ -12737,7 +12737,7 @@ public alice.dip.kafka.events.Common.User getLastRequestUser() {
       /**
        * .common.User lastRequestUser = 8;
        */
-      public Builder setLastRequestUser(alice.dip.kafka.events.Common.User value) {
+      public Builder setLastRequestUser(alice.dip.kafka.dto.Common.User value) {
         if (lastRequestUserBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -12754,7 +12754,7 @@ public Builder setLastRequestUser(alice.dip.kafka.events.Common.User value) {
        * .common.User lastRequestUser = 8;
        */
       public Builder setLastRequestUser(
-          alice.dip.kafka.events.Common.User.Builder builderForValue) {
+          alice.dip.kafka.dto.Common.User.Builder builderForValue) {
         if (lastRequestUserBuilder_ == null) {
           lastRequestUser_ = builderForValue.build();
         } else {
@@ -12767,11 +12767,11 @@ public Builder setLastRequestUser(
       /**
        * .common.User lastRequestUser = 8;
        */
-      public Builder mergeLastRequestUser(alice.dip.kafka.events.Common.User value) {
+      public Builder mergeLastRequestUser(alice.dip.kafka.dto.Common.User value) {
         if (lastRequestUserBuilder_ == null) {
           if (((bitField0_ & 0x00000040) != 0) &&
             lastRequestUser_ != null &&
-            lastRequestUser_ != alice.dip.kafka.events.Common.User.getDefaultInstance()) {
+            lastRequestUser_ != alice.dip.kafka.dto.Common.User.getDefaultInstance()) {
             getLastRequestUserBuilder().mergeFrom(value);
           } else {
             lastRequestUser_ = value;
@@ -12801,7 +12801,7 @@ public Builder clearLastRequestUser() {
       /**
        * .common.User lastRequestUser = 8;
        */
-      public alice.dip.kafka.events.Common.User.Builder getLastRequestUserBuilder() {
+      public alice.dip.kafka.dto.Common.User.Builder getLastRequestUserBuilder() {
         bitField0_ |= 0x00000040;
         onChanged();
         return internalGetLastRequestUserFieldBuilder().getBuilder();
@@ -12809,23 +12809,23 @@ public alice.dip.kafka.events.Common.User.Builder getLastRequestUserBuilder() {
       /**
        * .common.User lastRequestUser = 8;
        */
-      public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder() {
+      public alice.dip.kafka.dto.Common.UserOrBuilder getLastRequestUserOrBuilder() {
         if (lastRequestUserBuilder_ != null) {
           return lastRequestUserBuilder_.getMessageOrBuilder();
         } else {
           return lastRequestUser_ == null ?
-              alice.dip.kafka.events.Common.User.getDefaultInstance() : lastRequestUser_;
+              alice.dip.kafka.dto.Common.User.getDefaultInstance() : lastRequestUser_;
         }
       }
       /**
        * .common.User lastRequestUser = 8;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder> 
+          alice.dip.kafka.dto.Common.User, alice.dip.kafka.dto.Common.User.Builder, alice.dip.kafka.dto.Common.UserOrBuilder> 
           internalGetLastRequestUserFieldBuilder() {
         if (lastRequestUserBuilder_ == null) {
           lastRequestUserBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Common.User, alice.dip.kafka.events.Common.User.Builder, alice.dip.kafka.events.Common.UserOrBuilder>(
+              alice.dip.kafka.dto.Common.User, alice.dip.kafka.dto.Common.User.Builder, alice.dip.kafka.dto.Common.UserOrBuilder>(
                   getLastRequestUser(),
                   getParentForChildren(),
                   isClean());
@@ -12838,12 +12838,12 @@ public alice.dip.kafka.events.Common.UserOrBuilder getLastRequestUserOrBuilder()
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_RunEvent)
-    private static final alice.dip.kafka.events.Events.Ev_RunEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_RunEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_RunEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_RunEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_RunEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_RunEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -12879,7 +12879,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_RunEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_RunEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -12908,11 +12908,11 @@ public interface Ev_BeamModeEventOrBuilder extends
      * .common.BeamInfo beamInfo = 2;
      * @return The beamInfo.
      */
-    alice.dip.kafka.events.Common.BeamInfo getBeamInfo();
+    alice.dip.kafka.dto.Common.BeamInfo getBeamInfo();
     /**
      * .common.BeamInfo beamInfo = 2;
      */
-    alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder();
+    alice.dip.kafka.dto.Common.BeamInfoOrBuilder getBeamInfoOrBuilder();
   }
   /**
    * 
@@ -12946,15 +12946,15 @@ private Ev_BeamModeEvent() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_BeamModeEvent_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Ev_BeamModeEvent_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Ev_BeamModeEvent.class, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder.class);
+              alice.dip.kafka.dto.Events.Ev_BeamModeEvent.class, alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder.class);
     }
 
     private int bitField0_;
@@ -12974,7 +12974,7 @@ public long getTimestamp() {
     }
 
     public static final int BEAMINFO_FIELD_NUMBER = 2;
-    private alice.dip.kafka.events.Common.BeamInfo beamInfo_;
+    private alice.dip.kafka.dto.Common.BeamInfo beamInfo_;
     /**
      * .common.BeamInfo beamInfo = 2;
      * @return Whether the beamInfo field is set.
@@ -12988,15 +12988,15 @@ public boolean hasBeamInfo() {
      * @return The beamInfo.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.BeamInfo getBeamInfo() {
-      return beamInfo_ == null ? alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_;
+    public alice.dip.kafka.dto.Common.BeamInfo getBeamInfo() {
+      return beamInfo_ == null ? alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance() : beamInfo_;
     }
     /**
      * .common.BeamInfo beamInfo = 2;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() {
-      return beamInfo_ == null ? alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_;
+    public alice.dip.kafka.dto.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() {
+      return beamInfo_ == null ? alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance() : beamInfo_;
     }
 
     private byte memoizedIsInitialized = -1;
@@ -13046,10 +13046,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Ev_BeamModeEvent)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Ev_BeamModeEvent)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Ev_BeamModeEvent other = (alice.dip.kafka.events.Events.Ev_BeamModeEvent) obj;
+      alice.dip.kafka.dto.Events.Ev_BeamModeEvent other = (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) obj;
 
       if (getTimestamp()
           != other.getTimestamp()) return false;
@@ -13081,44 +13081,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -13126,26 +13126,26 @@ public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -13158,7 +13158,7 @@ public static alice.dip.kafka.events.Events.Ev_BeamModeEvent parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Ev_BeamModeEvent prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Ev_BeamModeEvent prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -13185,18 +13185,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Ev_BeamModeEvent)
-        alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder {
+        alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_BeamModeEvent_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_BeamModeEvent_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Ev_BeamModeEvent.class, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder.class);
+                alice.dip.kafka.dto.Events.Ev_BeamModeEvent.class, alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Ev_BeamModeEvent.newBuilder()
@@ -13231,17 +13231,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Ev_BeamModeEvent_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Ev_BeamModeEvent_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_BeamModeEvent getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Ev_BeamModeEvent getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_BeamModeEvent build() {
-        alice.dip.kafka.events.Events.Ev_BeamModeEvent result = buildPartial();
+      public alice.dip.kafka.dto.Events.Ev_BeamModeEvent build() {
+        alice.dip.kafka.dto.Events.Ev_BeamModeEvent result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -13249,14 +13249,14 @@ public alice.dip.kafka.events.Events.Ev_BeamModeEvent build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_BeamModeEvent buildPartial() {
-        alice.dip.kafka.events.Events.Ev_BeamModeEvent result = new alice.dip.kafka.events.Events.Ev_BeamModeEvent(this);
+      public alice.dip.kafka.dto.Events.Ev_BeamModeEvent buildPartial() {
+        alice.dip.kafka.dto.Events.Ev_BeamModeEvent result = new alice.dip.kafka.dto.Events.Ev_BeamModeEvent(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Ev_BeamModeEvent result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Ev_BeamModeEvent result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.timestamp_ = timestamp_;
@@ -13273,16 +13273,16 @@ private void buildPartial0(alice.dip.kafka.events.Events.Ev_BeamModeEvent result
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Ev_BeamModeEvent) {
-          return mergeFrom((alice.dip.kafka.events.Events.Ev_BeamModeEvent)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Ev_BeamModeEvent) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Ev_BeamModeEvent)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Ev_BeamModeEvent other) {
-        if (other == alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Ev_BeamModeEvent other) {
+        if (other == alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance()) return this;
         if (other.getTimestamp() != 0L) {
           setTimestamp(other.getTimestamp());
         }
@@ -13388,9 +13388,9 @@ public Builder clearTimestamp() {
         return this;
       }
 
-      private alice.dip.kafka.events.Common.BeamInfo beamInfo_;
+      private alice.dip.kafka.dto.Common.BeamInfo beamInfo_;
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.BeamInfo, alice.dip.kafka.events.Common.BeamInfo.Builder, alice.dip.kafka.events.Common.BeamInfoOrBuilder> beamInfoBuilder_;
+          alice.dip.kafka.dto.Common.BeamInfo, alice.dip.kafka.dto.Common.BeamInfo.Builder, alice.dip.kafka.dto.Common.BeamInfoOrBuilder> beamInfoBuilder_;
       /**
        * .common.BeamInfo beamInfo = 2;
        * @return Whether the beamInfo field is set.
@@ -13402,9 +13402,9 @@ public boolean hasBeamInfo() {
        * .common.BeamInfo beamInfo = 2;
        * @return The beamInfo.
        */
-      public alice.dip.kafka.events.Common.BeamInfo getBeamInfo() {
+      public alice.dip.kafka.dto.Common.BeamInfo getBeamInfo() {
         if (beamInfoBuilder_ == null) {
-          return beamInfo_ == null ? alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_;
+          return beamInfo_ == null ? alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance() : beamInfo_;
         } else {
           return beamInfoBuilder_.getMessage();
         }
@@ -13412,7 +13412,7 @@ public alice.dip.kafka.events.Common.BeamInfo getBeamInfo() {
       /**
        * .common.BeamInfo beamInfo = 2;
        */
-      public Builder setBeamInfo(alice.dip.kafka.events.Common.BeamInfo value) {
+      public Builder setBeamInfo(alice.dip.kafka.dto.Common.BeamInfo value) {
         if (beamInfoBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -13429,7 +13429,7 @@ public Builder setBeamInfo(alice.dip.kafka.events.Common.BeamInfo value) {
        * .common.BeamInfo beamInfo = 2;
        */
       public Builder setBeamInfo(
-          alice.dip.kafka.events.Common.BeamInfo.Builder builderForValue) {
+          alice.dip.kafka.dto.Common.BeamInfo.Builder builderForValue) {
         if (beamInfoBuilder_ == null) {
           beamInfo_ = builderForValue.build();
         } else {
@@ -13442,11 +13442,11 @@ public Builder setBeamInfo(
       /**
        * .common.BeamInfo beamInfo = 2;
        */
-      public Builder mergeBeamInfo(alice.dip.kafka.events.Common.BeamInfo value) {
+      public Builder mergeBeamInfo(alice.dip.kafka.dto.Common.BeamInfo value) {
         if (beamInfoBuilder_ == null) {
           if (((bitField0_ & 0x00000002) != 0) &&
             beamInfo_ != null &&
-            beamInfo_ != alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance()) {
+            beamInfo_ != alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance()) {
             getBeamInfoBuilder().mergeFrom(value);
           } else {
             beamInfo_ = value;
@@ -13476,7 +13476,7 @@ public Builder clearBeamInfo() {
       /**
        * .common.BeamInfo beamInfo = 2;
        */
-      public alice.dip.kafka.events.Common.BeamInfo.Builder getBeamInfoBuilder() {
+      public alice.dip.kafka.dto.Common.BeamInfo.Builder getBeamInfoBuilder() {
         bitField0_ |= 0x00000002;
         onChanged();
         return internalGetBeamInfoFieldBuilder().getBuilder();
@@ -13484,23 +13484,23 @@ public alice.dip.kafka.events.Common.BeamInfo.Builder getBeamInfoBuilder() {
       /**
        * .common.BeamInfo beamInfo = 2;
        */
-      public alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() {
+      public alice.dip.kafka.dto.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() {
         if (beamInfoBuilder_ != null) {
           return beamInfoBuilder_.getMessageOrBuilder();
         } else {
           return beamInfo_ == null ?
-              alice.dip.kafka.events.Common.BeamInfo.getDefaultInstance() : beamInfo_;
+              alice.dip.kafka.dto.Common.BeamInfo.getDefaultInstance() : beamInfo_;
         }
       }
       /**
        * .common.BeamInfo beamInfo = 2;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Common.BeamInfo, alice.dip.kafka.events.Common.BeamInfo.Builder, alice.dip.kafka.events.Common.BeamInfoOrBuilder> 
+          alice.dip.kafka.dto.Common.BeamInfo, alice.dip.kafka.dto.Common.BeamInfo.Builder, alice.dip.kafka.dto.Common.BeamInfoOrBuilder> 
           internalGetBeamInfoFieldBuilder() {
         if (beamInfoBuilder_ == null) {
           beamInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Common.BeamInfo, alice.dip.kafka.events.Common.BeamInfo.Builder, alice.dip.kafka.events.Common.BeamInfoOrBuilder>(
+              alice.dip.kafka.dto.Common.BeamInfo, alice.dip.kafka.dto.Common.BeamInfo.Builder, alice.dip.kafka.dto.Common.BeamInfoOrBuilder>(
                   getBeamInfo(),
                   getParentForChildren(),
                   isClean());
@@ -13513,12 +13513,12 @@ public alice.dip.kafka.events.Common.BeamInfoOrBuilder getBeamInfoOrBuilder() {
     }
 
     // @@protoc_insertion_point(class_scope:events.Ev_BeamModeEvent)
-    private static final alice.dip.kafka.events.Events.Ev_BeamModeEvent DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Ev_BeamModeEvent DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Ev_BeamModeEvent();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Ev_BeamModeEvent();
     }
 
-    public static alice.dip.kafka.events.Events.Ev_BeamModeEvent getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Ev_BeamModeEvent getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -13554,7 +13554,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_BeamModeEvent getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Ev_BeamModeEvent getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -13585,11 +13585,11 @@ public interface EventOrBuilder extends
      * .events.Ev_EnvironmentEvent environmentEvent = 11;
      * @return The environmentEvent.
      */
-    alice.dip.kafka.events.Events.Ev_EnvironmentEvent getEnvironmentEvent();
+    alice.dip.kafka.dto.Events.Ev_EnvironmentEvent getEnvironmentEvent();
     /**
      * .events.Ev_EnvironmentEvent environmentEvent = 11;
      */
-    alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder();
 
     /**
      * .events.Ev_TaskEvent taskEvent = 12;
@@ -13600,11 +13600,11 @@ public interface EventOrBuilder extends
      * .events.Ev_TaskEvent taskEvent = 12;
      * @return The taskEvent.
      */
-    alice.dip.kafka.events.Events.Ev_TaskEvent getTaskEvent();
+    alice.dip.kafka.dto.Events.Ev_TaskEvent getTaskEvent();
     /**
      * .events.Ev_TaskEvent taskEvent = 12;
      */
-    alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder();
 
     /**
      * .events.Ev_RoleEvent roleEvent = 13;
@@ -13615,11 +13615,11 @@ public interface EventOrBuilder extends
      * .events.Ev_RoleEvent roleEvent = 13;
      * @return The roleEvent.
      */
-    alice.dip.kafka.events.Events.Ev_RoleEvent getRoleEvent();
+    alice.dip.kafka.dto.Events.Ev_RoleEvent getRoleEvent();
     /**
      * .events.Ev_RoleEvent roleEvent = 13;
      */
-    alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder();
 
     /**
      * .events.Ev_CallEvent callEvent = 14;
@@ -13630,11 +13630,11 @@ public interface EventOrBuilder extends
      * .events.Ev_CallEvent callEvent = 14;
      * @return The callEvent.
      */
-    alice.dip.kafka.events.Events.Ev_CallEvent getCallEvent();
+    alice.dip.kafka.dto.Events.Ev_CallEvent getCallEvent();
     /**
      * .events.Ev_CallEvent callEvent = 14;
      */
-    alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder getCallEventOrBuilder();
 
     /**
      * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
@@ -13645,11 +13645,11 @@ public interface EventOrBuilder extends
      * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
      * @return The integratedServiceEvent.
      */
-    alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent();
+    alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent();
     /**
      * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
      */
-    alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder();
 
     /**
      * .events.Ev_RunEvent runEvent = 16;
@@ -13660,11 +13660,11 @@ public interface EventOrBuilder extends
      * .events.Ev_RunEvent runEvent = 16;
      * @return The runEvent.
      */
-    alice.dip.kafka.events.Events.Ev_RunEvent getRunEvent();
+    alice.dip.kafka.dto.Events.Ev_RunEvent getRunEvent();
     /**
      * .events.Ev_RunEvent runEvent = 16;
      */
-    alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder getRunEventOrBuilder();
 
     /**
      * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
@@ -13675,11 +13675,11 @@ public interface EventOrBuilder extends
      * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
      * @return The frameworkEvent.
      */
-    alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent();
+    alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent();
     /**
      * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
      */
-    alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder();
 
     /**
      * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
@@ -13690,11 +13690,11 @@ public interface EventOrBuilder extends
      * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
      * @return The mesosHeartbeatEvent.
      */
-    alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent();
+    alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent();
     /**
      * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
      */
-    alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder();
 
     /**
      * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
@@ -13705,11 +13705,11 @@ public interface EventOrBuilder extends
      * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
      * @return The coreStartEvent.
      */
-    alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getCoreStartEvent();
+    alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart getCoreStartEvent();
     /**
      * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
      */
-    alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder();
 
     /**
      * .events.Ev_BeamModeEvent beamModeEvent = 110;
@@ -13720,13 +13720,13 @@ public interface EventOrBuilder extends
      * .events.Ev_BeamModeEvent beamModeEvent = 110;
      * @return The beamModeEvent.
      */
-    alice.dip.kafka.events.Events.Ev_BeamModeEvent getBeamModeEvent();
+    alice.dip.kafka.dto.Events.Ev_BeamModeEvent getBeamModeEvent();
     /**
      * .events.Ev_BeamModeEvent beamModeEvent = 110;
      */
-    alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder();
+    alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder();
 
-    alice.dip.kafka.events.Events.Event.PayloadCase getPayloadCase();
+    alice.dip.kafka.dto.Events.Event.PayloadCase getPayloadCase();
   }
   /**
    * Protobuf type {@code events.Event}
@@ -13754,15 +13754,15 @@ private Event() {
 
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return alice.dip.kafka.events.Events.internal_static_events_Event_descriptor;
+      return alice.dip.kafka.dto.Events.internal_static_events_Event_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return alice.dip.kafka.events.Events.internal_static_events_Event_fieldAccessorTable
+      return alice.dip.kafka.dto.Events.internal_static_events_Event_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              alice.dip.kafka.events.Events.Event.class, alice.dip.kafka.events.Events.Event.Builder.class);
+              alice.dip.kafka.dto.Events.Event.class, alice.dip.kafka.dto.Events.Event.Builder.class);
     }
 
     private int payloadCase_ = 0;
@@ -13859,21 +13859,21 @@ public boolean hasEnvironmentEvent() {
      * @return The environmentEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getEnvironmentEvent() {
+    public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent getEnvironmentEvent() {
       if (payloadCase_ == 11) {
-         return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_EnvironmentEvent environmentEvent = 11;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder() {
       if (payloadCase_ == 11) {
-         return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
     }
 
     public static final int TASKEVENT_FIELD_NUMBER = 12;
@@ -13890,21 +13890,21 @@ public boolean hasTaskEvent() {
      * @return The taskEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_TaskEvent getTaskEvent() {
+    public alice.dip.kafka.dto.Events.Ev_TaskEvent getTaskEvent() {
       if (payloadCase_ == 12) {
-         return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_TaskEvent taskEvent = 12;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder() {
       if (payloadCase_ == 12) {
-         return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
     }
 
     public static final int ROLEEVENT_FIELD_NUMBER = 13;
@@ -13921,21 +13921,21 @@ public boolean hasRoleEvent() {
      * @return The roleEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_RoleEvent getRoleEvent() {
+    public alice.dip.kafka.dto.Events.Ev_RoleEvent getRoleEvent() {
       if (payloadCase_ == 13) {
-         return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_RoleEvent roleEvent = 13;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder() {
       if (payloadCase_ == 13) {
-         return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
     }
 
     public static final int CALLEVENT_FIELD_NUMBER = 14;
@@ -13952,21 +13952,21 @@ public boolean hasCallEvent() {
      * @return The callEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_CallEvent getCallEvent() {
+    public alice.dip.kafka.dto.Events.Ev_CallEvent getCallEvent() {
       if (payloadCase_ == 14) {
-         return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_CallEvent callEvent = 14;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder getCallEventOrBuilder() {
       if (payloadCase_ == 14) {
-         return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
     }
 
     public static final int INTEGRATEDSERVICEEVENT_FIELD_NUMBER = 15;
@@ -13983,21 +13983,21 @@ public boolean hasIntegratedServiceEvent() {
      * @return The integratedServiceEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent() {
+    public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent() {
       if (payloadCase_ == 15) {
-         return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder() {
       if (payloadCase_ == 15) {
-         return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
     }
 
     public static final int RUNEVENT_FIELD_NUMBER = 16;
@@ -14014,21 +14014,21 @@ public boolean hasRunEvent() {
      * @return The runEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_RunEvent getRunEvent() {
+    public alice.dip.kafka.dto.Events.Ev_RunEvent getRunEvent() {
       if (payloadCase_ == 16) {
-         return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_RunEvent runEvent = 16;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder getRunEventOrBuilder() {
       if (payloadCase_ == 16) {
-         return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
     }
 
     public static final int FRAMEWORKEVENT_FIELD_NUMBER = 101;
@@ -14045,21 +14045,21 @@ public boolean hasFrameworkEvent() {
      * @return The frameworkEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent() {
       if (payloadCase_ == 101) {
-         return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder() {
       if (payloadCase_ == 101) {
-         return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
     }
 
     public static final int MESOSHEARTBEATEVENT_FIELD_NUMBER = 102;
@@ -14076,21 +14076,21 @@ public boolean hasMesosHeartbeatEvent() {
      * @return The mesosHeartbeatEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent() {
       if (payloadCase_ == 102) {
-         return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
     }
     /**
      * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder() {
       if (payloadCase_ == 102) {
-         return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
     }
 
     public static final int CORESTARTEVENT_FIELD_NUMBER = 103;
@@ -14107,21 +14107,21 @@ public boolean hasCoreStartEvent() {
      * @return The coreStartEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getCoreStartEvent() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart getCoreStartEvent() {
       if (payloadCase_ == 103) {
-         return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
     }
     /**
      * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder() {
       if (payloadCase_ == 103) {
-         return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
     }
 
     public static final int BEAMMODEEVENT_FIELD_NUMBER = 110;
@@ -14138,21 +14138,21 @@ public boolean hasBeamModeEvent() {
      * @return The beamModeEvent.
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_BeamModeEvent getBeamModeEvent() {
+    public alice.dip.kafka.dto.Events.Ev_BeamModeEvent getBeamModeEvent() {
       if (payloadCase_ == 110) {
-         return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
     }
     /**
      * .events.Ev_BeamModeEvent beamModeEvent = 110;
      */
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder() {
+    public alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder() {
       if (payloadCase_ == 110) {
-         return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_;
+         return (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_;
       }
-      return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+      return alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
     }
 
     private byte memoizedIsInitialized = -1;
@@ -14176,34 +14176,34 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
         output.writeInt64(2, timestampNano_);
       }
       if (payloadCase_ == 11) {
-        output.writeMessage(11, (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_);
+        output.writeMessage(11, (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_);
       }
       if (payloadCase_ == 12) {
-        output.writeMessage(12, (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_);
+        output.writeMessage(12, (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_);
       }
       if (payloadCase_ == 13) {
-        output.writeMessage(13, (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_);
+        output.writeMessage(13, (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_);
       }
       if (payloadCase_ == 14) {
-        output.writeMessage(14, (alice.dip.kafka.events.Events.Ev_CallEvent) payload_);
+        output.writeMessage(14, (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_);
       }
       if (payloadCase_ == 15) {
-        output.writeMessage(15, (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_);
+        output.writeMessage(15, (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_);
       }
       if (payloadCase_ == 16) {
-        output.writeMessage(16, (alice.dip.kafka.events.Events.Ev_RunEvent) payload_);
+        output.writeMessage(16, (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_);
       }
       if (payloadCase_ == 101) {
-        output.writeMessage(101, (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_);
+        output.writeMessage(101, (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_);
       }
       if (payloadCase_ == 102) {
-        output.writeMessage(102, (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_);
+        output.writeMessage(102, (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_);
       }
       if (payloadCase_ == 103) {
-        output.writeMessage(103, (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_);
+        output.writeMessage(103, (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_);
       }
       if (payloadCase_ == 110) {
-        output.writeMessage(110, (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_);
+        output.writeMessage(110, (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_);
       }
       getUnknownFields().writeTo(output);
     }
@@ -14224,43 +14224,43 @@ public int getSerializedSize() {
       }
       if (payloadCase_ == 11) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(11, (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_);
+          .computeMessageSize(11, (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_);
       }
       if (payloadCase_ == 12) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(12, (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_);
+          .computeMessageSize(12, (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_);
       }
       if (payloadCase_ == 13) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(13, (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_);
+          .computeMessageSize(13, (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_);
       }
       if (payloadCase_ == 14) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(14, (alice.dip.kafka.events.Events.Ev_CallEvent) payload_);
+          .computeMessageSize(14, (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_);
       }
       if (payloadCase_ == 15) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(15, (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_);
+          .computeMessageSize(15, (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_);
       }
       if (payloadCase_ == 16) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(16, (alice.dip.kafka.events.Events.Ev_RunEvent) payload_);
+          .computeMessageSize(16, (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_);
       }
       if (payloadCase_ == 101) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(101, (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_);
+          .computeMessageSize(101, (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_);
       }
       if (payloadCase_ == 102) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(102, (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_);
+          .computeMessageSize(102, (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_);
       }
       if (payloadCase_ == 103) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(103, (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_);
+          .computeMessageSize(103, (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_);
       }
       if (payloadCase_ == 110) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(110, (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_);
+          .computeMessageSize(110, (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_);
       }
       size += getUnknownFields().getSerializedSize();
       memoizedSize = size;
@@ -14272,10 +14272,10 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof alice.dip.kafka.events.Events.Event)) {
+      if (!(obj instanceof alice.dip.kafka.dto.Events.Event)) {
         return super.equals(obj);
       }
-      alice.dip.kafka.events.Events.Event other = (alice.dip.kafka.events.Events.Event) obj;
+      alice.dip.kafka.dto.Events.Event other = (alice.dip.kafka.dto.Events.Event) obj;
 
       if (getTimestamp()
           != other.getTimestamp()) return false;
@@ -14392,44 +14392,44 @@ public int hashCode() {
       return hash;
     }
 
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(byte[] data)
+    public static alice.dip.kafka.dto.Events.Event parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Event parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -14437,26 +14437,26 @@ public static alice.dip.kafka.events.Events.Event parseFrom(
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static alice.dip.kafka.events.Events.Event parseDelimitedFrom(java.io.InputStream input)
+    public static alice.dip.kafka.dto.Events.Event parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input);
     }
 
-    public static alice.dip.kafka.events.Events.Event parseDelimitedFrom(
+    public static alice.dip.kafka.dto.Events.Event parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessage
           .parseWithIOException(PARSER, input);
     }
-    public static alice.dip.kafka.events.Events.Event parseFrom(
+    public static alice.dip.kafka.dto.Events.Event parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -14469,7 +14469,7 @@ public static alice.dip.kafka.events.Events.Event parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(alice.dip.kafka.events.Events.Event prototype) {
+    public static Builder newBuilder(alice.dip.kafka.dto.Events.Event prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -14490,18 +14490,18 @@ protected Builder newBuilderForType(
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder implements
         // @@protoc_insertion_point(builder_implements:events.Event)
-        alice.dip.kafka.events.Events.EventOrBuilder {
+        alice.dip.kafka.dto.Events.EventOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return alice.dip.kafka.events.Events.internal_static_events_Event_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Event_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return alice.dip.kafka.events.Events.internal_static_events_Event_fieldAccessorTable
+        return alice.dip.kafka.dto.Events.internal_static_events_Event_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                alice.dip.kafka.events.Events.Event.class, alice.dip.kafka.events.Events.Event.Builder.class);
+                alice.dip.kafka.dto.Events.Event.class, alice.dip.kafka.dto.Events.Event.Builder.class);
       }
 
       // Construct using ch.cern.alice.o2.control.events.Events.Event.newBuilder()
@@ -14558,17 +14558,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return alice.dip.kafka.events.Events.internal_static_events_Event_descriptor;
+        return alice.dip.kafka.dto.Events.internal_static_events_Event_descriptor;
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Event getDefaultInstanceForType() {
-        return alice.dip.kafka.events.Events.Event.getDefaultInstance();
+      public alice.dip.kafka.dto.Events.Event getDefaultInstanceForType() {
+        return alice.dip.kafka.dto.Events.Event.getDefaultInstance();
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Event build() {
-        alice.dip.kafka.events.Events.Event result = buildPartial();
+      public alice.dip.kafka.dto.Events.Event build() {
+        alice.dip.kafka.dto.Events.Event result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -14576,15 +14576,15 @@ public alice.dip.kafka.events.Events.Event build() {
       }
 
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Event buildPartial() {
-        alice.dip.kafka.events.Events.Event result = new alice.dip.kafka.events.Events.Event(this);
+      public alice.dip.kafka.dto.Events.Event buildPartial() {
+        alice.dip.kafka.dto.Events.Event result = new alice.dip.kafka.dto.Events.Event(this);
         if (bitField0_ != 0) { buildPartial0(result); }
         buildPartialOneofs(result);
         onBuilt();
         return result;
       }
 
-      private void buildPartial0(alice.dip.kafka.events.Events.Event result) {
+      private void buildPartial0(alice.dip.kafka.dto.Events.Event result) {
         int from_bitField0_ = bitField0_;
         if (((from_bitField0_ & 0x00000001) != 0)) {
           result.timestamp_ = timestamp_;
@@ -14594,7 +14594,7 @@ private void buildPartial0(alice.dip.kafka.events.Events.Event result) {
         }
       }
 
-      private void buildPartialOneofs(alice.dip.kafka.events.Events.Event result) {
+      private void buildPartialOneofs(alice.dip.kafka.dto.Events.Event result) {
         result.payloadCase_ = payloadCase_;
         result.payload_ = this.payload_;
         if (payloadCase_ == 11 &&
@@ -14641,16 +14641,16 @@ private void buildPartialOneofs(alice.dip.kafka.events.Events.Event result) {
 
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof alice.dip.kafka.events.Events.Event) {
-          return mergeFrom((alice.dip.kafka.events.Events.Event)other);
+        if (other instanceof alice.dip.kafka.dto.Events.Event) {
+          return mergeFrom((alice.dip.kafka.dto.Events.Event)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(alice.dip.kafka.events.Events.Event other) {
-        if (other == alice.dip.kafka.events.Events.Event.getDefaultInstance()) return this;
+      public Builder mergeFrom(alice.dip.kafka.dto.Events.Event other) {
+        if (other == alice.dip.kafka.dto.Events.Event.getDefaultInstance()) return this;
         if (other.getTimestamp() != 0L) {
           setTimestamp(other.getTimestamp());
         }
@@ -14905,7 +14905,7 @@ public Builder clearTimestampNano() {
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_EnvironmentEvent, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder> environmentEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_EnvironmentEvent, alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder> environmentEventBuilder_;
       /**
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        * @return Whether the environmentEvent field is set.
@@ -14919,23 +14919,23 @@ public boolean hasEnvironmentEvent() {
        * @return The environmentEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_EnvironmentEvent getEnvironmentEvent() {
+      public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent getEnvironmentEvent() {
         if (environmentEventBuilder_ == null) {
           if (payloadCase_ == 11) {
-            return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 11) {
             return environmentEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        */
-      public Builder setEnvironmentEvent(alice.dip.kafka.events.Events.Ev_EnvironmentEvent value) {
+      public Builder setEnvironmentEvent(alice.dip.kafka.dto.Events.Ev_EnvironmentEvent value) {
         if (environmentEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -14952,7 +14952,7 @@ public Builder setEnvironmentEvent(alice.dip.kafka.events.Events.Ev_EnvironmentE
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        */
       public Builder setEnvironmentEvent(
-          alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder builderForValue) {
         if (environmentEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -14965,11 +14965,11 @@ public Builder setEnvironmentEvent(
       /**
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        */
-      public Builder mergeEnvironmentEvent(alice.dip.kafka.events.Events.Ev_EnvironmentEvent value) {
+      public Builder mergeEnvironmentEvent(alice.dip.kafka.dto.Events.Ev_EnvironmentEvent value) {
         if (environmentEventBuilder_ == null) {
           if (payloadCase_ == 11 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_EnvironmentEvent.newBuilder((alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15007,36 +15007,36 @@ public Builder clearEnvironmentEvent() {
       /**
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        */
-      public alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder getEnvironmentEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder getEnvironmentEventBuilder() {
         return internalGetEnvironmentEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder getEnvironmentEventOrBuilder() {
         if ((payloadCase_ == 11) && (environmentEventBuilder_ != null)) {
           return environmentEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 11) {
-            return (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_EnvironmentEvent environmentEvent = 11;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_EnvironmentEvent, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_EnvironmentEvent, alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder> 
           internalGetEnvironmentEventFieldBuilder() {
         if (environmentEventBuilder_ == null) {
           if (!(payloadCase_ == 11)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_EnvironmentEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.getDefaultInstance();
           }
           environmentEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_EnvironmentEvent, alice.dip.kafka.events.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_EnvironmentEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_EnvironmentEvent, alice.dip.kafka.dto.Events.Ev_EnvironmentEvent.Builder, alice.dip.kafka.dto.Events.Ev_EnvironmentEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_EnvironmentEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15047,7 +15047,7 @@ public alice.dip.kafka.events.Events.Ev_EnvironmentEventOrBuilder getEnvironment
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_TaskEvent, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder, alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder> taskEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_TaskEvent, alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder, alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder> taskEventBuilder_;
       /**
        * .events.Ev_TaskEvent taskEvent = 12;
        * @return Whether the taskEvent field is set.
@@ -15061,23 +15061,23 @@ public boolean hasTaskEvent() {
        * @return The taskEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_TaskEvent getTaskEvent() {
+      public alice.dip.kafka.dto.Events.Ev_TaskEvent getTaskEvent() {
         if (taskEventBuilder_ == null) {
           if (payloadCase_ == 12) {
-            return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 12) {
             return taskEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_TaskEvent taskEvent = 12;
        */
-      public Builder setTaskEvent(alice.dip.kafka.events.Events.Ev_TaskEvent value) {
+      public Builder setTaskEvent(alice.dip.kafka.dto.Events.Ev_TaskEvent value) {
         if (taskEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15094,7 +15094,7 @@ public Builder setTaskEvent(alice.dip.kafka.events.Events.Ev_TaskEvent value) {
        * .events.Ev_TaskEvent taskEvent = 12;
        */
       public Builder setTaskEvent(
-          alice.dip.kafka.events.Events.Ev_TaskEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder builderForValue) {
         if (taskEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15107,11 +15107,11 @@ public Builder setTaskEvent(
       /**
        * .events.Ev_TaskEvent taskEvent = 12;
        */
-      public Builder mergeTaskEvent(alice.dip.kafka.events.Events.Ev_TaskEvent value) {
+      public Builder mergeTaskEvent(alice.dip.kafka.dto.Events.Ev_TaskEvent value) {
         if (taskEventBuilder_ == null) {
           if (payloadCase_ == 12 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_TaskEvent.newBuilder((alice.dip.kafka.events.Events.Ev_TaskEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_TaskEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15149,36 +15149,36 @@ public Builder clearTaskEvent() {
       /**
        * .events.Ev_TaskEvent taskEvent = 12;
        */
-      public alice.dip.kafka.events.Events.Ev_TaskEvent.Builder getTaskEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder getTaskEventBuilder() {
         return internalGetTaskEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_TaskEvent taskEvent = 12;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder() {
         if ((payloadCase_ == 12) && (taskEventBuilder_ != null)) {
           return taskEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 12) {
-            return (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_TaskEvent taskEvent = 12;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_TaskEvent, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder, alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_TaskEvent, alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder, alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder> 
           internalGetTaskEventFieldBuilder() {
         if (taskEventBuilder_ == null) {
           if (!(payloadCase_ == 12)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_TaskEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_TaskEvent.getDefaultInstance();
           }
           taskEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_TaskEvent, alice.dip.kafka.events.Events.Ev_TaskEvent.Builder, alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_TaskEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_TaskEvent, alice.dip.kafka.dto.Events.Ev_TaskEvent.Builder, alice.dip.kafka.dto.Events.Ev_TaskEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_TaskEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15189,7 +15189,7 @@ public alice.dip.kafka.events.Events.Ev_TaskEventOrBuilder getTaskEventOrBuilder
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_RoleEvent, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder, alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder> roleEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_RoleEvent, alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder, alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder> roleEventBuilder_;
       /**
        * .events.Ev_RoleEvent roleEvent = 13;
        * @return Whether the roleEvent field is set.
@@ -15203,23 +15203,23 @@ public boolean hasRoleEvent() {
        * @return The roleEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RoleEvent getRoleEvent() {
+      public alice.dip.kafka.dto.Events.Ev_RoleEvent getRoleEvent() {
         if (roleEventBuilder_ == null) {
           if (payloadCase_ == 13) {
-            return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 13) {
             return roleEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_RoleEvent roleEvent = 13;
        */
-      public Builder setRoleEvent(alice.dip.kafka.events.Events.Ev_RoleEvent value) {
+      public Builder setRoleEvent(alice.dip.kafka.dto.Events.Ev_RoleEvent value) {
         if (roleEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15236,7 +15236,7 @@ public Builder setRoleEvent(alice.dip.kafka.events.Events.Ev_RoleEvent value) {
        * .events.Ev_RoleEvent roleEvent = 13;
        */
       public Builder setRoleEvent(
-          alice.dip.kafka.events.Events.Ev_RoleEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder builderForValue) {
         if (roleEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15249,11 +15249,11 @@ public Builder setRoleEvent(
       /**
        * .events.Ev_RoleEvent roleEvent = 13;
        */
-      public Builder mergeRoleEvent(alice.dip.kafka.events.Events.Ev_RoleEvent value) {
+      public Builder mergeRoleEvent(alice.dip.kafka.dto.Events.Ev_RoleEvent value) {
         if (roleEventBuilder_ == null) {
           if (payloadCase_ == 13 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_RoleEvent.newBuilder((alice.dip.kafka.events.Events.Ev_RoleEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_RoleEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15291,36 +15291,36 @@ public Builder clearRoleEvent() {
       /**
        * .events.Ev_RoleEvent roleEvent = 13;
        */
-      public alice.dip.kafka.events.Events.Ev_RoleEvent.Builder getRoleEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder getRoleEventBuilder() {
         return internalGetRoleEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_RoleEvent roleEvent = 13;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder() {
         if ((payloadCase_ == 13) && (roleEventBuilder_ != null)) {
           return roleEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 13) {
-            return (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_RoleEvent roleEvent = 13;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_RoleEvent, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder, alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_RoleEvent, alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder, alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder> 
           internalGetRoleEventFieldBuilder() {
         if (roleEventBuilder_ == null) {
           if (!(payloadCase_ == 13)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_RoleEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_RoleEvent.getDefaultInstance();
           }
           roleEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_RoleEvent, alice.dip.kafka.events.Events.Ev_RoleEvent.Builder, alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_RoleEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_RoleEvent, alice.dip.kafka.dto.Events.Ev_RoleEvent.Builder, alice.dip.kafka.dto.Events.Ev_RoleEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_RoleEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15331,7 +15331,7 @@ public alice.dip.kafka.events.Events.Ev_RoleEventOrBuilder getRoleEventOrBuilder
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_CallEvent, alice.dip.kafka.events.Events.Ev_CallEvent.Builder, alice.dip.kafka.events.Events.Ev_CallEventOrBuilder> callEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_CallEvent, alice.dip.kafka.dto.Events.Ev_CallEvent.Builder, alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder> callEventBuilder_;
       /**
        * .events.Ev_CallEvent callEvent = 14;
        * @return Whether the callEvent field is set.
@@ -15345,23 +15345,23 @@ public boolean hasCallEvent() {
        * @return The callEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_CallEvent getCallEvent() {
+      public alice.dip.kafka.dto.Events.Ev_CallEvent getCallEvent() {
         if (callEventBuilder_ == null) {
           if (payloadCase_ == 14) {
-            return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 14) {
             return callEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_CallEvent callEvent = 14;
        */
-      public Builder setCallEvent(alice.dip.kafka.events.Events.Ev_CallEvent value) {
+      public Builder setCallEvent(alice.dip.kafka.dto.Events.Ev_CallEvent value) {
         if (callEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15378,7 +15378,7 @@ public Builder setCallEvent(alice.dip.kafka.events.Events.Ev_CallEvent value) {
        * .events.Ev_CallEvent callEvent = 14;
        */
       public Builder setCallEvent(
-          alice.dip.kafka.events.Events.Ev_CallEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_CallEvent.Builder builderForValue) {
         if (callEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15391,11 +15391,11 @@ public Builder setCallEvent(
       /**
        * .events.Ev_CallEvent callEvent = 14;
        */
-      public Builder mergeCallEvent(alice.dip.kafka.events.Events.Ev_CallEvent value) {
+      public Builder mergeCallEvent(alice.dip.kafka.dto.Events.Ev_CallEvent value) {
         if (callEventBuilder_ == null) {
           if (payloadCase_ == 14 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_CallEvent.newBuilder((alice.dip.kafka.events.Events.Ev_CallEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_CallEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_CallEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15433,36 +15433,36 @@ public Builder clearCallEvent() {
       /**
        * .events.Ev_CallEvent callEvent = 14;
        */
-      public alice.dip.kafka.events.Events.Ev_CallEvent.Builder getCallEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_CallEvent.Builder getCallEventBuilder() {
         return internalGetCallEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_CallEvent callEvent = 14;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder getCallEventOrBuilder() {
         if ((payloadCase_ == 14) && (callEventBuilder_ != null)) {
           return callEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 14) {
-            return (alice.dip.kafka.events.Events.Ev_CallEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_CallEvent callEvent = 14;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_CallEvent, alice.dip.kafka.events.Events.Ev_CallEvent.Builder, alice.dip.kafka.events.Events.Ev_CallEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_CallEvent, alice.dip.kafka.dto.Events.Ev_CallEvent.Builder, alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder> 
           internalGetCallEventFieldBuilder() {
         if (callEventBuilder_ == null) {
           if (!(payloadCase_ == 14)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_CallEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_CallEvent.getDefaultInstance();
           }
           callEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_CallEvent, alice.dip.kafka.events.Events.Ev_CallEvent.Builder, alice.dip.kafka.events.Events.Ev_CallEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_CallEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_CallEvent, alice.dip.kafka.dto.Events.Ev_CallEvent.Builder, alice.dip.kafka.dto.Events.Ev_CallEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_CallEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15473,7 +15473,7 @@ public alice.dip.kafka.events.Events.Ev_CallEventOrBuilder getCallEventOrBuilder
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder> integratedServiceEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder> integratedServiceEventBuilder_;
       /**
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        * @return Whether the integratedServiceEvent field is set.
@@ -15487,23 +15487,23 @@ public boolean hasIntegratedServiceEvent() {
        * @return The integratedServiceEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent() {
+      public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent getIntegratedServiceEvent() {
         if (integratedServiceEventBuilder_ == null) {
           if (payloadCase_ == 15) {
-            return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 15) {
             return integratedServiceEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        */
-      public Builder setIntegratedServiceEvent(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent value) {
+      public Builder setIntegratedServiceEvent(alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent value) {
         if (integratedServiceEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15520,7 +15520,7 @@ public Builder setIntegratedServiceEvent(alice.dip.kafka.events.Events.Ev_Integr
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        */
       public Builder setIntegratedServiceEvent(
-          alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder builderForValue) {
         if (integratedServiceEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15533,11 +15533,11 @@ public Builder setIntegratedServiceEvent(
       /**
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        */
-      public Builder mergeIntegratedServiceEvent(alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent value) {
+      public Builder mergeIntegratedServiceEvent(alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent value) {
         if (integratedServiceEventBuilder_ == null) {
           if (payloadCase_ == 15 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.newBuilder((alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15575,36 +15575,36 @@ public Builder clearIntegratedServiceEvent() {
       /**
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        */
-      public alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder getIntegratedServiceEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder getIntegratedServiceEventBuilder() {
         return internalGetIntegratedServiceEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder getIntegratedServiceEventOrBuilder() {
         if ((payloadCase_ == 15) && (integratedServiceEventBuilder_ != null)) {
           return integratedServiceEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 15) {
-            return (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_IntegratedServiceEvent integratedServiceEvent = 15;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder> 
           internalGetIntegratedServiceEventFieldBuilder() {
         if (integratedServiceEventBuilder_ == null) {
           if (!(payloadCase_ == 15)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.getDefaultInstance();
           }
           integratedServiceEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_IntegratedServiceEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent.Builder, alice.dip.kafka.dto.Events.Ev_IntegratedServiceEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_IntegratedServiceEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15615,7 +15615,7 @@ public alice.dip.kafka.events.Events.Ev_IntegratedServiceEventOrBuilder getInteg
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_RunEvent, alice.dip.kafka.events.Events.Ev_RunEvent.Builder, alice.dip.kafka.events.Events.Ev_RunEventOrBuilder> runEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_RunEvent, alice.dip.kafka.dto.Events.Ev_RunEvent.Builder, alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder> runEventBuilder_;
       /**
        * .events.Ev_RunEvent runEvent = 16;
        * @return Whether the runEvent field is set.
@@ -15629,23 +15629,23 @@ public boolean hasRunEvent() {
        * @return The runEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RunEvent getRunEvent() {
+      public alice.dip.kafka.dto.Events.Ev_RunEvent getRunEvent() {
         if (runEventBuilder_ == null) {
           if (payloadCase_ == 16) {
-            return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 16) {
             return runEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_RunEvent runEvent = 16;
        */
-      public Builder setRunEvent(alice.dip.kafka.events.Events.Ev_RunEvent value) {
+      public Builder setRunEvent(alice.dip.kafka.dto.Events.Ev_RunEvent value) {
         if (runEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15662,7 +15662,7 @@ public Builder setRunEvent(alice.dip.kafka.events.Events.Ev_RunEvent value) {
        * .events.Ev_RunEvent runEvent = 16;
        */
       public Builder setRunEvent(
-          alice.dip.kafka.events.Events.Ev_RunEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_RunEvent.Builder builderForValue) {
         if (runEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15675,11 +15675,11 @@ public Builder setRunEvent(
       /**
        * .events.Ev_RunEvent runEvent = 16;
        */
-      public Builder mergeRunEvent(alice.dip.kafka.events.Events.Ev_RunEvent value) {
+      public Builder mergeRunEvent(alice.dip.kafka.dto.Events.Ev_RunEvent value) {
         if (runEventBuilder_ == null) {
           if (payloadCase_ == 16 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_RunEvent.newBuilder((alice.dip.kafka.events.Events.Ev_RunEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_RunEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_RunEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15717,36 +15717,36 @@ public Builder clearRunEvent() {
       /**
        * .events.Ev_RunEvent runEvent = 16;
        */
-      public alice.dip.kafka.events.Events.Ev_RunEvent.Builder getRunEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_RunEvent.Builder getRunEventBuilder() {
         return internalGetRunEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_RunEvent runEvent = 16;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder getRunEventOrBuilder() {
         if ((payloadCase_ == 16) && (runEventBuilder_ != null)) {
           return runEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 16) {
-            return (alice.dip.kafka.events.Events.Ev_RunEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_RunEvent runEvent = 16;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_RunEvent, alice.dip.kafka.events.Events.Ev_RunEvent.Builder, alice.dip.kafka.events.Events.Ev_RunEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_RunEvent, alice.dip.kafka.dto.Events.Ev_RunEvent.Builder, alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder> 
           internalGetRunEventFieldBuilder() {
         if (runEventBuilder_ == null) {
           if (!(payloadCase_ == 16)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_RunEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_RunEvent.getDefaultInstance();
           }
           runEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_RunEvent, alice.dip.kafka.events.Events.Ev_RunEvent.Builder, alice.dip.kafka.events.Events.Ev_RunEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_RunEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_RunEvent, alice.dip.kafka.dto.Events.Ev_RunEvent.Builder, alice.dip.kafka.dto.Events.Ev_RunEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_RunEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15757,7 +15757,7 @@ public alice.dip.kafka.events.Events.Ev_RunEventOrBuilder getRunEventOrBuilder()
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder> frameworkEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder> frameworkEventBuilder_;
       /**
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        * @return Whether the frameworkEvent field is set.
@@ -15771,23 +15771,23 @@ public boolean hasFrameworkEvent() {
        * @return The frameworkEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent getFrameworkEvent() {
         if (frameworkEventBuilder_ == null) {
           if (payloadCase_ == 101) {
-            return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 101) {
             return frameworkEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        */
-      public Builder setFrameworkEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent value) {
+      public Builder setFrameworkEvent(alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent value) {
         if (frameworkEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15804,7 +15804,7 @@ public Builder setFrameworkEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_Fram
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        */
       public Builder setFrameworkEvent(
-          alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder builderForValue) {
         if (frameworkEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15817,11 +15817,11 @@ public Builder setFrameworkEvent(
       /**
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        */
-      public Builder mergeFrameworkEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent value) {
+      public Builder mergeFrameworkEvent(alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent value) {
         if (frameworkEventBuilder_ == null) {
           if (payloadCase_ == 101 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.newBuilder((alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -15859,36 +15859,36 @@ public Builder clearFrameworkEvent() {
       /**
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        */
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder getFrameworkEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder getFrameworkEventBuilder() {
         return internalGetFrameworkEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFrameworkEventOrBuilder() {
         if ((payloadCase_ == 101) && (frameworkEventBuilder_ != null)) {
           return frameworkEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 101) {
-            return (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_MetaEvent_FrameworkEvent frameworkEvent = 101;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder> 
           internalGetFrameworkEventFieldBuilder() {
         if (frameworkEventBuilder_ == null) {
           if (!(payloadCase_ == 101)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.getDefaultInstance();
           }
           frameworkEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_MetaEvent_FrameworkEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -15899,7 +15899,7 @@ public alice.dip.kafka.events.Events.Ev_MetaEvent_FrameworkEventOrBuilder getFra
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder> mesosHeartbeatEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder> mesosHeartbeatEventBuilder_;
       /**
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        * @return Whether the mesosHeartbeatEvent field is set.
@@ -15913,23 +15913,23 @@ public boolean hasMesosHeartbeatEvent() {
        * @return The mesosHeartbeatEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat getMesosHeartbeatEvent() {
         if (mesosHeartbeatEventBuilder_ == null) {
           if (payloadCase_ == 102) {
-            return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
         } else {
           if (payloadCase_ == 102) {
             return mesosHeartbeatEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        */
-      public Builder setMesosHeartbeatEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat value) {
+      public Builder setMesosHeartbeatEvent(alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat value) {
         if (mesosHeartbeatEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -15946,7 +15946,7 @@ public Builder setMesosHeartbeatEvent(alice.dip.kafka.events.Events.Ev_MetaEvent
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        */
       public Builder setMesosHeartbeatEvent(
-          alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder builderForValue) {
         if (mesosHeartbeatEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -15959,11 +15959,11 @@ public Builder setMesosHeartbeatEvent(
       /**
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        */
-      public Builder mergeMesosHeartbeatEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat value) {
+      public Builder mergeMesosHeartbeatEvent(alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat value) {
         if (mesosHeartbeatEventBuilder_ == null) {
           if (payloadCase_ == 102 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.newBuilder((alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.newBuilder((alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -16001,36 +16001,36 @@ public Builder clearMesosHeartbeatEvent() {
       /**
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        */
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder getMesosHeartbeatEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder getMesosHeartbeatEventBuilder() {
         return internalGetMesosHeartbeatEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMesosHeartbeatEventOrBuilder() {
         if ((payloadCase_ == 102) && (mesosHeartbeatEventBuilder_ != null)) {
           return mesosHeartbeatEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 102) {
-            return (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_MetaEvent_MesosHeartbeat mesosHeartbeatEvent = 102;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder> 
           internalGetMesosHeartbeatEventFieldBuilder() {
         if (mesosHeartbeatEventBuilder_ == null) {
           if (!(payloadCase_ == 102)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.getDefaultInstance();
           }
           mesosHeartbeatEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeat) payload_,
+              alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_MetaEvent_MesosHeartbeat) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -16041,7 +16041,7 @@ public alice.dip.kafka.events.Events.Ev_MetaEvent_MesosHeartbeatOrBuilder getMes
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder> coreStartEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder> coreStartEventBuilder_;
       /**
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        * @return Whether the coreStartEvent field is set.
@@ -16055,23 +16055,23 @@ public boolean hasCoreStartEvent() {
        * @return The coreStartEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart getCoreStartEvent() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart getCoreStartEvent() {
         if (coreStartEventBuilder_ == null) {
           if (payloadCase_ == 103) {
-            return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
         } else {
           if (payloadCase_ == 103) {
             return coreStartEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        */
-      public Builder setCoreStartEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart value) {
+      public Builder setCoreStartEvent(alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart value) {
         if (coreStartEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -16088,7 +16088,7 @@ public Builder setCoreStartEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_Core
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        */
       public Builder setCoreStartEvent(
-          alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder builderForValue) {
         if (coreStartEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -16101,11 +16101,11 @@ public Builder setCoreStartEvent(
       /**
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        */
-      public Builder mergeCoreStartEvent(alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart value) {
+      public Builder mergeCoreStartEvent(alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart value) {
         if (coreStartEventBuilder_ == null) {
           if (payloadCase_ == 103 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.newBuilder((alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.newBuilder((alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -16143,36 +16143,36 @@ public Builder clearCoreStartEvent() {
       /**
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        */
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder getCoreStartEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder getCoreStartEventBuilder() {
         return internalGetCoreStartEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStartEventOrBuilder() {
         if ((payloadCase_ == 103) && (coreStartEventBuilder_ != null)) {
           return coreStartEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 103) {
-            return (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_MetaEvent_CoreStart coreStartEvent = 103;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder> 
           internalGetCoreStartEventFieldBuilder() {
         if (coreStartEventBuilder_ == null) {
           if (!(payloadCase_ == 103)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.getDefaultInstance();
           }
           coreStartEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStart) payload_,
+              alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart.Builder, alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStartOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_MetaEvent_CoreStart) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -16183,7 +16183,7 @@ public alice.dip.kafka.events.Events.Ev_MetaEvent_CoreStartOrBuilder getCoreStar
       }
 
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_BeamModeEvent, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder> beamModeEventBuilder_;
+          alice.dip.kafka.dto.Events.Ev_BeamModeEvent, alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder> beamModeEventBuilder_;
       /**
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        * @return Whether the beamModeEvent field is set.
@@ -16197,23 +16197,23 @@ public boolean hasBeamModeEvent() {
        * @return The beamModeEvent.
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_BeamModeEvent getBeamModeEvent() {
+      public alice.dip.kafka.dto.Events.Ev_BeamModeEvent getBeamModeEvent() {
         if (beamModeEventBuilder_ == null) {
           if (payloadCase_ == 110) {
-            return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
         } else {
           if (payloadCase_ == 110) {
             return beamModeEventBuilder_.getMessage();
           }
-          return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        */
-      public Builder setBeamModeEvent(alice.dip.kafka.events.Events.Ev_BeamModeEvent value) {
+      public Builder setBeamModeEvent(alice.dip.kafka.dto.Events.Ev_BeamModeEvent value) {
         if (beamModeEventBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -16230,7 +16230,7 @@ public Builder setBeamModeEvent(alice.dip.kafka.events.Events.Ev_BeamModeEvent v
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        */
       public Builder setBeamModeEvent(
-          alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder builderForValue) {
+          alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder builderForValue) {
         if (beamModeEventBuilder_ == null) {
           payload_ = builderForValue.build();
           onChanged();
@@ -16243,11 +16243,11 @@ public Builder setBeamModeEvent(
       /**
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        */
-      public Builder mergeBeamModeEvent(alice.dip.kafka.events.Events.Ev_BeamModeEvent value) {
+      public Builder mergeBeamModeEvent(alice.dip.kafka.dto.Events.Ev_BeamModeEvent value) {
         if (beamModeEventBuilder_ == null) {
           if (payloadCase_ == 110 &&
-              payload_ != alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance()) {
-            payload_ = alice.dip.kafka.events.Events.Ev_BeamModeEvent.newBuilder((alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_)
+              payload_ != alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance()) {
+            payload_ = alice.dip.kafka.dto.Events.Ev_BeamModeEvent.newBuilder((alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_)
                 .mergeFrom(value).buildPartial();
           } else {
             payload_ = value;
@@ -16285,36 +16285,36 @@ public Builder clearBeamModeEvent() {
       /**
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        */
-      public alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder getBeamModeEventBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder getBeamModeEventBuilder() {
         return internalGetBeamModeEventFieldBuilder().getBuilder();
       }
       /**
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        */
       @java.lang.Override
-      public alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder() {
+      public alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder getBeamModeEventOrBuilder() {
         if ((payloadCase_ == 110) && (beamModeEventBuilder_ != null)) {
           return beamModeEventBuilder_.getMessageOrBuilder();
         } else {
           if (payloadCase_ == 110) {
-            return (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_;
+            return (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_;
           }
-          return alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+          return alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
         }
       }
       /**
        * .events.Ev_BeamModeEvent beamModeEvent = 110;
        */
       private com.google.protobuf.SingleFieldBuilder<
-          alice.dip.kafka.events.Events.Ev_BeamModeEvent, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder> 
+          alice.dip.kafka.dto.Events.Ev_BeamModeEvent, alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder> 
           internalGetBeamModeEventFieldBuilder() {
         if (beamModeEventBuilder_ == null) {
           if (!(payloadCase_ == 110)) {
-            payload_ = alice.dip.kafka.events.Events.Ev_BeamModeEvent.getDefaultInstance();
+            payload_ = alice.dip.kafka.dto.Events.Ev_BeamModeEvent.getDefaultInstance();
           }
           beamModeEventBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              alice.dip.kafka.events.Events.Ev_BeamModeEvent, alice.dip.kafka.events.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder>(
-                  (alice.dip.kafka.events.Events.Ev_BeamModeEvent) payload_,
+              alice.dip.kafka.dto.Events.Ev_BeamModeEvent, alice.dip.kafka.dto.Events.Ev_BeamModeEvent.Builder, alice.dip.kafka.dto.Events.Ev_BeamModeEventOrBuilder>(
+                  (alice.dip.kafka.dto.Events.Ev_BeamModeEvent) payload_,
                   getParentForChildren(),
                   isClean());
           payload_ = null;
@@ -16328,12 +16328,12 @@ public alice.dip.kafka.events.Events.Ev_BeamModeEventOrBuilder getBeamModeEventO
     }
 
     // @@protoc_insertion_point(class_scope:events.Event)
-    private static final alice.dip.kafka.events.Events.Event DEFAULT_INSTANCE;
+    private static final alice.dip.kafka.dto.Events.Event DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new alice.dip.kafka.events.Events.Event();
+      DEFAULT_INSTANCE = new alice.dip.kafka.dto.Events.Event();
     }
 
-    public static alice.dip.kafka.events.Events.Event getDefaultInstance() {
+    public static alice.dip.kafka.dto.Events.Event getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
@@ -16369,7 +16369,7 @@ public com.google.protobuf.Parser getParserForType() {
     }
 
     @java.lang.Override
-    public alice.dip.kafka.events.Events.Event getDefaultInstanceForType() {
+    public alice.dip.kafka.dto.Events.Event getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
 
@@ -16515,7 +16515,7 @@ public alice.dip.kafka.events.Events.Event getDefaultInstanceForType() {
     descriptor = com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new com.google.protobuf.Descriptors.FileDescriptor[] {
-          alice.dip.kafka.events.Common.getDescriptor(),
+          alice.dip.kafka.dto.Common.getDescriptor(),
         });
     internal_static_events_Ev_MetaEvent_MesosHeartbeat_descriptor =
       getDescriptor().getMessageTypes().get(0);
@@ -16596,7 +16596,7 @@ public alice.dip.kafka.events.Events.Event getDefaultInstanceForType() {
         internal_static_events_Event_descriptor,
         new java.lang.String[] { "Timestamp", "TimestampNano", "EnvironmentEvent", "TaskEvent", "RoleEvent", "CallEvent", "IntegratedServiceEvent", "RunEvent", "FrameworkEvent", "MesosHeartbeatEvent", "CoreStartEvent", "BeamModeEvent", "Payload", });
     descriptor.resolveAllFeaturesImmutable();
-    alice.dip.kafka.events.Common.getDescriptor();
+    alice.dip.kafka.dto.Common.getDescriptor();
   }
 
   // @@protoc_insertion_point(outer_class_scope)

From 78ca1e066eef8fe92b30dff3b7c2079236302b56 Mon Sep 17 00:00:00 2001
From: George Raduta 
Date: Tue, 7 Oct 2025 18:41:22 +0200
Subject: [PATCH 11/16] Use final for Kafka topic

---
 src/alice/dip/kafka/BeamModeEventsKafkaProducer.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java
index 8f3576f..2181669 100644
--- a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java
+++ b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java
@@ -25,7 +25,7 @@
  * Kafka producer for LHC Beam Mode events, serialized using Protocol Buffers.
  */
 public class BeamModeEventsKafkaProducer extends KafkaProducerInterface {
-	public static String KAFKA_PRODUCER_TOPIC_DIP = "dip.lhc.beam_mode";
+	public static final String KAFKA_PRODUCER_TOPIC_DIP = "dip.lhc.beam_mode";
 
     /**
      * Constructor to create a BeamModeEventsKafkaProducer

From 4f954738b04e2c623a87f4f7f6c7d87de51dface Mon Sep 17 00:00:00 2001
From: George Raduta 
Date: Tue, 7 Oct 2025 18:42:09 +0200
Subject: [PATCH 12/16] Fix spelling mistakes

---
 src/alice/dip/kafka/dto/Events.java | 36 ++++++++++++++---------------
 src/alice/dip/protos/events.proto   |  4 ++--
 2 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/src/alice/dip/kafka/dto/Events.java b/src/alice/dip/kafka/dto/Events.java
index ea31d8b..0286522 100644
--- a/src/alice/dip/kafka/dto/Events.java
+++ b/src/alice/dip/kafka/dto/Events.java
@@ -4913,7 +4913,7 @@ public interface Ev_TaskEventOrBuilder extends
 
     /**
      * 
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
      * 
* * string status = 4; @@ -4922,7 +4922,7 @@ public interface Ev_TaskEventOrBuilder extends java.lang.String getStatus(); /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
      * 
* * string status = 4; @@ -5202,7 +5202,7 @@ public java.lang.String getState() { private volatile java.lang.Object status_ = ""; /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
      * 
* * string status = 4; @@ -5223,7 +5223,7 @@ public java.lang.String getStatus() { } /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
      * 
* * string status = 4; @@ -6228,7 +6228,7 @@ public Builder setStateBytes( private java.lang.Object status_ = ""; /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
        * 
* * string status = 4; @@ -6248,7 +6248,7 @@ public java.lang.String getStatus() { } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
        * 
* * string status = 4; @@ -6269,7 +6269,7 @@ public java.lang.String getStatus() { } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
        * 
* * string status = 4; @@ -6286,7 +6286,7 @@ public Builder setStatus( } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
        * 
* * string status = 4; @@ -6300,7 +6300,7 @@ public Builder clearStatus() { } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go.
        * 
* * string status = 4; @@ -8620,7 +8620,7 @@ public interface Ev_RoleEventOrBuilder extends /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
      * 
* * string status = 2; @@ -8629,7 +8629,7 @@ public interface Ev_RoleEventOrBuilder extends java.lang.String getStatus(); /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
      * 
* * string status = 2; @@ -8784,7 +8784,7 @@ public java.lang.String getName() { private volatile java.lang.Object status_ = ""; /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
      * 
* * string status = 2; @@ -8805,7 +8805,7 @@ public java.lang.String getStatus() { } /** *
-     * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+     * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
      * 
* * string status = 2; @@ -9444,7 +9444,7 @@ public Builder setNameBytes( private java.lang.Object status_ = ""; /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
        * 
* * string status = 2; @@ -9464,7 +9464,7 @@ public java.lang.String getStatus() { } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
        * 
* * string status = 2; @@ -9485,7 +9485,7 @@ public java.lang.String getStatus() { } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
        * 
* * string status = 2; @@ -9502,7 +9502,7 @@ public Builder setStatus( } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
        * 
* * string status = 2; @@ -9516,7 +9516,7 @@ public Builder clearStatus() { } /** *
-       * posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
+       * possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles
        * 
* * string status = 2; diff --git a/src/alice/dip/protos/events.proto b/src/alice/dip/protos/events.proto index a40ecb6..8928b98 100644 --- a/src/alice/dip/protos/events.proto +++ b/src/alice/dip/protos/events.proto @@ -78,7 +78,7 @@ message Ev_TaskEvent { string name = 1; // task name, based on the name of the task class string taskid = 2; // task id, unique string state = 3; // state machine state for this task - string status = 4; // posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. + string status = 4; // possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. string hostname = 5; string className = 6; // name of the task class from which this task was spawned Traits traits = 7; @@ -99,7 +99,7 @@ message Ev_CallEvent { message Ev_RoleEvent { string name = 1; // role name - string status = 2; // posible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. Derived from the state of child tasks, calls or other roles + string status = 2; // possible values: ACTIVE/INACTIVE/PARTIAL/UNDEFINED/UNDEPLOYABLE as defined in status.go. 
Derived from the state of child tasks, calls or other roles string state = 3; // state machine state for this role string rolePath = 4; // path to this role within the environment string environmentId = 5; From d3827a8be3da823feabdf44515bea5d1167f66fb Mon Sep 17 00:00:00 2001 From: George Raduta Date: Wed, 8 Oct 2025 16:40:19 +0200 Subject: [PATCH 13/16] Improve logging for safe beam messages --- src/alice/dip/DipMessagesProcessor.java | 29 ++++++++++--------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/src/alice/dip/DipMessagesProcessor.java b/src/alice/dip/DipMessagesProcessor.java index 69bf358..cc8a176 100644 --- a/src/alice/dip/DipMessagesProcessor.java +++ b/src/alice/dip/DipMessagesProcessor.java @@ -318,27 +318,22 @@ private void handleSafeBeamMessage(DipData dipData) throws BadParameter, TypeMis if (currentFill == null) return; String bm = currentFill.getBeamMode(); - - if (bm.contentEquals("STABLE BEAMS")) { - AliDip2BK.log( - 0, - "ProcData.newSafeBeams", - " VAL=" + safeBeamPayload + " isB1=" + isBeam1 + " isB2=" + isBeam2 + " isSB=" + isStableBeams - ); - - if (!isBeam1 || !isBeam2) { + AliDip2BK.log( + 1, + "ProcData.newSafeBeams", + " VAL=" + safeBeamPayload + " isB1=" + isBeam1 + " isB2=" + isBeam2 + " isSB=" + isStableBeams + ); + if ((bm.contentEquals("STABLE BEAMS") && (!isBeam1 || !isBeam2))) { currentFill.setBeamMode(time, "LOST BEAMS"); - if (beamModeEventsKafkaProducer != null) { - beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, time); + if (this.beamModeEventsKafkaProducer != null) { + this.beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, time); } AliDip2BK.log(5, "ProcData.newSafeBeams", " CHANGE BEAM MODE TO LOST BEAMS !!! 
"); - } - - return; - } - - if (bm.contentEquals("LOST BEAMS") && isBeam1 && isBeam2) { + } else if (bm.contentEquals("LOST BEAMS") && isBeam1 && isBeam2) { currentFill.setBeamMode(time, "STABLE BEAMS"); + if (this.beamModeEventsKafkaProducer != null) { + this.beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, time); + } AliDip2BK.log(5, "ProcData.newSafeBeams", " RECOVER FROM BEAM LOST TO STABLE BEAMS "); } } From 92bb33991b67afc0923b47ebab0fdf21a62d4f79 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Wed, 8 Oct 2025 16:58:22 +0200 Subject: [PATCH 14/16] Add event for each new beam mode received --- src/alice/dip/DipMessagesProcessor.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/alice/dip/DipMessagesProcessor.java b/src/alice/dip/DipMessagesProcessor.java index cc8a176..a69c448 100644 --- a/src/alice/dip/DipMessagesProcessor.java +++ b/src/alice/dip/DipMessagesProcessor.java @@ -589,6 +589,9 @@ public void newBeamMode(long date, String BeamMode) { if (currentFill != null) { currentFill.setBeamMode(date, BeamMode); + if (this.beamModeEventsKafkaProducer != null) { + this.beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, date); + } int mc = -1; for (int i = 0; i < AliDip2BK.endFillCases.length; i++) { From d9fcf08f58bea3e56ee08922631c94f6c674b3c2 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Wed, 8 Oct 2025 17:00:47 +0200 Subject: [PATCH 15/16] Remove dead code --- src/alice/dip/DipMessagesProcessor.java | 38 ++++++------------------- 1 file changed, 8 insertions(+), 30 deletions(-) diff --git a/src/alice/dip/DipMessagesProcessor.java b/src/alice/dip/DipMessagesProcessor.java index a69c448..8a73e0f 100644 --- a/src/alice/dip/DipMessagesProcessor.java +++ b/src/alice/dip/DipMessagesProcessor.java @@ -588,40 +588,18 @@ public void newFillNo(long date, String strFno, String par1, String par2, String public void newBeamMode(long date, String BeamMode) { if (currentFill != null) { + AliDip2BK.log( + 2, + 
"ProcData.newBeamMode", + "New beam mode=" + BeamMode + " for FILL_NO=" + currentFill.fillNo + ); currentFill.setBeamMode(date, BeamMode); + bookkeepingClient.updateLhcFill(currentFill); + saveState(); + if (this.beamModeEventsKafkaProducer != null) { this.beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, date); } - - int mc = -1; - for (int i = 0; i < AliDip2BK.endFillCases.length; i++) { - if (AliDip2BK.endFillCases[i].equalsIgnoreCase(BeamMode)) mc = i; - } - if (mc < 0) { - - AliDip2BK.log( - 2, - "ProcData.newBeamMode", - "New beam mode=" + BeamMode + " for FILL_NO=" + currentFill.fillNo - ); - bookkeepingClient.updateLhcFill(currentFill); - saveState(); - if (beamModeEventsKafkaProducer != null) { - beamModeEventsKafkaProducer.sendEvent(currentFill.fillNo, currentFill, date); - } - } else { - currentFill.endedTime = date; - bookkeepingClient.updateLhcFill(currentFill); - if (AliDip2BK.KEEP_FILLS_HISTORY_DIRECTORY != null) { - writeFillHistFile(currentFill); - } - AliDip2BK.log( - 3, - "ProcData.newBeamMode", - "CLOSE Fill_NO=" + currentFill.fillNo + " Based on new beam mode=" + BeamMode - ); - currentFill = null; - } } else { AliDip2BK.log(4, "ProcData.newBeamMode", " ERROR new beam mode=" + BeamMode + " NO FILL NO for it"); } From a8fa782c181415b651709f53a26f0660c6f50719 Mon Sep 17 00:00:00 2001 From: George Raduta Date: Wed, 8 Oct 2025 17:37:34 +0200 Subject: [PATCH 16/16] Use adapter for beam mode --- src/alice/dip/LhcInfoObj.java | 13 ----- .../dip/adapters/BeamModeProtoAdapter.java | 40 +++++++++++++++ src/alice/dip/enums/BeamModeEnum.java | 49 +++++++++++++++++++ .../kafka/BeamModeEventsKafkaProducer.java | 9 +++- 4 files changed, 96 insertions(+), 15 deletions(-) create mode 100644 src/alice/dip/adapters/BeamModeProtoAdapter.java create mode 100644 src/alice/dip/enums/BeamModeEnum.java diff --git a/src/alice/dip/LhcInfoObj.java b/src/alice/dip/LhcInfoObj.java index 1965600..b7bb8c7 100644 --- a/src/alice/dip/LhcInfoObj.java +++ 
b/src/alice/dip/LhcInfoObj.java @@ -317,19 +317,6 @@ public String getBeamMode() { return last.value; } - /** - * Get the beam mode as a key suitable for enum conversion usage in protobuf - * @return Beam mode string with spaces replaced by underscores, or "UNKNOWN" if beam mode is null - */ - public String getBeamModeAsKey() { - String bm = getBeamMode(); - if (bm == null) { - return "UNKNOWN"; - } - bm = bm.replace(" ", "_"); - return bm; - } - public String getStableBeamStartStr() { long t = getStableBeamStart(); diff --git a/src/alice/dip/adapters/BeamModeProtoAdapter.java b/src/alice/dip/adapters/BeamModeProtoAdapter.java new file mode 100644 index 0000000..9369bfd --- /dev/null +++ b/src/alice/dip/adapters/BeamModeProtoAdapter.java @@ -0,0 +1,40 @@ +/** + * @license + * Copyright CERN and copyright holders of ALICE O2. This software is + * distributed under the terms of the GNU General Public License v3 (GPL + * Version 3), copied verbatim in the file "COPYING". + * + * See http://alice-o2.web.cern.ch/license for full licensing information. + * + * In applying this license CERN does not waive the privileges and immunities + * granted to it by virtue of its status as an Intergovernmental Organization + * or submit itself to any jurisdiction. + */ + +package alice.dip.adapters; + +import alice.dip.enums.BeamModeEnum; + +/** + * Adapter class to convert between string representations of beam modes and the BeamModeEnum. + */ +public class BeamModeProtoAdapter { + + /** + * Returns the enum constant matching the given string, or UNKNOWN if not found. + * Accepts both space and underscore separated names, case-insensitive. + * @param beamMode The beam mode string to convert. + * @return The corresponding BeamModeEnum constant, or UNKNOWN if not recognized. 
+ */ + public static BeamModeEnum fromStringToEnum(String beamMode) { + if (beamMode == null || beamMode.trim().isEmpty()) { + return BeamModeEnum.UNKNOWN; + } + for (BeamModeEnum value : BeamModeEnum.values()) { + if (value.label.equalsIgnoreCase(beamMode)) { + return value; + } + } + return BeamModeEnum.UNKNOWN; + } +} diff --git a/src/alice/dip/enums/BeamModeEnum.java b/src/alice/dip/enums/BeamModeEnum.java new file mode 100644 index 0000000..873d8d9 --- /dev/null +++ b/src/alice/dip/enums/BeamModeEnum.java @@ -0,0 +1,49 @@ +/** + * @license + * Copyright CERN and copyright holders of ALICE O2. This software is + * distributed under the terms of the GNU General Public License v3 (GPL + * Version 3), copied verbatim in the file "COPYING". + * + * See http://alice-o2.web.cern.ch/license for full licensing information. + * + * In applying this license CERN does not waive the privileges and immunities + * granted to it by virtue of its status as an Intergovernmental Organization + * or submit itself to any jurisdiction. 
+ */ + +package alice.dip.enums; + +/** + * Java enum matching the BeamMode values from DIP service and common.proto + * @enum BeamModeEnum + */ +public enum BeamModeEnum { + UNKNOWN("UNKNOWN"), + SETUP("SETUP"), + ABORT("ABORT"), + INJECTION_PROBE_BEAM("INJECTION PROBE BEAM"), + INJECTION_SETUP_BEAM("INJECTION SETUP BEAM"), + INJECTION_PHYSICS_BEAM("INJECTION PHYSICS BEAM"), + PREPARE_RAMP("PREPARE RAMP"), + RAMP("RAMP"), + FLAT_TOP("FLAT TOP"), + SQUEEZE("SQUEEZE"), + ADJUST("ADJUST"), + STABLE_BEAMS("STABLE BEAMS"), + LOST_BEAMS("LOST BEAMS"), + UNSTABLE_BEAMS("UNSTABLE BEAMS"), + BEAM_DUMP_WARNING("BEAM DUMP WARNING"), + BEAM_DUMP("BEAM DUMP"), + RAMP_DOWN("RAMP DOWN"), + CYCLING("CYCLING"), + RECOVERY("RECOVERY"), + INJECT_AND_DUMP("INJECT AND DUMP"), + CIRCULATE_AND_DUMP("CIRCULATE AND DUMP"), + NO_BEAM("NO BEAM"); + + public final String label; + + private BeamModeEnum(String label) { + this.label = label; + } +} diff --git a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java index 2181669..5531a9a 100644 --- a/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java +++ b/src/alice/dip/kafka/BeamModeEventsKafkaProducer.java @@ -16,10 +16,12 @@ import org.apache.kafka.common.serialization.ByteArraySerializer; import org.apache.kafka.common.serialization.IntegerSerializer; +import alice.dip.adapters.BeamModeProtoAdapter; import alice.dip.AliDip2BK; -import alice.dip.LhcInfoObj; +import alice.dip.enums.BeamModeEnum; import alice.dip.kafka.dto.Common; import alice.dip.kafka.dto.Events; +import alice.dip.LhcInfoObj; /** * Kafka producer for LHC Beam Mode events, serialized using Protocol Buffers. 
@@ -44,12 +46,15 @@ public BeamModeEventsKafkaProducer(String bootstrapServers) { * @param timestamp - event timestamp at which the beam mode change event was received from DIP */ public void sendEvent(Integer fillNumber, LhcInfoObj fill, long timestamp) { + String beamModeStr = fill.getBeamMode(); + BeamModeEnum beamMode = BeamModeProtoAdapter.fromStringToEnum(beamModeStr); + Common.BeamInfo beamInfo = Common.BeamInfo.newBuilder() .setStableBeamsStart(fill.getStableBeamStart()) .setStableBeamsEnd(fill.getStableBeamStop()) .setFillNumber(fill.fillNo) .setFillingSchemeName(fill.LHCFillingSchemeName) - .setBeamMode(Common.BeamMode.valueOf(fill.getBeamModeAsKey())) + .setBeamMode(Common.BeamMode.valueOf(beamMode.label)) .setBeamType(fill.beamType) .build();