Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
e4af603
add convenience method
baltzell Apr 22, 2026
f8b341f
remove unused stuff
baltzell Apr 22, 2026
7195985
add decoding engine
baltzell Apr 22, 2026
98bb7ea
add it to the clara yaml
baltzell Apr 22, 2026
2fbcf2e
use a pool
baltzell Apr 23, 2026
3e55ec2
hmm
baltzell Apr 23, 2026
409dfe7
allow decoder instances to share ConstantsManagers
baltzell Apr 23, 2026
891476b
share ConstantsManagers
baltzell Apr 23, 2026
ff30fd3
kludge test
baltzell Apr 23, 2026
312afc8
cleanup
baltzell Apr 23, 2026
2fa9257
try this
baltzell Apr 23, 2026
7cc0094
inherit ConstantsManagers
baltzell Apr 23, 2026
664a5e2
cleanup
baltzell Apr 23, 2026
32bf136
only check tables if not shared
baltzell Apr 23, 2026
01c2158
higher ports on macos, cleanup process dpe process
baltzell Apr 23, 2026
b772320
remove ineffective pid trap
baltzell Apr 23, 2026
f68d8b9
Revert "higher ports on macos, cleanup process dpe process"
baltzell Apr 24, 2026
7587bf8
Revert "fix job name"
baltzell Apr 24, 2026
2aced8e
Revert "decouple ubuntu/macos builds to reduce wait"
baltzell Apr 24, 2026
3ddee53
Revert "remove unnecessary reader class"
baltzell Apr 24, 2026
80d4e85
Revert "remove example engine"
baltzell Apr 24, 2026
6fd4a8f
restore reported data type
baltzell Apr 25, 2026
5d094fc
rename class
baltzell Apr 27, 2026
42708d3
undo
baltzell Apr 27, 2026
80133f9
fix rebase oops
baltzell Apr 27, 2026
176158a
rename class
baltzell Apr 27, 2026
74d5849
fix rebase oops
baltzell Apr 27, 2026
87203c6
cleanup
baltzell Apr 27, 2026
281f621
cleanup
baltzell Apr 27, 2026
467a729
stf
baltzell Apr 27, 2026
b5301a2
just events, not per thread
baltzell Apr 28, 2026
57cf6ce
reduce constants sharing
baltzell Apr 28, 2026
b18a3aa
bugfix
baltzell Apr 28, 2026
40aca56
share one RCDBManager
baltzell Apr 28, 2026
0ce3f4a
bugfix
baltzell Apr 28, 2026
ef9d6d9
disable gnuplot logfile
baltzell Apr 28, 2026
d9f6ed5
Merge branch 'decoder-engine-2' into decoder-engine
baltzell Apr 28, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public int readEventCount() throws EventReaderException {

@Override
public ByteOrder readByteOrder() throws EventReaderException {
// NOTE(review): byte order is hard-coded to little-endian, overriding whatever
// the reader reports (original call kept for reference below). Presumably a
// workaround for readers misreporting the file byte order — confirm before
// restoring the delegated call.
return ByteOrder.LITTLE_ENDIAN; //reader.getFileByteOrder();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@

import org.jlab.io.base.DataEvent;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.io.hipo.HipoDataSync;

import org.jlab.jnp.hipo4.data.Bank;
import org.jlab.jnp.hipo4.data.Event;
Expand All @@ -38,27 +36,28 @@ public class CLASDecoder {
protected SchemaFactory schemaFactory = new SchemaFactory();
private CodaEventDecoder codaDecoder = null;
private List<DetectorDataDgtz> dataList = new ArrayList<>();
private HipoDataSync writer = null;
private HipoDataEvent hipoEvent = null;
private boolean isRunNumberFixed = false;
private int decoderDebugMode = 0;
private ModeAHDC ahdcExtractor = new ModeAHDC();
private RCDBManager rcdbManager = new RCDBManager();
private static RCDBManager rcdbManager = new RCDBManager();

/**
 * Creates a decoder with CODA and detector event decoders and loads the
 * HIPO bank schema definitions from $CLAS12DIR/etc/bankdefs/hipo4.
 *
 * @param development passed through to DetectorEventDecoder; presumably
 *        selects development translation tables — TODO confirm
 */
public CLASDecoder(boolean development){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder(development);
// NOTE(review): the writer/hipoEvent assignments below correspond to fields
// removed elsewhere in this change set — verify they belong in the merged file.
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

/**
 * Default constructor: creates CODA and detector event decoders and loads
 * the HIPO bank schema definitions from $CLAS12DIR/etc/bankdefs/hipo4.
 */
public CLASDecoder(){
codaDecoder = new CodaEventDecoder();
detectorDecoder = new DetectorEventDecoder();
// NOTE(review): the writer/hipoEvent assignments below correspond to fields
// removed elsewhere in this change set — verify they belong in the merged file.
writer = new HipoDataSync();
hipoEvent = (HipoDataEvent) writer.createEvent();
String dir = ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4");
schemaFactory.initFromDirectory(dir);
}

/**
 * Copy constructor: builds a new decoder whose DetectorEventDecoder shares
 * the constants managers of an existing instance, avoiding a repeated load
 * of the translation tables. Bank schemas are still read from
 * $CLAS12DIR/etc/bankdefs/hipo4.
 *
 * @param d existing decoder whose constants are inherited
 */
public CLASDecoder(CLASDecoder d) {
    this.codaDecoder = new CodaEventDecoder();
    this.detectorDecoder = new DetectorEventDecoder(d.detectorDecoder);
    this.schemaFactory.initFromDirectory(
            ClasUtilsFile.getResourceDir("CLAS12DIR", "etc/bankdefs/hipo4"));
}
Expand Down Expand Up @@ -792,4 +791,8 @@ public Event getDecodedEvent(EvioDataEvent rawEvent, int run, int counter, Doubl

return decodedEvent;
}

/**
 * Convenience overload delegating to the full
 * {@code getDecodedEvent(EvioDataEvent, int, int, Double, Double)} with
 * placeholder run/counter values of -1 and null for the two Double
 * arguments (presumably field scales — full signature not visible here,
 * TODO confirm their semantics when null).
 *
 * @param rawEvent raw EVIO event to decode
 * @return the decoded HIPO event
 */
public Event getDecodedEvent(EvioDataEvent rawEvent) {
return getDecodedEvent(rawEvent, -1, -1, null, null);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/
public class DetectorEventDecoder {

boolean sharedManagers = false;
ConstantsManager translationManager = new ConstantsManager();
ConstantsManager fitterManager = new ConstantsManager();
ConstantsManager scalerManager = new ConstantsManager();
Expand Down Expand Up @@ -45,6 +46,18 @@ public DetectorEventDecoder(boolean development){
}
}

/** Default constructor: loads the standard translation/fitter/scaler tables. */
public DetectorEventDecoder(){
    initDecoder();
}

/**
 * Copy constructor: reuses the already-initialized ConstantsManagers of an
 * existing decoder instead of loading the CCDB tables again.
 *
 * @param d decoder whose constants managers are shared with this instance
 */
public DetectorEventDecoder(DetectorEventDecoder d) {
    this.translationManager = d.translationManager;
    this.fitterManager = d.fitterManager;
    this.scalerManager = d.scalerManager;
    // Must be set before initDecoder(), which skips table (re)initialization
    // when the managers are shared.
    this.sharedManagers = true;
    this.initDecoder();
}

public void setTimestamp(String timestamp) {
translationManager.setTimeStamp(timestamp);
fitterManager.setTimeStamp(timestamp);
Expand Down Expand Up @@ -80,10 +93,6 @@ public float getRcdbSolenoidScale() {
getValue()).floatValue();
}

public DetectorEventDecoder(){
this.initDecoder();
}

public final void initDecoderDev(){
keysTrans = Arrays.asList(new DetectorType[]{ DetectorType.HTCC,DetectorType.BST,DetectorType.RTPC} );
tablesTrans = Arrays.asList(new String[]{ "/daq/tt/clasdev/htcc","/daq/tt/clasdev/svt","/daq/tt/clasdev/rtpc" });
Expand All @@ -109,7 +118,6 @@ public final void initDecoder(){
"/daq/tt/rf","/daq/tt/bmt","/daq/tt/fmt","/daq/tt/rich2","/daq/tt/hel","/daq/tt/band","/daq/tt/rtpc",
"/daq/tt/raster","/daq/tt/atof","/daq/tt/ahdc"
});
translationManager.init(tablesTrans);

// ADC waveform fitter translation table
keysFitter = Arrays.asList(new DetectorType[]{DetectorType.FTCAL,DetectorType.FTHODO,DetectorType.FTTRK,DetectorType.FTOF,DetectorType.LTCC,
Expand All @@ -122,17 +130,20 @@ public final void initDecoder(){
"/daq/config/fmt","/daq/fadc/hel","/daq/fadc/rf","/daq/fadc/band","/daq/fadc/raster",
"/daq/config/ahdc"
});
fitterManager.init(tablesFitter);

// Data filter list
keysFilter = Arrays.asList(new DetectorType[]{DetectorType.DC});

scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));


keysMicromega = Arrays.asList(new DetectorType[]{DetectorType.BMT,DetectorType.FMT,DetectorType.FTTRK});

checkTables();
if (!sharedManagers) {
translationManager.init(tablesTrans);
fitterManager.init(tablesFitter);
scalerManager.init(Arrays.asList(new String[]{"/runcontrol/fcup","/runcontrol/slm","/runcontrol/hwp",
"/runcontrol/helicity","/daq/config/scalers/dsc1"}));
checkTables();
}

}

public void checkTables() {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
package org.jlab.clas.reco;

import java.util.Set;
import java.util.HashSet;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.jlab.clara.base.ClaraUtil;
import org.jlab.clara.engine.Engine;
import org.jlab.clara.engine.EngineData;
import org.jlab.clara.engine.EngineDataType;
import org.jlab.clara.engine.EngineStatus;
import org.jlab.detector.decode.CLASDecoder;
import org.jlab.io.evio.EvioDataEvent;
import org.jlab.io.hipo.HipoDataEvent;
import org.jlab.jnp.hipo4.data.SchemaFactory;
import org.json.JSONObject;

/**
*
* @author baltzell
*/
/**
 * CLARA engine that decodes raw EVIO events into HIPO events. A fixed-size
 * blocking pool of CLASDecoder instances lets multiple service threads decode
 * concurrently; only every Nth decoder loads its own constants, the rest share
 * them via the CLASDecoder copy constructor.
 *
 * @author baltzell
 */
public class DecoderEngine implements Engine {

    /** Number of CLASDecoder instances held in the pool. */
    static final int POOL_SIZE = 64;
    static final Set<EngineDataType> ED_TYPES = ClaraUtil.buildDataTypes(
        Clas12Types.EVIO,Clas12Types.HIPO,EngineDataType.JSON,EngineDataType.STRING);

    SchemaFactory schema;
    BlockingQueue<CLASDecoder> pool;
    // One decoder in every group of this size loads its own constants;
    // the remainder of the group share that decoder's ConstantsManagers.
    int constantsShared = 16;

    public DecoderEngine() {
        schema = new SchemaFactory();
        schema.initFromDirectory(System.getenv("CLAS12DIR") + "/etc/bankdefs/hipo4");
    }

    @Override
    public Set<EngineDataType> getInputDataTypes() { return ED_TYPES; }
    @Override
    public Set<EngineDataType> getOutputDataTypes() { return ED_TYPES; }
    @Override
    public EngineData executeGroup(Set<EngineData> set) { return null; }
    @Override
    public Set<String> getStates() { return new HashSet<>(); }
    @Override
    public String getDescription() { return "decoder engine"; }
    @Override
    public String getVersion() { return "1.0"; }
    @Override
    public String getAuthor() { return "baltzell"; }
    @Override
    public void reset() {}
    @Override
    public void destroy() {}

    /**
     * Builds the decoder pool. Recognized JSON keys in the configuration data:
     * "variation" (CCDB variation) and "timestamp" (CCDB timestamp), applied
     * only to the constants-owning decoders.
     *
     * @param ed configuration data whose payload is a JSON string
     * @return the input data, unchanged
     */
    @Override
    public EngineData configure(EngineData ed) {
        JSONObject json = new JSONObject(ed.getData());
        pool = new ArrayBlockingQueue<>(POOL_SIZE);
        CLASDecoder d0 = null;
        for (int i=0; i<POOL_SIZE; i++) {
            CLASDecoder d;
            if (i % constantsShared == 0) {
                d = d0 = new CLASDecoder();
                if (json.has("variation")) d0.setVariation(json.getString("variation"));
                // BUGFIX: previously called setVariation with the timestamp value,
                // silently clobbering the variation and ignoring the timestamp.
                if (json.has("timestamp")) d0.setTimestamp(json.getString("timestamp"));
            }
            else {
                // Share the constants of the most recent fully-initialized decoder.
                d = new CLASDecoder(d0);
            }
            pool.add(d);
        }
        return ed;
    }

    /**
     * Decodes one EVIO event into a HIPO event, borrowing a decoder from the
     * pool for the duration of the call. On any failure the input data is
     * returned with ERROR status and a description of the exception; a missing
     * pool (configure never called) surfaces the same way via the catch below.
     *
     * @param input engine data carrying the raw EVIO event as a ByteBuffer
     * @return engine data carrying the decoded HIPO event, or an error status
     */
    @Override
    public EngineData execute(EngineData input) {
        EngineData output = input;
        EvioDataEvent evio;
        HipoDataEvent hipo;
        try {
            ByteBuffer bb = (ByteBuffer) input.getData();
            // NOTE(review): byte order is forced to little-endian instead of
            // using bb.order() — matches the reader-side kludge; confirm before
            // honoring the buffer's own order.
            evio = new EvioDataEvent(bb.array(), ByteOrder.LITTLE_ENDIAN);
        } catch (Exception e) {
            String msg = String.format("Error reading input event%n%n%s", ClaraUtil.reportException(e));
            output.setStatus(EngineStatus.ERROR);
            output.setDescription(msg);
            return output;
        }
        try {
            CLASDecoder d = pool.take();
            hipo = new HipoDataEvent(d.getDecodedEvent(evio),schema);
            pool.put(d);
            output.setData("binary/data-hipo", hipo.getHipoEvent());
        } catch (Exception e) {
            String msg = String.format("Error processing input event%n%n%s", ClaraUtil.reportException(e));
            output.setStatus(EngineStatus.ERROR);
            output.setDescription(msg);
            return output;
        }
        return output;
    }
}
6 changes: 4 additions & 2 deletions etc/services/rgd-clarode.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,14 @@ configuration:
outputBankPrefix: "HB"
io-services:
reader:
class: org.jlab.io.clara.DecoderReader
name: DecoderReader
class: org.jlab.io.clara.EvioToEvioReader
name: EvioReader
writer:
class: org.jlab.io.clara.DecoderWriter
name: DecoderWriter
services:
- class: org.jlab.clas.reco.DecoderEngine
name: DECO
- class: org.jlab.service.ai.DCDenoiseEngine
name: DCDN
- class: org.jlab.clas.swimtools.MagFieldsEngine
Expand Down
4 changes: 2 additions & 2 deletions libexec/scaling
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ def cli():
cli.add_argument('-y','--yaml', metavar='YAML',help='path to YAML file',required=True)
cli.add_argument('-c','--clara', metavar='DIR',help='CLARA_HOME path (default=$CLARA_HOME)',default=os.getenv('CLARA_HOME',None))
cli.add_argument('-t','--threads',metavar='#',help='threads (default=4,8)',default='4,8')
cli.add_argument('-e','--events', metavar='#',help='events per thread (default=555)',default=555,type=int)
cli.add_argument('-e','--events', metavar='#',help='events per threads (default=2550)',default=555,type=int)
cli.add_argument('-N','--numa', metavar='#',help='NUMA socket (default=None, choices=[0,1])',default=None,type=int,choices=[0,1])
cli.add_argument('datafile', help='input EVIO/HIPO data file')
cfg = cli.parse_args()
Expand Down Expand Up @@ -58,7 +58,7 @@ def benchmark(cfg, threads, log):
# add the run-clara command:
cmd.extend([cfg.run_clara,
'-c',cfg.clara,
'-n',str(cfg.events*int(threads)),
'-n',str(cfg.events),
'-t',str(threads),
'-l',
'-y',cfg.yaml,
Expand Down
1 change: 1 addition & 0 deletions libexec/scaling.gpl
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ set key nobox inside top left
f(x) = m*x
m = 2

set fit nolog
fit [0:24] f(x) datafile using 1:(1/$2*1e3) via m
rate = sprintf('%.1f Hz/CPU',m)
plot datafile using 1:(1/$2*1e3) pt 7 notitle, f(x) title rate
Expand Down