HDF5Writer.cc
/**
* \authors The HEJ collaboration (see AUTHORS for details)
* \date 2019
* \copyright GPLv2 or later
*/
#include "HEJ/HDF5Writer.hh"

#include <cassert>
#include <cmath>     // floor, log2 used in to_index below
#include <stdexcept> // std::invalid_argument thrown when built without HDF5

#include "LHEF/LHEF.h"

#ifdef HEJ_BUILD_WITH_HDF5

#include <type_traits>

#include "HEJ/event_types.hh"
#include "HEJ/Event.hh"

#include "highfive/H5File.hpp"
namespace HEJ {

  using HighFive::File;
  using HighFive::DataSpace;

  namespace {
    constexpr std::size_t chunk_size = 1000;
    constexpr unsigned compression_level = 3;
    size_t to_index(event_type::EventType const type){
      return type == 0 ? 0 : std::floor(std::log2(type)) + 1;
    }
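    // Helpers writing a single value or a vector as a complete HDF5 dataset;
    // DataSpace::From() deduces the dataspace dimensions from the argument.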
    template<typename T>
    void write_dataset(
        HighFive::Group & group, std::string const & name, T val
    ){
      using data_t = std::decay_t<T>;
      group.createDataSet<data_t>(name, DataSpace::From(val)).write(val);
    }

    template<typename T>
    void write_dataset(
        HighFive::Group & group, std::string const & name,
        std::vector<T> const & val
    ){
      using data_t = std::decay_t<T>;
      group.createDataSet<data_t>(name, DataSpace::From(val)).write(val);
    }
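    // Buffer accumulating events in a structure-of-arrays layout mirroring
    // the HDF5 datasets: one entry per event in the "event" columns and one
    // entry per particle in the "particle" columns. "start" records the
    // index of each event's first particle in the particle columns.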
    struct Cache {
      explicit Cache(size_t capacity):
        capacity{capacity}
      {
        nparticles.reserve(capacity);
        start.reserve(capacity);
        pid.reserve(capacity);
        weight.reserve(capacity);
        scale.reserve(capacity);
        fscale.reserve(capacity);
        rscale.reserve(capacity);
        aqed.reserve(capacity);
        aqcd.reserve(capacity);
        trials.reserve(capacity);
      }

      void fill(HEJ::Event ev) {
        const auto hepeup = to_HEPEUP(ev, nullptr);
        fill_event_params(hepeup);
        fill_event_particles(hepeup);
      }

      void fill_event_params(LHEF::HEPEUP const & ev) {
        nparticles.emplace_back(ev.NUP);
        start.emplace_back(particle_pos);
        pid.emplace_back(ev.IDPRUP);
        weight.emplace_back(ev.XWGTUP);
        scale.emplace_back(ev.SCALUP);
        fscale.emplace_back(ev.scales.muf);
        rscale.emplace_back(ev.scales.mur);
        aqed.emplace_back(ev.AQEDUP);
        aqcd.emplace_back(ev.AQCDUP);
        // set first trial=1 for first event
        // -> sum(trials) = 1 -> xs=sum(weights)/sum(trials) as in Sherpa
        if(particle_pos == 0){
          trials.emplace_back(1.);
        }
        else {
          trials.emplace_back(0.);
        }
        particle_pos += ev.NUP;
      }

      void fill_event_particles(LHEF::HEPEUP const & ev) {
        id.insert(end(id), begin(ev.IDUP), end(ev.IDUP));
        status.insert(end(status), begin(ev.ISTUP), end(ev.ISTUP));
        lifetime.insert(end(lifetime), begin(ev.VTIMUP), end(ev.VTIMUP));
        spin.insert(end(spin), begin(ev.SPINUP), end(ev.SPINUP));
        for(int i = 0; i < ev.NUP; ++i) {
          mother1.emplace_back(ev.MOTHUP[i].first);
          mother2.emplace_back(ev.MOTHUP[i].second);
          color1.emplace_back(ev.ICOLUP[i].first);
          color2.emplace_back(ev.ICOLUP[i].second);
          px.emplace_back(ev.PUP[i][0]);
          py.emplace_back(ev.PUP[i][1]);
          pz.emplace_back(ev.PUP[i][2]);
          e.emplace_back(ev.PUP[i][3]);
          m.emplace_back(ev.PUP[i][4]);
        }
      }

      bool is_full() const {
        return nparticles.size() >= capacity;
      }

      void clear() {
        nparticles.clear();
        start.clear();
        pid.clear();
        id.clear();
        status.clear();
        mother1.clear();
        mother2.clear();
        color1.clear();
        color2.clear();
        weight.clear();
        scale.clear();
        fscale.clear();
        rscale.clear();
        aqed.clear();
        aqcd.clear();
        trials.clear();
        px.clear();
        py.clear();
        pz.clear();
        e.clear();
        m.clear();
        lifetime.clear();
        spin.clear();
      }

      size_t capacity;
      std::vector<int> nparticles, start, pid, id, status,
        mother1, mother2, color1, color2;
      std::vector<double> weight, scale, fscale, rscale, aqed, aqcd, trials,
        px, py, pz, e, m, lifetime, spin;

    private:
      size_t particle_pos = 0;
    };
  }
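  // Pimpl backend. The output file holds four groups: "init" and "procInfo"
  // carry run-level metadata, while the per-event and per-particle records
  // go into extensible datasets under "event" and "particle".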
  struct HDF5Writer::HDF5WriterImpl{
    File file;
    LHEF::HEPRUP heprup;
    Cache cache{chunk_size};
    size_t event_idx = 0;
    size_t particle_idx = 0;

    HDF5WriterImpl(std::string const & filename, LHEF::HEPRUP && hepr):
      file{filename, File::ReadWrite | File::Create | File::Truncate},
      heprup{std::move(hepr)}
    {
      // TODO: code duplication with Les Houches Writer
      const int max_number_types = to_index(event_type::last_type) + 1;
      heprup.NPRUP = max_number_types;
      // ids of event types
      heprup.LPRUP.clear();
      heprup.LPRUP.reserve(max_number_types);
      heprup.LPRUP.emplace_back(0);
      for(
        size_t i = event_type::first_type + 1;
        i <= event_type::last_type;
        i *= 2
      ) {
        heprup.LPRUP.emplace_back(i);
      }
      heprup.XSECUP = std::vector<double>(max_number_types, 0.);
      heprup.XERRUP = std::vector<double>(max_number_types, 0.);
      heprup.XMAXUP = std::vector<double>(max_number_types, 0.);
      write_init();
      create_event_group();
      create_particle_group();
    }

    void write_init() {
      auto init = file.createGroup("init");
      write_dataset(init, "PDFgroupA", heprup.PDFGUP.first);
      write_dataset(init, "PDFgroupB", heprup.PDFGUP.second);
      write_dataset(init, "PDFsetA", heprup.PDFSUP.first);
      write_dataset(init, "PDFsetB", heprup.PDFSUP.second);
      write_dataset(init, "beamA", heprup.IDBMUP.first);
      write_dataset(init, "beamB", heprup.IDBMUP.second);
      write_dataset(init, "energyA", heprup.EBMUP.first);
      write_dataset(init, "energyB", heprup.EBMUP.second);
      write_dataset(init, "numProcesses", heprup.NPRUP);
      write_dataset(init, "weightingStrategy", heprup.IDWTUP);

      auto proc_info = file.createGroup("procInfo");
      write_dataset(proc_info, "procId", heprup.LPRUP);
    }
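    // Dataset creation properties shared by all event/particle datasets:
    // chunked layout (required for resizable HDF5 datasets) plus deflate
    // compression, built once and reused.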
    static HighFive::DataSetCreateProps const & hdf5_chunk() {
      static const auto props = [](){
        HighFive::DataSetCreateProps props;
        props.add(HighFive::Chunking({chunk_size}));
        props.add(HighFive::Deflate(compression_level));
        return props;
      }();
      return props;
    }
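    // All datasets start empty ({0}) and may grow without bound
    // ({DataSpace::UNLIMITED}); resize_event_group()/resize_particle_group()
    // extend them each time the cache is flushed.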
    void create_event_group() {
      static const auto dim = DataSpace({0}, {DataSpace::UNLIMITED});
      auto events = file.createGroup("event");
      events.createDataSet<int>("nparticles", dim, hdf5_chunk());
      events.createDataSet<int>("start", dim, hdf5_chunk());
      events.createDataSet<int>("pid", dim, hdf5_chunk());
      events.createDataSet<double>("weight", dim, hdf5_chunk());
      events.createDataSet<double>("scale", dim, hdf5_chunk());
      events.createDataSet<double>("fscale", dim, hdf5_chunk());
      events.createDataSet<double>("rscale", dim, hdf5_chunk());
      events.createDataSet<double>("aqed", dim, hdf5_chunk());
      events.createDataSet<double>("aqcd", dim, hdf5_chunk());
      events.createDataSet<double>("trials", dim, hdf5_chunk());
    }

    void resize_event_group(size_t new_size) {
      auto events = file.getGroup("event");
      events.getDataSet("nparticles").resize({new_size});
      events.getDataSet("start").resize({new_size});
      events.getDataSet("pid").resize({new_size});
      events.getDataSet("weight").resize({new_size});
      events.getDataSet("scale").resize({new_size});
      events.getDataSet("fscale").resize({new_size});
      events.getDataSet("rscale").resize({new_size});
      events.getDataSet("aqed").resize({new_size});
      events.getDataSet("aqcd").resize({new_size});
      events.getDataSet("trials").resize({new_size});
    }

    void create_particle_group() {
      static const auto dim = DataSpace({0}, {DataSpace::UNLIMITED});
      auto particles = file.createGroup("particle");
      particles.createDataSet<int>("id", dim, hdf5_chunk());
      particles.createDataSet<int>("status", dim, hdf5_chunk());
      particles.createDataSet<int>("mother1", dim, hdf5_chunk());
      particles.createDataSet<int>("mother2", dim, hdf5_chunk());
      particles.createDataSet<int>("color1", dim, hdf5_chunk());
      particles.createDataSet<int>("color2", dim, hdf5_chunk());
      particles.createDataSet<double>("px", dim, hdf5_chunk());
      particles.createDataSet<double>("py", dim, hdf5_chunk());
      particles.createDataSet<double>("pz", dim, hdf5_chunk());
      particles.createDataSet<double>("e", dim, hdf5_chunk());
      particles.createDataSet<double>("m", dim, hdf5_chunk());
      particles.createDataSet<double>("lifetime", dim, hdf5_chunk());
      particles.createDataSet<double>("spin", dim, hdf5_chunk());
    }

    void resize_particle_group(size_t new_size) {
      auto particles = file.getGroup("particle");
      particles.getDataSet("id").resize({new_size});
      particles.getDataSet("status").resize({new_size});
      particles.getDataSet("mother1").resize({new_size});
      particles.getDataSet("mother2").resize({new_size});
      particles.getDataSet("color1").resize({new_size});
      particles.getDataSet("color2").resize({new_size});
      particles.getDataSet("px").resize({new_size});
      particles.getDataSet("py").resize({new_size});
      particles.getDataSet("pz").resize({new_size});
      particles.getDataSet("e").resize({new_size});
      particles.getDataSet("m").resize({new_size});
      particles.getDataSet("lifetime").resize({new_size});
      particles.getDataSet("spin").resize({new_size});
    }
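    // Buffer the event and update the per-type cross section estimate:
    // sum of weights (XSECUP), sum of squared weights (XERRUP, the squared
    // statistical error before taking the square root) and the maximum
    // weight seen so far (XMAXUP).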
    void write(Event const & ev){
      cache.fill(ev);
      if(cache.is_full()) {
        dump_cache();
      }
      const double wt = ev.central().weight;
      const size_t idx = to_index(ev.type());
      heprup.XSECUP[idx] += wt;
      heprup.XERRUP[idx] += wt*wt;
      if(wt > heprup.XMAXUP[idx]){
        heprup.XMAXUP[idx] = wt;
      }
    }

    void dump_cache() {
      write_event_params();
      write_event_particles();
      cache.clear();
    }
    void write_event_params() {
      auto events = file.getGroup("event");
      // choose arbitrary dataset to find size
      const auto dataset = events.getDataSet("nparticles");
      const size_t size = dataset.getSpace().getDimensions().front();
      resize_event_group(size + cache.nparticles.size());
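// Append the cached column PROPERTY to the dataset of the same name:
// select the newly added hyperslab [size, size + cache size) and
// write the cached vector into it.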
#define WRITE_FROM_CACHE(GROUP, PROPERTY) \
GROUP.getDataSet(#PROPERTY).select({size}, {cache.PROPERTY.size()}).write(cache.PROPERTY)
      WRITE_FROM_CACHE(events, nparticles);
      WRITE_FROM_CACHE(events, start);
      WRITE_FROM_CACHE(events, pid);
      WRITE_FROM_CACHE(events, weight);
      WRITE_FROM_CACHE(events, scale);
      WRITE_FROM_CACHE(events, fscale);
      WRITE_FROM_CACHE(events, rscale);
      WRITE_FROM_CACHE(events, aqed);
      WRITE_FROM_CACHE(events, aqcd);
      WRITE_FROM_CACHE(events, trials);
    }

    void write_event_particles() {
      auto particles = file.getGroup("particle");
      // choose arbitrary dataset to find size
      const auto dataset = particles.getDataSet("id");
      const size_t size = dataset.getSpace().getDimensions().front();
      resize_particle_group(size + cache.id.size());
      WRITE_FROM_CACHE(particles, id);
      WRITE_FROM_CACHE(particles, status);
      WRITE_FROM_CACHE(particles, lifetime);
      WRITE_FROM_CACHE(particles, spin);
      WRITE_FROM_CACHE(particles, mother1);
      WRITE_FROM_CACHE(particles, mother2);
      WRITE_FROM_CACHE(particles, color1);
      WRITE_FROM_CACHE(particles, color2);
      WRITE_FROM_CACHE(particles, px);
      WRITE_FROM_CACHE(particles, py);
      WRITE_FROM_CACHE(particles, pz);
      WRITE_FROM_CACHE(particles, e);
      WRITE_FROM_CACHE(particles, m);
    }
#undef WRITE_FROM_CACHE
    ~HDF5WriterImpl(){
      // flush any remaining buffered events, then store the accumulated
      // per-process totals alongside the process ids in "procInfo"
      dump_cache();
      auto proc_info = file.getGroup("procInfo");
      write_dataset(proc_info, "xSection", heprup.XSECUP);
      write_dataset(proc_info, "error", heprup.XERRUP);
      write_dataset(proc_info, "unitWeight", heprup.XMAXUP);
    }
  };
  HDF5Writer::HDF5Writer(std::string const & filename, LHEF::HEPRUP heprup):
    impl_{new HDF5Writer::HDF5WriterImpl{filename, std::move(heprup)}}
  {}
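  // Usage sketch (hypothetical caller, not part of this file):
  //   HEJ::HDF5Writer writer{"events.hdf5", std::move(heprup)};
  //   for(HEJ::Event const & ev: events) writer.write(ev);
  //   // remaining buffered events are flushed when `writer` is destroyed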
  void HDF5Writer::write(Event const & ev){
    impl_->write(ev);
  }
}
#else
// no HDF5 support
namespace HEJ {
  class HDF5Writer::HDF5WriterImpl{};

  HDF5Writer::HDF5Writer(std::string const &, LHEF::HEPRUP){
    throw std::invalid_argument{
      "Failed to create HDF5 writer: "
      "HEJ 2 was built without HDF5 support"
    };
  }

  void HDF5Writer::write(Event const &){
    assert(false);
  }
}
#endif
namespace HEJ {
  HDF5Writer::~HDF5Writer() = default;
}