HDF5Writer.cc
/**
 * \authors   The HEJ collaboration (see AUTHORS for details)
 * \date      2019-2020
 * \copyright GPLv2 or later
 */
#include "HEJ/HDF5Writer.hh"

#include <cassert>
#include <string_view>

#include "LHEF/LHEF.h"

#include "HEJ/ConfigFlags.hh"

#ifdef HEJ_BUILD_WITH_HDF5

#include <algorithm>
#include <cctype>
#include <cmath>
#include <cstddef>
#include <iterator>
#include <type_traits>
#include <utility>
#include <vector>

#include "highfive/H5File.hpp"

#include "HEJ/Event.hh"
#include "HEJ/event_types.hh"

#else

#include "HEJ/exceptions.hh"

#endif
#ifdef HEJ_BUILD_WITH_HDF5

namespace HEJ {

  using HighFive::File;
  using HighFive::DataSpace;

  namespace {
    using std::size_t;

    constexpr size_t CHUNK_SIZE = 1000;
    constexpr unsigned COMPRESSION_LEVEL = 3;

    size_t to_index(event_type::EventType const type){
      return type == 0
        ? 0
        : std::floor(std::log2(static_cast<size_t>(type))) + 1;
    }
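    // The event types are bit flags (powers of two, cf. the LPRUP loop
    // in the constructor below), so to_index maps them onto contiguous
    // indices: 0 -> 0, 1 -> 1, 2 -> 2, 4 -> 3, 8 -> 4, ...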
    template<typename T>
    void write_dataset(HighFive::Group & group, std::string const & name, T val) {
      using data_t = std::decay_t<T>;
      group.createDataSet<data_t>(name, DataSpace::From(val)).write(val);
    }

    template<typename T>
    void write_dataset(
        HighFive::Group & group, std::string const & name,
        std::vector<T> const & val
    ) {
      using data_t = std::decay_t<T>;
      group.createDataSet<data_t>(name, DataSpace::From(val)).write(val);
    }
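    // Convenience wrappers around HighFive: create a dataset sized from
    // the given value and write it in one go, for scalars and vectors.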
    // Column-wise buffer for event and particle records, mirroring the
    // layout of the HDF5 datasets created below; it is flushed to file
    // whenever `capacity` events have been collected.
    struct Cache {
      explicit Cache(size_t capacity):
        capacity{capacity}
      {
        nparticles.reserve(capacity);
        start.reserve(capacity);
        pid.reserve(capacity);
        weight.reserve(capacity);
        scale.reserve(capacity);
        fscale.reserve(capacity);
        rscale.reserve(capacity);
        aqed.reserve(capacity);
        aqcd.reserve(capacity);
        trials.reserve(capacity);
        npLO.reserve(capacity);
        npNLO.reserve(capacity);
      }
      void fill(Event const & ev) {
        // convert the HEJ event to a HEPEUP to get a convenient wrapper
        const auto hepeup = to_HEPEUP(ev, nullptr);

        const auto num_partons = std::distance(
            ev.cbegin_partons(), ev.cend_partons()
        );
        assert(num_partons > 0);
        // Number of partons for LO matching; HEJ requires at least 2 partons
        npLO.emplace_back(num_partons > 1 ? num_partons - 2 : num_partons);
        // Number of real emissions in NLO; HEJ is LO -> -1
        npNLO.emplace_back(-1);
        fill_event_params(hepeup);
        fill_event_particles(hepeup);
      }
      void fill_event_params(LHEF::HEPEUP const & ev) {
        nparticles.emplace_back(ev.NUP);
        start.emplace_back(particle_pos);
        pid.emplace_back(ev.IDPRUP);
        weight.emplace_back(ev.XWGTUP);
        scale.emplace_back(ev.SCALUP);
        fscale.emplace_back(ev.scales.muf);
        rscale.emplace_back(ev.scales.mur);
        aqed.emplace_back(ev.AQEDUP);
        aqcd.emplace_back(ev.AQCDUP);
        // set trials=1 for the first event only
        // -> sum(trials) = 1 -> xs = sum(weights)/sum(trials) as in Sherpa
        if(particle_pos == 0){
          trials.emplace_back(1.);
        } else {
          trials.emplace_back(0.);
        }
        particle_pos += ev.NUP;
      }
      void fill_event_particles(LHEF::HEPEUP const & ev) {
        id.insert(end(id), begin(ev.IDUP), end(ev.IDUP));
        status.insert(end(status), begin(ev.ISTUP), end(ev.ISTUP));
        lifetime.insert(end(lifetime), begin(ev.VTIMUP), end(ev.VTIMUP));
        spin.insert(end(spin), begin(ev.SPINUP), end(ev.SPINUP));
        for(int i = 0; i < ev.NUP; ++i) {
          mother1.emplace_back(ev.MOTHUP[i].first);
          mother2.emplace_back(ev.MOTHUP[i].second);
          color1.emplace_back(ev.ICOLUP[i].first);
          color2.emplace_back(ev.ICOLUP[i].second);
          px.emplace_back(ev.PUP[i][0]);
          py.emplace_back(ev.PUP[i][1]);
          pz.emplace_back(ev.PUP[i][2]);
          e.emplace_back(ev.PUP[i][3]);
          m.emplace_back(ev.PUP[i][4]);
        }
      }
      bool is_full() const {
        return nparticles.size() >= capacity;
      }

      void clear() {
        nparticles.clear();
        start.clear();
        pid.clear();
        id.clear();
        status.clear();
        mother1.clear();
        mother2.clear();
        color1.clear();
        color2.clear();
        weight.clear();
        scale.clear();
        fscale.clear();
        rscale.clear();
        aqed.clear();
        aqcd.clear();
        trials.clear();
        npLO.clear();
        npNLO.clear();
        px.clear();
        py.clear();
        pz.clear();
        e.clear();
        m.clear();
        lifetime.clear();
        spin.clear();
      }
      size_t capacity;
      std::vector<int> nparticles, start, pid, id, status,
        mother1, mother2, color1, color2, npLO, npNLO;
      std::vector<double> weight, scale, fscale, rscale, aqed, aqcd,
        trials, px, py, pz, e, m, lifetime, spin;
      size_t particle_pos = 0;
    };

  } // namespace
  struct HDF5Writer::HDF5WriterImpl: EventWriter {
    File file;
    LHEF::HEPRUP heprup;
    Cache cache{CHUNK_SIZE};
    size_t event_idx = 0;
    size_t particle_idx = 0;
    double xs_scale = 1.;
    HDF5WriterImpl(
        std::string const & filename, LHEF::HEPRUP && hepr,
        std::string_view conf
    ):
      file{filename, File::ReadWrite | File::Create | File::Truncate},
      heprup{std::forward<LHEF::HEPRUP>(hepr)}
    {
      create_header(heprup, conf);
      // TODO: code duplication with Les Houches Writer
      const size_t max_number_types = to_index(event_type::last_type) + 1;
      heprup.NPRUP = max_number_types;
      // ids of event types
      heprup.LPRUP.clear();
      heprup.LPRUP.reserve(max_number_types);
      heprup.LPRUP.emplace_back(0);
      for(
          size_t i = event_type::first_type + 1;
          i <= event_type::last_type; i *= 2
      ) {
        heprup.LPRUP.emplace_back(i);
      }
      heprup.XSECUP = std::vector<double>(max_number_types, 0.);
      heprup.XERRUP = std::vector<double>(max_number_types, 0.);
      heprup.XMAXUP = std::vector<double>(max_number_types, 0.);
      write_init();
      create_event_group();
      create_particle_group();
    }
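    // Resulting file layout: the groups "header", "init" and "procInfo"
    // hold run-level metadata, while the extensible "event" and
    // "particle" groups are appended to chunk by chunk as events are
    // written.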
    void create_header(LHEF::HEPRUP const & heprup, std::string_view conf) {
      auto header = file.createGroup("header");
      if(!heprup.generators.empty()) {
        std::string name = heprup.generators.back().name;
        // Remove spaces for consistency with `LesHouchesWriter`
        // (cast to unsigned char to keep std::isspace well-defined)
        name.erase(
            std::remove_if(
                name.begin(), name.end(),
                [](char c) { return std::isspace(static_cast<unsigned char>(c)); }
            ),
            name.end()
        );
        write_dataset(header, name + "Config", std::string{conf});
      }
    }
    void write_init() {
      auto init = file.createGroup("init");
      write_dataset(init, "PDFgroupA", heprup.PDFGUP.first);
      write_dataset(init, "PDFgroupB", heprup.PDFGUP.second);
      write_dataset(init, "PDFsetA", heprup.PDFSUP.first);
      write_dataset(init, "PDFsetB", heprup.PDFSUP.second);
      write_dataset(init, "beamA", heprup.IDBMUP.first);
      write_dataset(init, "beamB", heprup.IDBMUP.second);
      write_dataset(init, "energyA", heprup.EBMUP.first);
      write_dataset(init, "energyB", heprup.EBMUP.second);
      write_dataset(init, "numProcesses", heprup.NPRUP);
      write_dataset(init, "weightingStrategy", heprup.IDWTUP);
      auto proc_info = file.createGroup("procInfo");
      write_dataset(proc_info, "procId", heprup.LPRUP);
    }
    static HighFive::DataSetCreateProps const & hdf5_chunk() {
      static const auto props = [](){
        HighFive::DataSetCreateProps props;
        props.add(HighFive::Chunking({CHUNK_SIZE}));
        props.add(HighFive::Deflate(COMPRESSION_LEVEL));
        return props;
      }();
      return props;
    }
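    // HDF5 requires a chunked layout for the resizable (UNLIMITED)
    // datasets created below; each chunk holds CHUNK_SIZE entries and
    // is deflate-compressed at COMPRESSION_LEVEL.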
    void create_event_group() {
      static const auto dim = DataSpace({0}, {DataSpace::UNLIMITED});
      auto events = file.createGroup("event");
      events.createDataSet<int>("nparticles", dim, hdf5_chunk());
      events.createDataSet<int>("start", dim, hdf5_chunk());
      events.createDataSet<int>("pid", dim, hdf5_chunk());
      events.createDataSet<double>("weight", dim, hdf5_chunk());
      events.createDataSet<double>("scale", dim, hdf5_chunk());
      events.createDataSet<double>("fscale", dim, hdf5_chunk());
      events.createDataSet<double>("rscale", dim, hdf5_chunk());
      events.createDataSet<double>("aqed", dim, hdf5_chunk());
      events.createDataSet<double>("aqcd", dim, hdf5_chunk());
      events.createDataSet<double>("trials", dim, hdf5_chunk());
      events.createDataSet<int>("npLO", dim, hdf5_chunk());
      events.createDataSet<int>("npNLO", dim, hdf5_chunk());
    }
    void resize_event_group(size_t new_size) {
      auto events = file.getGroup("event");
      events.getDataSet("nparticles").resize({new_size});
      events.getDataSet("start").resize({new_size});
      events.getDataSet("pid").resize({new_size});
      events.getDataSet("weight").resize({new_size});
      events.getDataSet("scale").resize({new_size});
      events.getDataSet("fscale").resize({new_size});
      events.getDataSet("rscale").resize({new_size});
      events.getDataSet("aqed").resize({new_size});
      events.getDataSet("aqcd").resize({new_size});
      events.getDataSet("trials").resize({new_size});
      events.getDataSet("npLO").resize({new_size});
      events.getDataSet("npNLO").resize({new_size});
    }
    void create_particle_group() {
      static const auto dim = DataSpace({0}, {DataSpace::UNLIMITED});
      auto particles = file.createGroup("particle");
      particles.createDataSet<int>("id", dim, hdf5_chunk());
      particles.createDataSet<int>("status", dim, hdf5_chunk());
      particles.createDataSet<int>("mother1", dim, hdf5_chunk());
      particles.createDataSet<int>("mother2", dim, hdf5_chunk());
      particles.createDataSet<int>("color1", dim, hdf5_chunk());
      particles.createDataSet<int>("color2", dim, hdf5_chunk());
      particles.createDataSet<double>("px", dim, hdf5_chunk());
      particles.createDataSet<double>("py", dim, hdf5_chunk());
      particles.createDataSet<double>("pz", dim, hdf5_chunk());
      particles.createDataSet<double>("e", dim, hdf5_chunk());
      particles.createDataSet<double>("m", dim, hdf5_chunk());
      particles.createDataSet<double>("lifetime", dim, hdf5_chunk());
      particles.createDataSet<double>("spin", dim, hdf5_chunk());
    }
    void resize_particle_group(size_t new_size) {
      auto particles = file.getGroup("particle");
      particles.getDataSet("id").resize({new_size});
      particles.getDataSet("status").resize({new_size});
      particles.getDataSet("mother1").resize({new_size});
      particles.getDataSet("mother2").resize({new_size});
      particles.getDataSet("color1").resize({new_size});
      particles.getDataSet("color2").resize({new_size});
      particles.getDataSet("px").resize({new_size});
      particles.getDataSet("py").resize({new_size});
      particles.getDataSet("pz").resize({new_size});
      particles.getDataSet("e").resize({new_size});
      particles.getDataSet("m").resize({new_size});
      particles.getDataSet("lifetime").resize({new_size});
      particles.getDataSet("spin").resize({new_size});
    }
    void write(Event const & ev) override {
      cache.fill(ev);
      if(cache.is_full()) {
        dump_cache();
      }
      const double wt = ev.central().weight;
      const size_t idx = to_index(ev.type());
      heprup.XSECUP[idx] += wt;
      heprup.XERRUP[idx] += wt*wt;
      if(wt > heprup.XMAXUP[idx]){
        heprup.XMAXUP[idx] = wt;
      }
    }
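    // Per event type, XSECUP accumulates the sum of weights and XERRUP
    // the sum of squared weights; finish() rescales both into the final
    // cross section and its statistical error.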
    void set_xs_scale(const double scale) override {
      xs_scale = scale;
    }

    void dump_cache() {
      write_event_params();
      write_event_particles();
      cache.clear();
    }
    void write_event_params() {
      auto events = file.getGroup("event");
      // pick an arbitrary dataset to find the current size
      const auto dataset = events.getDataSet("nparticles");
      const size_t size = dataset.getSpace().getDimensions().front();
      resize_event_group(size + cache.nparticles.size());
      // NOLINTNEXTLINE
#define WRITE_FROM_CACHE(GROUP, PROPERTY) \
      GROUP.getDataSet(#PROPERTY).select({size}, {cache.PROPERTY.size()}).write(cache.PROPERTY)
      WRITE_FROM_CACHE(events, nparticles);
      WRITE_FROM_CACHE(events, start);
      WRITE_FROM_CACHE(events, pid);
      WRITE_FROM_CACHE(events, weight);
      WRITE_FROM_CACHE(events, scale);
      WRITE_FROM_CACHE(events, fscale);
      WRITE_FROM_CACHE(events, rscale);
      WRITE_FROM_CACHE(events, aqed);
      WRITE_FROM_CACHE(events, aqcd);
      WRITE_FROM_CACHE(events, trials);
      WRITE_FROM_CACHE(events, npLO);
      WRITE_FROM_CACHE(events, npNLO);
    }
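    // For reference, WRITE_FROM_CACHE(events, weight) expands to
    //   events.getDataSet("weight")
    //     .select({size}, {cache.weight.size()}).write(cache.weight);
    // i.e. each cached column is appended at offset `size`.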
    void write_event_particles() {
      auto particles = file.getGroup("particle");
      // pick an arbitrary dataset to find the current size
      const auto dataset = particles.getDataSet("id");
      const size_t size = dataset.getSpace().getDimensions().front();
      resize_particle_group(size + cache.id.size());
      WRITE_FROM_CACHE(particles, id);
      WRITE_FROM_CACHE(particles, status);
      WRITE_FROM_CACHE(particles, lifetime);
      WRITE_FROM_CACHE(particles, spin);
      WRITE_FROM_CACHE(particles, mother1);
      WRITE_FROM_CACHE(particles, mother2);
      WRITE_FROM_CACHE(particles, color1);
      WRITE_FROM_CACHE(particles, color2);
      WRITE_FROM_CACHE(particles, px);
      WRITE_FROM_CACHE(particles, py);
      WRITE_FROM_CACHE(particles, pz);
      WRITE_FROM_CACHE(particles, e);
      WRITE_FROM_CACHE(particles, m);
    }
#undef WRITE_FROM_CACHE
    void finish() override {
      if(finished()) {
        throw std::ios_base::failure("HDF5Writer already finished.");
      }
      EventWriter::finish();
      dump_cache();
      auto proc_info = file.getGroup("procInfo");
      for(auto & xs: heprup.XSECUP) {
        xs *= xs_scale;
      }
      write_dataset(proc_info, "xSection", heprup.XSECUP);
      for(auto & xs_err: heprup.XERRUP) {
        xs_err = xs_scale*std::sqrt(xs_err);
      }
      write_dataset(proc_info, "error", heprup.XERRUP);
      write_dataset(proc_info, "unitWeight", heprup.XMAXUP);
      file.flush();
    }

    ~HDF5WriterImpl() override {
      finish_or_abort(this, "HDF5Writer");
    }
  };
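  // Public HDF5Writer interface: thin forwarding wrappers around the
  // pimpl above, which keeps the HighFive types out of the public header.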
  HDF5Writer::HDF5Writer(std::string const & filename, LHEF::HEPRUP heprup):
    HDF5Writer::HDF5Writer{filename, std::move(heprup), ""}
  {}

  HDF5Writer::HDF5Writer(
      std::string const & filename, LHEF::HEPRUP heprup,
      std::string_view config
  ):
    impl_{std::make_unique<HDF5Writer::HDF5WriterImpl>(
        filename, std::move(heprup), config
    )}
  {}

  void HDF5Writer::write(Event const & ev){
    impl_->write(ev);
  }

  void HDF5Writer::set_xs_scale(const double scale) {
    impl_->set_xs_scale(scale);
  }

  void HDF5Writer::finish(){
    impl_->finish();
  }

} // namespace HEJ
#else // no HDF5 support

namespace HEJ {

  struct HDF5Writer::HDF5WriterImpl{};

  HDF5Writer::HDF5Writer(std::string const & filename, LHEF::HEPRUP heprup):
    HDF5Writer::HDF5Writer{filename, heprup, ""}
  {}

  HDF5Writer::HDF5Writer(
      std::string const & /*file*/, LHEF::HEPRUP /*heprup*/,
      std::string_view /* config */
  ){
    throw std::invalid_argument{
      "Failed to create HDF5 writer: "
      "HEJ 2 was built without HDF5 support"
    };
  }

  void HDF5Writer::write(Event const & /*ev*/){
    assert(false);
  }

  void HDF5Writer::set_xs_scale(double /* scale */) {
    assert(false);
  }

  void HDF5Writer::finish(){
    assert(false);
  }

} // namespace HEJ

#endif
namespace HEJ {

  HDF5Writer::~HDF5Writer() = default;

} // namespace HEJ
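// Usage sketch (illustrative only; assumes the public HEJ::HDF5Writer
// interface defined above and a hypothetical event source `events`):
//
//   LHEF::HEPRUP heprup = /* run information, e.g. from an LHE reader */;
//   HEJ::HDF5Writer writer{"events.hdf5", std::move(heprup)};
//   for(HEJ::Event const & ev: events) writer.write(ev);
//   writer.finish();   // flushes the cache and writes the run totals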