import graph_nets as gn
from graph_nets import utils_tf
# Edge connectivity for a fully-connected 5-node graph with no self-loops:
# the 20 (sender, receiver) pairs below enumerate every ordered pair of
# distinct nodes in {0..4}; snd[i] -> rec[i] is the i-th directed edge.
snd = np.array([1, 2, 3, 4, 2, 3, 4, 3, 4, 4, 0, 0, 0, 0, 1, 1, 1, 2, 2, 3], dtype="int32")
rec = np.array([0, 0, 0, 0, 1, 1, 1, 2, 2, 3, 1, 2, 3, 4, 2, 3, 4, 3, 4, 4], dtype="int32")
36def get_graph_data_dict(num_nodes, num_edges, NODE_FEATURE_SIZE=2, EDGE_FEATURE_SIZE=2, GLOBAL_FEATURE_SIZE=1):
50 snt.nets.MLP([LATENT_SIZE] * NUM_LAYERS, activate_final=
True),
51 snt.LayerNorm(axis=-1, create_offset=
True, create_scale=
True),
58 def __init__(self, name="MLPGraphIndependent"):
59 super(MLPGraphIndependent, self).__init__(name=name)
61 edge_model_fn=
lambda:
snt.nets.MLP([LATENT_SIZE] * NUM_LAYERS, activate_final=
True),
62 node_model_fn=
lambda:
snt.nets.MLP([LATENT_SIZE] * NUM_LAYERS, activate_final=
True),
63 global_model_fn=
lambda:
snt.nets.MLP([LATENT_SIZE] * NUM_LAYERS, activate_final=
True),
67 return self._network(inputs)
72 def __init__(self, name="MLPGraphNetwork"):
73 super(MLPGraphNetwork, self).__init__(name=name)
75 edge_model_fn=make_mlp_model, node_model_fn=make_mlp_model, global_model_fn=make_mlp_model
79 return self._network(inputs)
85 def __init__(self, name="EncodeProcessDecode"):
86 super(EncodeProcessDecode, self).__init__(name=name)
92 def __call__(self, input_op, num_processing_steps):
93 latent = self._encoder(input_op)
96 for _
in range(num_processing_steps):
98 latent = self._core(core_input)
99 decoded_op = self._decoder(latent)
# Graph data dictionaries for the core and decoder GNN stages.
# NOTE(review): get_graph_data_dict, num_nodes, num_edges, LATENT_SIZE,
# ep_model, input_graph_data and processing_steps are defined earlier in the
# file (not visible in this chunk).
# The core stage uses 2 * LATENT_SIZE features — presumably because the core
# consumes the encoder output concatenated with the latent state; confirm
# against EncodeProcessDecode.__call__.
CoreGraphData = get_graph_data_dict(num_nodes, num_edges, 2 * LATENT_SIZE, 2 * LATENT_SIZE, 2 * LATENT_SIZE)

DecodeGraphData = get_graph_data_dict(num_nodes, num_edges, LATENT_SIZE, LATENT_SIZE, LATENT_SIZE)

# Run the graph_nets model once on the input graph to obtain the reference output.
output_gn = ep_model(input_graph_data, processing_steps)
# C++ source injected into ROOT's cling interpreter: enable optimization and
# pull in the four SOFIE-generated GNN headers (encoder, core, decoder,
# output transform).
gen_code = '''#pragma cling optimize(2)
#include "gnn_encoder.hxx"
#include "gnn_core.hxx"
#include "gnn_decoder.hxx"
#include "gnn_output_transform.hxx"'''
201 def infer(self, graphData):
206 self.encoder_session.
infer(input_data)
210 for _
in range(processing_steps):
212 self.core_session.
infer(core_input)
214 self.decoder_session.
infer(core_input)
215 self.output_transform_session.
infer(core_input)
230for i
in range(0, numevts):
237for i
in range(0, numevts):
238 graphData = dataSet[i]
245 output_gn =
ep_model(inputGraphData, processing_steps)
# Histogram of the graphnet (reference) results.
# NOTE: xlow=1 > xup=0 tells ROOT to compute the axis range automatically
# from the buffered fills.
hG = ROOT.TH1D("hG", "Result from graphnet", 20, 1, 0)
251for i
in range(0, numevts):
# Report the graph_nets inference timing; `start`/`end` are taken around the
# event loop above (not visible in this chunk).
print("elapsed time for ", numevts, "events = ", end - start)
261for i
in range(0, numevts):
262 graphData = dataSet[i]
# Time spent converting the input data into SOFIE's format.
print("time to convert data to SOFIE format", endSC - end)

# Histogram of the SOFIE results; xlow=1 > xup=0 requests ROOT's automatic
# axis-range computation.
hS = ROOT.TH1D("hS", "Result from SOFIE", 20, 1, 0)
# Time spent generating/compiling the SOFIE GNN class.
print("time to create SOFIE GNN class", start - start0)
279for i
in range(0, numevts):
# Report the SOFIE inference timing over the same number of events.
print("elapsed time for ", numevts, "events = ", end - start)
# Histograms of the per-element differences between the graph_nets and SOFIE
# outputs (edge, node and global features).  xlow=1 > xup=0 requests ROOT's
# automatic axis-range computation.
hDe = ROOT.TH1D("hDe", "Difference for edge data", 40, 1, 0)
hDn = ROOT.TH1D("hDn", "Difference for node data", 40, 1, 0)
hDg = ROOT.TH1D("hDg", "Difference for global data", 40, 1, 0)
301for i
in range(0, numevts):
310 hDe.Fill(edgesG[j, k] - edgesS[j, k])
316 hDn.Fill(nodesG[j, k] - nodesS[j, k])
ROOT::Detail::TRangeCast<T, true> TRangeDynCast
TRangeDynCast is an adapter class that allows typed iteration through a TCollection.