Geant4 Cross Reference

Cross-Referencing   Geant4
Geant4/examples/extended/parameterisations/Par04/include/Par04OnnxInference.hh

Version: [ ReleaseNotes ] [ 1.0 ] [ 1.1 ] [ 2.0 ] [ 3.0 ] [ 3.1 ] [ 3.2 ] [ 4.0 ] [ 4.0.p1 ] [ 4.0.p2 ] [ 4.1 ] [ 4.1.p1 ] [ 5.0 ] [ 5.0.p1 ] [ 5.1 ] [ 5.1.p1 ] [ 5.2 ] [ 5.2.p1 ] [ 5.2.p2 ] [ 6.0 ] [ 6.0.p1 ] [ 6.1 ] [ 6.2 ] [ 6.2.p1 ] [ 6.2.p2 ] [ 7.0 ] [ 7.0.p1 ] [ 7.1 ] [ 7.1.p1 ] [ 8.0 ] [ 8.0.p1 ] [ 8.1 ] [ 8.1.p1 ] [ 8.1.p2 ] [ 8.2 ] [ 8.2.p1 ] [ 8.3 ] [ 8.3.p1 ] [ 8.3.p2 ] [ 9.0 ] [ 9.0.p1 ] [ 9.0.p2 ] [ 9.1 ] [ 9.1.p1 ] [ 9.1.p2 ] [ 9.1.p3 ] [ 9.2 ] [ 9.2.p1 ] [ 9.2.p2 ] [ 9.2.p3 ] [ 9.2.p4 ] [ 9.3 ] [ 9.3.p1 ] [ 9.3.p2 ] [ 9.4 ] [ 9.4.p1 ] [ 9.4.p2 ] [ 9.4.p3 ] [ 9.4.p4 ] [ 9.5 ] [ 9.5.p1 ] [ 9.5.p2 ] [ 9.6 ] [ 9.6.p1 ] [ 9.6.p2 ] [ 9.6.p3 ] [ 9.6.p4 ] [ 10.0 ] [ 10.0.p1 ] [ 10.0.p2 ] [ 10.0.p3 ] [ 10.0.p4 ] [ 10.1 ] [ 10.1.p1 ] [ 10.1.p2 ] [ 10.1.p3 ] [ 10.2 ] [ 10.2.p1 ] [ 10.2.p2 ] [ 10.2.p3 ] [ 10.3 ] [ 10.3.p1 ] [ 10.3.p2 ] [ 10.3.p3 ] [ 10.4 ] [ 10.4.p1 ] [ 10.4.p2 ] [ 10.4.p3 ] [ 10.5 ] [ 10.5.p1 ] [ 10.6 ] [ 10.6.p1 ] [ 10.6.p2 ] [ 10.6.p3 ] [ 10.7 ] [ 10.7.p1 ] [ 10.7.p2 ] [ 10.7.p3 ] [ 10.7.p4 ] [ 11.0 ] [ 11.0.p1 ] [ 11.0.p2 ] [ 11.0.p3, ] [ 11.0.p4 ] [ 11.1 ] [ 11.1.1 ] [ 11.1.2 ] [ 11.1.3 ] [ 11.2 ] [ 11.2.1 ] [ 11.2.2 ] [ 11.3.0 ]

  1 //
  2 // ********************************************************************
  3 // * License and Disclaimer                                           *
  4 // *                                                                  *
  5 // * The  Geant4 software  is  copyright of the Copyright Holders  of *
  6 // * the Geant4 Collaboration.  It is provided  under  the terms  and *
  7 // * conditions of the Geant4 Software License,  included in the file *
  8 // * LICENSE and available at  http://cern.ch/geant4/license .  These *
  9 // * include a list of copyright holders.                             *
 10 // *                                                                  *
 11 // * Neither the authors of this software system, nor their employing *
 12 // * institutes,nor the agencies providing financial support for this *
 13 // * work  make  any representation or  warranty, express or implied, *
 14 // * regarding  this  software system or assume any liability for its *
 15 // * use.  Please see the license in the file  LICENSE  and URL above *
 16 // * for the full disclaimer and the limitation of liability.         *
 17 // *                                                                  *
 18 // * This  code  implementation is the result of  the  scientific and *
 19 // * technical work of the GEANT4 collaboration.                      *
 20 // * By using,  copying,  modifying or  distributing the software (or *
 21 // * any work based  on the software)  you  agree  to acknowledge its *
 22 // * use  in  resulting  scientific  publications,  and indicate your *
 23 // * acceptance of all terms of the Geant4 Software license.          *
 24 // ********************************************************************
 25 //
 26 
 27 #ifdef USE_INFERENCE_ONNX
 28 #  ifndef PAR04ONNXINFERENCE_HH
 29 #    define PAR04ONNXINFERENCE_HH
 30 #    include "Par04InferenceInterface.hh"  // for Par04InferenceInterface
 31 #    include "core/session/onnxruntime_cxx_api.h"  // for Env, Session, SessionO...
 32 
 33 #    include <G4String.hh>  // for G4String
 34 #    include <G4Types.hh>  // for G4int, G4double
 35 #    include <memory>  // for unique_ptr
 36 #    include <vector>  // for vector
 37 
 38 #    include <core/session/onnxruntime_c_api.h>  // for OrtMemoryInfo
 39 
 40 /**
 41  * @brief Inference using the ONNX runtime.
 42  *
 * Creates an environment which manages an internal thread pool and creates an
 44  * inference session for the model saved as an ONNX file.
 45  * Runs the inference in the session using the input vector from Par04InferenceSetup.
 46  *
 47  **/
 48 
class Par04OnnxInference : public Par04InferenceInterface
{
  public:
    /// Construct the ONNX environment and inference session for a model.
    /// NOTE(review): the leading parameters are unnamed in this declaration;
    /// from the class description, the first G4String is presumably the path
    /// to the ONNX model file and the G4int parameters configure the session
    /// (threading/optimisation) and the execution-provider runtime flags --
    /// confirm against the constructor definition in the .cc file.
    /// @param[in] cuda_keys   CUDA execution-provider option names
    /// @param[in] cuda_values CUDA execution-provider option values,
    ///                        parallel to cuda_keys
    Par04OnnxInference(G4String, G4int, G4int, G4int,
                       G4int,  // For Execution Provider Runtime Flags (for now only CUDA)
                       std::vector<const char*>& cuda_keys, std::vector<const char*>& cuda_values,
                       G4String, G4String);

    /// Default constructor (definition in the .cc file).
    Par04OnnxInference();

    /// Run inference
    /// @param[in] aGenVector Input latent space and conditions
    /// @param[out] aEnergies Model output = generated shower energies
    /// @param[in] aSize Size of the output
    void RunInference(std::vector<float> aGenVector, std::vector<G4double>& aEnergies, int aSize);

  private:
    /// Pointer to the ONNX environment (manages the runtime's internal thread
    /// pool, see class description). Declared before fSession on purpose:
    /// members are destroyed in reverse declaration order, so the session is
    /// torn down before the environment it depends on.
    std::unique_ptr<Ort::Env> fEnv;
    /// Pointer to the ONNX inference session
    std::unique_ptr<Ort::Session> fSession;
    /// ONNX session settings (options applied when creating fSession)
    Ort::SessionOptions fSessionOptions;
    /// ONNX memory info descriptor used with the C API; presumably non-owning
    /// (raw pointer, no deleter) -- TODO(review) confirm ownership in the .cc
    const OrtMemoryInfo* fInfo;
    // NOTE(review): nested forward declaration with no definition or use
    // visible in this header -- looks like a leftover; verify whether the
    // .cc file defines/uses Par04OnnxInference::MemoryInfo before removing.
    struct MemoryInfo;
    /// the input names represent the names given to the model
    /// when defining  the model's architecture (if applicable)
    /// they can also be retrieved from model.summary()
    std::vector<const char*> fInames;
};
 80 
 81 #  endif /* PAR04ONNXINFERENCE_HH */
 82 #endif
 83