//===- NoInferenceModelRunner.cpp - noop ML model runner   ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// A pseudo model runner. We use it to store feature values when collecting
// logs for the default policy, in 'development' mode, but never ask it to
// 'run'.
//===----------------------------------------------------------------------===//
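//
// Illustrative usage sketch (an assumption for illustration, not taken from
// this file; it presumes the MLModelRunner::getTensor<T> accessor and the
// TensorSpec::createSpec<T> factory from TFUtils.h): the runner is built with
// the input feature specs, feature values are written into its buffers for
// logging, and it is never asked to evaluate a model.
//
//   LLVMContext Ctx;
//   std::vector<TensorSpec> Inputs{
//       TensorSpec::createSpec<int64_t>("some_feature", {1})};
//   NoInferenceModelRunner Runner(Ctx, Inputs);
//   *Runner.getTensor<int64_t>(0) = 42; // store a feature value for logging
//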
#include "llvm/Config/config.h"
#if defined(LLVM_HAVE_TF_API)

#include "llvm/Analysis/NoInferenceModelRunner.h"
#include "llvm/Analysis/Utils/TFUtils.h"

using namespace llvm;

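// Pre-allocate one raw buffer per input tensor spec, sized to hold that
// tensor's elements; these buffers back the values later handed out by
// getTensorUntyped().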
NoInferenceModelRunner::NoInferenceModelRunner(
    LLVMContext &Ctx, const std::vector<TensorSpec> &Inputs)
    : MLModelRunner(Ctx, MLModelRunner::Kind::NoOp) {
  ValuesBuffer.reserve(Inputs.size());
  for (const auto &TS : Inputs)
    ValuesBuffer.push_back(std::make_unique<char[]>(TS.getElementCount() *
                                                    TS.getElementByteSize()));
}

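// Return the raw buffer backing input tensor Index; callers read and write
// feature values here, but no model evaluation ever happens.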
void *NoInferenceModelRunner::getTensorUntyped(size_t Index) {
  return ValuesBuffer[Index].get();
}
#endif // defined(LLVM_HAVE_TF_API)