/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_DEVICE_H
#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_DEVICE_H

#include <android/hardware/neuralnetworks/1.3/IDevice.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/ProtectCallback.h>

#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

// See hardware/interfaces/neuralnetworks/utils/README.md for more information on HIDL interface
// lifetimes across processes and for protecting asynchronous calls across HIDL.

namespace android::hardware::neuralnetworks::V1_3::utils {

// Class that adapts V1_3::IDevice to nn::IDevice.
class Device final : public nn::IDevice {
    struct PrivateConstructorTag {};

  public:
    static nn::GeneralResult<std::shared_ptr<const Device>> create(std::string name,
                                                                   sp<V1_3::IDevice> device);

    Device(PrivateConstructorTag tag, std::string name, std::string versionString,
           nn::DeviceType deviceType, std::vector<nn::Extension> extensions,
           nn::Capabilities capabilities, std::pair<uint32_t, uint32_t> numberOfCacheFilesNeeded,
           sp<V1_3::IDevice> device, hal::utils::DeathHandler deathHandler);

    const std::string& getName() const override;
    const std::string& getVersionString() const override;
    nn::Version getFeatureLevel() const override;
    nn::DeviceType getType() const override;
    const std::vector<nn::Extension>& getSupportedExtensions() const override;
    const nn::Capabilities& getCapabilities() const override;
    std::pair<uint32_t, uint32_t> getNumberOfCacheFilesNeeded() const override;

    nn::GeneralResult<void> wait() const override;

    nn::GeneralResult<std::vector<bool>> getSupportedOperations(
            const nn::Model& model) const override;

    nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
            const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
            nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
            const std::vector<nn::SharedHandle>& dataCache,
            const nn::CacheToken& token) const override;

    nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
            nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
            const std::vector<nn::SharedHandle>& dataCache,
            const nn::CacheToken& token) const override;

    nn::GeneralResult<nn::SharedBuffer> allocate(
            const nn::BufferDesc& desc, const std::vector<nn::SharedPreparedModel>& preparedModels,
            const std::vector<nn::BufferRole>& inputRoles,
            const std::vector<nn::BufferRole>& outputRoles) const override;

  private:
    const std::string kName;
    const std::string kVersionString;
    const nn::DeviceType kDeviceType;
    const std::vector<nn::Extension> kExtensions;
    const nn::Capabilities kCapabilities;
    const std::pair<uint32_t, uint32_t> kNumberOfCacheFilesNeeded;
    const sp<V1_3::IDevice> kDevice;
    const hal::utils::DeathHandler kDeathHandler;
};

}  // namespace android::hardware::neuralnetworks::V1_3::utils

#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_1_3_UTILS_DEVICE_H
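
// Usage sketch (illustrative comment only, not part of the header's API surface).
// It assumes a hypothetical driver instance name "example-driver"; real clients
// obtain the HIDL handle via V1_3::IDevice::getService and then wrap it with
// Device::create to get a canonical nn::IDevice adapter.
//
//     sp<V1_3::IDevice> halDevice = V1_3::IDevice::getService("example-driver");
//     if (halDevice != nullptr) {
//         auto result = utils::Device::create("example-driver", std::move(halDevice));
//         if (result.has_value()) {
//             const std::shared_ptr<const utils::Device> device = std::move(result).value();
//             // Calls such as device->getName() or device->prepareModel(...) now go
//             // through the canonical nn::IDevice interface and are forwarded to the
//             // underlying V1_3 HIDL driver.
//         }
//     }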