Snap for 8730993 from 326aa670f812a052a94f1a7d3ae4266950f49ed1 to mainline-tzdata3-release

Change-Id: I3f2b76cb805995c3af1c88ab991917702c94357a
diff --git a/Android.bp b/Android.bp
index 8ed3f9d..827ddfa 100644
--- a/Android.bp
+++ b/Android.bp
@@ -29,20 +29,3 @@
         "LICENSE",
     ],
 }
-
-java_library {
-    name: "NeuralNetworksApiBenchmark_Lib",
-    installable: true,
-    sdk_version: "28",
-    srcs: [
-        "src/com/android/nn/benchmark/core/**/*.java",
-        "src/com/android/nn/benchmark/evaluators/**/*.java",
-        "src/com/android/nn/benchmark/imageprocessors/**/*.java",
-        "src/com/android/nn/benchmark/util/**/*.java",
-    ],
-    static_libs: [
-      "androidx.test.rules",
-      "android.test.runner.stubs",
-      "android.test.base.stubs",
-    ]
-}
diff --git a/Android.mk b/Android.mk
index 9d19497..996cdfb 100644
--- a/Android.mk
+++ b/Android.mk
@@ -23,19 +23,8 @@
 LOCAL_MODULE_TAGS := tests
 LOCAL_COMPATIBILITY_SUITE += device-tests
 
-# List of NNAPI SL libraries for different chipsets, which are determined by SL release notes.
-QC_SM8350_NNAPI_SL_LIBS := libnnapi_sl_driver libQnnGpu libQnnHtp libQnnHtpPrepare libQnnHtpV68Skel libQnnHtpV68Stub libUnnhalAccGpu libUnnhalAccHtp
-QC_SM8450_NNAPI_SL_LIBS := libnnapi_sl_driver libQnnGpu libQnnHtp libQnnHtpPrepare libQnnHtpV69Skel libQnnHtpV69Stub libUnnhalAccGpu libUnnhalAccHtp
-
-# Set of SL libraries to use in this test. By default all chipsets are
-# included; you can override this variable with a chipset-specific one (see
-# QC_SMxxxx_NNAPI_SL_LIBS) to test a more realistic NNAPI SL distribution scenario.
-ifeq ($(strip $(SL_LIBS)),)
-	SL_LIBS := $(patsubst $(LOCAL_PATH)/sl_prebuilt/%.so,%,$(wildcard $(LOCAL_PATH)/sl_prebuilt/*.so))
-endif
-
 LOCAL_SRC_FILES := $(call all-java-files-under, src/com/android/nn/benchmark)
-LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni libsupport_library_jni $(SL_LIBS)
+LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni
 
 # need fread_unlocked in version 28
 LOCAL_SDK_VERSION := 28
@@ -46,15 +35,6 @@
 LOCAL_ASSET_DIR += $(GOOGLE_TEST_MODELS_DIR)
 endif
 
-# This folder contains metadata describing the SL library distribution.
-# Currently the only metadata is the file sl_prebuilt_filelist.txt, which lists the
-# libraries in this SL. The file is generated by the build_and_run_benchmark.sh script
-# and is used to determine which files to extract if the user requests extraction of the SL.
-SL_PREBUILT_METADATA_DIR := $(LOCAL_PATH)/sl_prebuilt/assets
-ifneq ($(wildcard $(SL_PREBUILT_METADATA_DIR)),)
-LOCAL_ASSET_DIR += $(SL_PREBUILT_METADATA_DIR)
-endif
-
 LOCAL_PACKAGE_NAME := NeuralNetworksApiBenchmark
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 SPDX-license-identifier-MIT
 LOCAL_LICENSE_CONDITIONS := notice
@@ -70,7 +50,7 @@
 LOCAL_COMPATIBILITY_SUITE += device-tests
 
 LOCAL_SRC_FILES := $(call all-java-files-under, src)
-LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni libsupport_library_jni librandom_graph_test_jni $(SL_LIBS)
+LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni librandom_graph_test_jni
 
 # need fread_unlocked in version 28
 LOCAL_SDK_VERSION := 28
@@ -81,15 +61,23 @@
 LOCAL_ASSET_DIR += $(GOOGLE_TEST_MODELS_DIR)
 endif
 
-SL_PREBUILT_METADATA_DIR := $(LOCAL_PATH)/sl_prebuilt/assets
-ifneq ($(wildcard $(SL_PREBUILT_METADATA_DIR)),)
-LOCAL_ASSET_DIR += $(SL_PREBUILT_METADATA_DIR)
-endif
-
 LOCAL_PACKAGE_NAME := NeuralNetworksApiCrashTest
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 SPDX-license-identifier-MIT
 LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_NOTICE_FILE := $(LOCAL_PATH)/LICENSE
 include $(BUILD_PACKAGE)
 
+include $(CLEAR_VARS)
+LOCAL_SDK_VERSION := 28
+LOCAL_SRC_FILES := $(call all-java-files-under, src/com/android/nn/benchmark/core) \
+    $(call all-java-files-under, src/com/android/nn/benchmark/evaluators) \
+    $(call all-java-files-under, src/com/android/nn/benchmark/imageprocessors) \
+    $(call all-java-files-under, src/com/android/nn/benchmark/util)
+LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni
+LOCAL_MODULE := NeuralNetworksApiBenchmark_Lib
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := $(LOCAL_PATH)/LICENSE
+include $(BUILD_JAVA_LIBRARY)
+
 include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/AndroidManifest.xml b/AndroidManifest.xml
index 4295a3e..46e0388 100644
--- a/AndroidManifest.xml
+++ b/AndroidManifest.xml
@@ -24,8 +24,7 @@
     <application android:label="NeuralNetworksBenchmark"
                  android:name=".BenchmarkApplication"
                  android:hardwareAccelerated="true"
-                 android:largeHeap="true"
-                 android:debuggable="true">
+                 android:largeHeap="true">
         <uses-library android:name="android.test.runner" android:required="false"/>
         <uses-library android:name="android.test.base" android:required="false"/>
         <activity android:name=".NNBenchmark">
diff --git a/README.txt b/README.txt
index 0591b5f..f9e2c95 100644
--- a/README.txt
+++ b/README.txt
@@ -74,24 +74,4 @@
 * inference-random-stress: test running a large set of randomly generated models
 
 * performance-degradation-stress: verifies that accelerator inference speed is not degrading over
-a certain threshold when running concurrent workload
-
-# Testing a NNAPI Support Library implementation
-
-All tests documented above can be run using a NNAPI Support Library implementation.
-To do so you need to:
-
-- copy all the shared objects that make up the libraries into the `sl_prebuilt`
-  folder
-- Use the `-s` or `--use-nnapi-sl` option when running `build_and_run_benchmark.sh`.
-
-By default the system will use the sl_prebuilt/Android.bp.template to map
-every library under sl_prebuilt to a native library to include in the APK.
-The file is already configured for the Qualcomm NNAPI SL binaries.
-If you have different libraries than the ones defined under sl_prebuilt/Android.bp.template
-you should
-
-- configure a sl_prebuilt/Android.bp with the list of binaries you added.
-  You can use the sl_prebuilt/Android.bp.template file as an example template.
-
-- Set the SL_LIBS variable in Android.mk to the list of drivers
+a certain threshold when running concurrent workload
\ No newline at end of file
diff --git a/build_and_run_benchmark.sh b/build_and_run_benchmark.sh
index 4e889ff..c6be04b 100755
--- a/build_and_run_benchmark.sh
+++ b/build_and_run_benchmark.sh
@@ -7,9 +7,9 @@
 # which is not logged.
 
 if [[ "$OSTYPE" == "darwin"* ]]; then
-  OPTS="$(getopt f:rbsm:x -- "$*")"
+  OPTS="$(getopt f:rb -- "$*")"
 else
-  OPTS="$(getopt -o f:rbsm:x -l filter-driver:,include-nnapi-reference,nnapi-reference-only,skip-build,use-nnapi-sl,filter-model,extract-nnapi-sl -- "$@")"
+  OPTS="$(getopt -o f:rb -l filter-driver:,include-nnapi-reference,nnapi-reference-only,skip-build -- "$@")"
 fi
 
 if [ $? -ne 0 ]; then
@@ -18,17 +18,11 @@
     echo " -f <regex> : to run crash tests only on the drivers (ignoring nnapi-reference) matching the specified regular expression"
     echo " -r : to include nnapi-reference in target drivers"
     echo " -b : skip build and installation of tests"
-    echo " -s : use NNAPI Support Library drivers (embedded in the benchmark APK unless -x is specified)"
-    echo " -x : extract NNAPI Support Library drivers from the APK"
-    echo " -m <regex> : to filter the models used in the tests"
   else
     echo " -f <regex> | --filter-driver <regex> : to run crash tests only on the drivers (ignoring nnapi-reference) matching the specified regular expression"
     echo " -r | --include-nnapi-reference : to include nnapi-reference in target drivers"
     echo " --nnapi-reference-only : to run tests only vs nnapi-reference"
     echo " -b | --skip-build : skip build and installation of tests"
-    echo " -s | --use-nnapi-sl : use NNAPI Support Library drivers (embedded in the benchmark APK unless -x is specified)"
-    echo " -x | --extract-nnapi-sl : extract NNAPI Support Library drivers from the APK"
-    echo " -m <regex> : to filter the models used in the tests"
   fi
   exit
 fi
@@ -38,8 +32,6 @@
 DRIVER_FILTER_OPT=""
 INCLUDE_NNAPI_REF_OPT=""
 BUILD_AND_INSTALL=true
-NNAPI_SL_FILTER_OPT=""
-MODEL_FILTER_OPT=""
 while [ $# -gt 0 ] ; do
   case "$1" in
     -f|--filter-driver)
@@ -55,26 +47,10 @@
       INCLUDE_NNAPI_REF_OPT="-e nnCrashtestIncludeNnapiReference true"
       shift
       ;;
-    -m|--filter-model)
-      MODEL_FILTER_OPT="-e nnBenchmarkModelFilter $2"
-      shift 2
-      ;;
     -b|--skip-build)
       BUILD_AND_INSTALL=false
       shift
       ;;
-    -s|--use-nnapi-sl)
-      NNAPI_SL_FILTER_OPT+=" -e useNnApiSupportLibrary true"
-      shift
-      ;;
-    -x|--extract-nnapi-sl)
-      NNAPI_SL_FILTER_OPT+=" -e extractNnApiSupportLibrary true"
-      shift
-
-      echo "Creating configuration file with list of libraries"
-      mkdir sl_prebuilt/assets
-      ls sl_prebuilt/ 2>/dev/null | grep '.so'  >sl_prebuilt/assets/sl_prebuilt_filelist.txt
-      ;;
     --)
       shift
       break
@@ -158,22 +134,6 @@
 cd $ANDROID_BUILD_TOP
 
 if [ "$BUILD_AND_INSTALL" = true ]; then
-   if [ ! -z "$NNAPI_SL_FILTER_OPT" ]; then
-    SL_PREBUILT=test/mlts/benchmark/sl_prebuilt
-    if [ ! -n "$(ls -A $SL_PREBUILT/*.so 2>/dev/null)" ]; then
-      echo "There is no NNAPI SL binary file under $ANDROID_BUILD_TOP/$SL_PREBUILT, cannot test using NNAPI SL"
-      exit
-    fi
-    if [ ! -f "$SL_PREBUILT/Android.bp" ]; then
-      echo "================================================================"
-      echo "Enabling build of NNAPI SL libraries using template definition."
-      echo  "If the definitions in $SL_PREBUILT/Android.bp don't match the libraries you copied"
-      echo " please define your own version of $SL_PREBUILT/Android.bp"
-      echo "================================================================"
-      mv $SL_PREBUILT/Android.bp.template $SL_PREBUILT/Android.bp
-    fi
-  fi
-
   # Build and install benchmark app
   TMPFILE=$(mktemp)
   build/soong/soong_ui.bash --make-mode ${APP} 2>&1 | tee ${TMPFILE}
@@ -183,16 +143,6 @@
   else
       APK_DIR=${TARGET_ARCH}
   fi
-
-  if [ ! -z "$NNAPI_SL_FILTER_OPT" ]; then
-    if [ "$(unzip -l $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk | grep libnnapi_sl_driver | wc -l)" -ne 1 ]; then
-      echo "NNAPI SL Libraries are not included in the APK" \
-          "please check the library list is included in the LOCAL_JNI_SHARED_LIBRARIES list " \
-          "for ${APP}. Please check the value of SL_LIBS in Android.mk"
-      exit
-    fi
-  fi
-
   if ! adb install -r $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk; then
     adb uninstall com.android.nn.benchmark.app
     adb install -r $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk
@@ -217,7 +167,7 @@
 
 # Pass --no-isolated-storage to am instrument?
 BUILD_VERSION_RELEASE=`adb shell getprop ro.build.version.release`
-AM_INSTRUMENT_FLAGS="$DRIVER_FILTER_OPT $INCLUDE_NNAPI_REF_OPT $NNAPI_SL_FILTER_OPT $MODEL_FILTER_OPT"
+AM_INSTRUMENT_FLAGS="$DRIVER_FILTER_OPT $INCLUDE_NNAPI_REF_OPT"
 if [[ $BUILD_VERSION_RELEASE == "Q" ]]; then
   AM_INSTRUMENT_FLAGS+=" --no-isolated-storage"
 fi
diff --git a/crashtest/Android.mk b/crashtest/Android.mk
index 29aa86e..79689d8 100644
--- a/crashtest/Android.mk
+++ b/crashtest/Android.mk
@@ -30,9 +30,9 @@
     $(call all-java-files-under, ../src/com/android/nn/benchmark/imageprocessors) \
     $(call all-java-files-under, ../src/com/android/nn/benchmark/util) \
     $(call all-java-files-under, ../src/com/android/nn/crashtest/core) \
-    ../src/com/android/nn/benchmark/app/AcceleratorSpecificTestSupport.java
+    ../src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
 
-LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni libsupport_library_jni
+LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni
 
 LOCAL_SDK_VERSION := 27
 LOCAL_ASSET_DIR := $(LOCAL_PATH)/../../models/assets
diff --git a/crashtest/AndroidManifest.xml b/crashtest/AndroidManifest.xml
index 88890f5..e002d95 100644
--- a/crashtest/AndroidManifest.xml
+++ b/crashtest/AndroidManifest.xml
@@ -10,7 +10,6 @@
       android:label="@string/app_name"
       android:roundIcon="@mipmap/ic_launcher_round"
       android:supportsRtl="true"
-      android:debuggable="true"
       android:theme="@style/AppTheme">
     <activity android:name="com.android.nn.crashtest.MainActivity">
       <intent-filter>
@@ -24,12 +23,6 @@
         android:process=":CrashTest" />
     <service android:name="com.android.nn.crashtest.core.InProcessCrashTestService"
         android:process=".CrashTest" />
-    <!--provider entry is a workaround for b/195309961-->
-    <provider
-            xmlns:tools="http://schemas.android.com/tools"
-            android:name="androidx.startup.InitializationProvider"
-            android:authorities="com.google.android.storagemanager.androidx-startup"
-            tools:replace="android:authorities" />
   </application>
 
   <uses-sdk android:minSdkVersion="27" />
diff --git a/crashtest/src/com/android/nn/crashtest/MainActivity.java b/crashtest/src/com/android/nn/crashtest/MainActivity.java
index d4a9d82..d6f5e9c 100644
--- a/crashtest/src/com/android/nn/crashtest/MainActivity.java
+++ b/crashtest/src/com/android/nn/crashtest/MainActivity.java
@@ -275,16 +275,12 @@
         final String acceleratorName = mAcceleratorName.get();
         final boolean mmapModel = mMmapModel.isChecked();
         final boolean runModelCompilationOnly = mCompileModelsOnly.isChecked();
-        final boolean useNnapiSl = NNTestBase.shouldUseNnApiSupportLibrary();
-        final boolean extractNnapiSl = NNTestBase.shouldExtractNnApiSupportLibrary();
         coordinator.startTest(RunModelsInParallel.class,
-            RunModelsInParallel.intentInitializer(testList, threadCount,
-                Duration.ofMinutes(testDurationMinutes),
-                testName, acceleratorName, false,
-                runModelCompilationOnly, mmapModel, TestModels.getModelFilterRegex(), useNnapiSl,
-                extractNnapiSl),
-            testCompletionListener,
-            mUseSeparateProcess.get(), testName);
+                RunModelsInParallel.intentInitializer(testList, threadCount,
+                        Duration.ofMinutes(testDurationMinutes),
+                        testName, acceleratorName, false, runModelCompilationOnly, mmapModel),
+                testCompletionListener,
+                mUseSeparateProcess.get(), testName);
 
         mMessage.setText(
                 String.format(
diff --git a/dogfood/Android.mk b/dogfood/Android.mk
index 6cd07d8..52d4633 100644
--- a/dogfood/Android.mk
+++ b/dogfood/Android.mk
@@ -29,7 +29,7 @@
     $(call all-java-files-under, ../src/com/android/nn/benchmark/evaluators) \
     $(call all-java-files-under, ../src/com/android/nn/benchmark/imageprocessors) \
     $(call all-java-files-under, ../src/com/android/nn/benchmark/util)
-LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni libsupport_library_jni
+LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni
 
 LOCAL_SDK_VERSION := 28
 LOCAL_ASSET_DIR := $(LOCAL_PATH)/../../models/assets
diff --git a/dogfood/AndroidManifest.xml b/dogfood/AndroidManifest.xml
index c45bda3..5573e52 100644
--- a/dogfood/AndroidManifest.xml
+++ b/dogfood/AndroidManifest.xml
@@ -25,12 +25,6 @@
 	android:permission="android.permission.BIND_JOB_SERVICE" >
 
     </service>
-    <!--provider entry is a workaround for b/195309961-->
-    <provider
-            xmlns:tools="http://schemas.android.com/tools"
-            android:name="androidx.startup.InitializationProvider"
-            android:authorities="com.google.android.storagemanager.androidx-startup"
-            tools:replace="android:authorities" />
   </application>
 
 </manifest>
diff --git a/jni/Android.bp b/jni/Android.bp
index 1dc575c..b9d4914 100644
--- a/jni/Android.bp
+++ b/jni/Android.bp
@@ -25,7 +25,6 @@
 cc_defaults {
     name: "libnnbenchmark_jni_defaults",
     sdk_version: "current",
-    min_sdk_version: "29",
     srcs: [
         "benchmark_jni.cpp",
         "crashtest_jni.cpp",
@@ -97,31 +96,3 @@
         keep_symbols: true,
     },
 }
-
-cc_library {
-    name: "libsupport_library_jni",
-    sdk_version: "current",
-    min_sdk_version: "29",
-    srcs: [
-        "support_library_jni.cpp",
-    ],
-    header_libs: [
-        "flatbuffer_headers",
-        "jni_headers",
-        "tensorflow_headers",
-    ],
-    shared_libs: [
-        "liblog",
-    ],
-    static_libs: [
-        "libtflite_static",
-    ],
-    cflags: [
-        "-Wno-sign-compare",
-        "-Wno-unused-parameter",
-    ],
-    stl: "libc++_static",
-    strip: {
-        keep_symbols: true,
-    },
-}
\ No newline at end of file
diff --git a/jni/benchmark_jni.cpp b/jni/benchmark_jni.cpp
index 344c7c0..21bf2d4 100644
--- a/jni/benchmark_jni.cpp
+++ b/jni/benchmark_jni.cpp
@@ -16,6 +16,8 @@
 
 #include "run_tflite.h"
 
+#include "tensorflow/lite/nnapi/nnapi_implementation.h"
+
 #include <jni.h>
 #include <string>
 #include <iomanip>
@@ -26,6 +28,7 @@
 #include <android/log.h>
 #include <android/sharedmem.h>
 #include <sys/mman.h>
+#include "tensorflow/lite/nnapi/nnapi_implementation.h"
 
 extern "C" JNIEXPORT jboolean JNICALL
 Java_com_android_nn_benchmark_core_NNTestBase_hasNnApiDevice(
@@ -66,8 +69,7 @@
         jboolean _enableIntermediateTensorsDump,
         jstring _nnApiDeviceName,
         jboolean _mmapModel,
-        jstring _nnApiCacheDir,
-        jlong _nnApiSlHandle) {
+        jstring _nnApiCacheDir) {
     const char *modelFileName = env->GetStringUTFChars(_modelFileName, NULL);
     const char *nnApiDeviceName =
         _nnApiDeviceName == NULL
@@ -77,12 +79,10 @@
         _nnApiCacheDir == NULL
             ? NULL
             : env->GetStringUTFChars(_nnApiCacheDir, NULL);
-     const tflite::nnapi::NnApiSupportLibrary *nnApiSlHandle =
-          (const tflite::nnapi::NnApiSupportLibrary *)_nnApiSlHandle;
     int nnapiErrno = 0;
     void *handle = BenchmarkModel::create(
         modelFileName, _tfliteBackend, _enableIntermediateTensorsDump, &nnapiErrno,
-        nnApiDeviceName, _mmapModel, nnApiCacheDir, nnApiSlHandle);
+        nnApiDeviceName, _mmapModel, nnApiCacheDir);
     env->ReleaseStringUTFChars(_modelFileName, modelFileName);
     if (_nnApiDeviceName != NULL) {
         env->ReleaseStringUTFChars(_nnApiDeviceName, nnApiDeviceName);
@@ -101,8 +101,6 @@
     return (jlong)(uintptr_t)handle;
 }
 
-
-
 extern "C"
 JNIEXPORT void
 JNICALL
@@ -501,8 +499,7 @@
     jlong _modelHandle,
     jint maxNumIterations,
     jfloat warmupTimeoutSec,
-    jfloat runTimeoutSec,
-    jboolean useNnapiSl) {
+    jfloat runTimeoutSec) {
   BenchmarkModel* model = reinterpret_cast<BenchmarkModel*>(_modelHandle);
 
   jclass result_class = env->FindClass("com/android/nn/benchmark/core/CompilationBenchmarkResult");
@@ -512,8 +509,7 @@
 
   CompilationBenchmarkResult result;
   bool success =
-          model->benchmarkCompilation(maxNumIterations, warmupTimeoutSec,
-                                      runTimeoutSec, useNnapiSl, &result);
+          model->benchmarkCompilation(maxNumIterations, warmupTimeoutSec, runTimeoutSec, &result);
   if (!success) return nullptr;
 
   // Convert cpp CompilationBenchmarkResult struct to java.
diff --git a/jni/run_tflite.cpp b/jni/run_tflite.cpp
index 360a9ca..dfc3c82 100644
--- a/jni/run_tflite.cpp
+++ b/jni/run_tflite.cpp
@@ -31,7 +31,6 @@
 #include "tensorflow/lite/nnapi/NeuralNetworksTypes.h"
 
 #include "tensorflow/lite/kernels/register.h"
-#include "tensorflow/lite/nnapi/sl/include/SupportLibrary.h"
 
 #define LOG_TAG "NN_BENCHMARK"
 
@@ -69,43 +68,15 @@
 }
 static TraceFunc kTraceFunc{setupTraceFunc()};
 
-// Returns the number of partitions associated, as result of a call to
-// ModifyGraphWithDelegate, to the given delegate.
-int CountPartitionsDelegatedTo(tflite::Subgraph* subgraph,
-                               const TfLiteDelegate* delegate) {
-  return std::count_if(
-      subgraph->nodes_and_registration().begin(),
-      subgraph->nodes_and_registration().end(),
-      [delegate](
-          std::pair<TfLiteNode, TfLiteRegistration> node_and_registration) {
-        return node_and_registration.first.delegate == delegate;
-      });
-}
-
-// Returns the number of partitions associated, as result of a call to
-// ModifyGraphWithDelegate, to the given delegate.
-int CountPartitionsDelegatedTo(tflite::Interpreter* interpreter,
-                               const TfLiteDelegate* delegate) {
-  int result = 0;
-  for (int i = 0; i < interpreter->subgraphs_size(); i++) {
-    tflite::Subgraph* subgraph = interpreter->subgraph(i);
-
-    result += CountPartitionsDelegatedTo(subgraph, delegate);
-  }
-
-  return result;
-}
-
 }  // namespace
 
 BenchmarkModel* BenchmarkModel::create(const char* modelfile, int tfliteBackend,
                                        bool enable_intermediate_tensors_dump, int* nnapiErrno,
                                        const char* nnapi_device_name, bool mmapModel,
-                                       const char* nnapi_cache_dir,
-                                       const tflite::nnapi::NnApiSupportLibrary* nnApiSl) {
+                                       const char* nnapi_cache_dir) {
   BenchmarkModel* model = new BenchmarkModel();
   if (!model->init(modelfile, tfliteBackend, enable_intermediate_tensors_dump, nnapiErrno,
-                   nnapi_device_name, mmapModel, nnapi_cache_dir, nnApiSl)) {
+                   nnapi_device_name, mmapModel, nnapi_cache_dir)) {
     __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Failed to init model %s", modelfile);
     delete model;
     return nullptr;
@@ -116,8 +87,7 @@
 bool BenchmarkModel::init(const char* modelfile, int tfliteBackend,
                           bool enable_intermediate_tensors_dump, int* nnapiErrno,
                           const char* nnapi_device_name, bool mmapModel,
-                          const char* nnapi_cache_dir,
-                          const tflite::nnapi::NnApiSupportLibrary* nnApiSl) {
+                          const char* nnapi_cache_dir) {
   __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "BenchmarkModel %s",
                       modelfile);
   mModelFile = modelfile;
@@ -171,44 +141,18 @@
   switch (mTfliteBackend) {
     case TFLITE_NNAPI: {
       tflite::StatefulNnApiDelegate::Options nnapi_options;
-      nnapi_options.accelerator_name = mNnApiDeviceName.empty() ? nullptr : mNnApiDeviceName.c_str();
-      __android_log_print(ANDROID_LOG_INFO, LOG_TAG,
-          "Delegating to NNAPI device '%s'", mNnApiDeviceName.c_str());
-      if (nnApiSl) {
-        mNnApiSl = nnApiSl;
-        __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Using NNAPI SL");
-      }
-      mTfliteNnapiDelegate =
-          nnApiSl
-              ? std::make_unique<tflite::StatefulNnApiDelegate>(nnApiSl, nnapi_options)
-              : std::make_unique<tflite::StatefulNnApiDelegate>(nnapi_options);
+      nnapi_options.accelerator_name = nnapi_device_name;
+      mTfliteNnapiDelegate = std::make_unique<tflite::StatefulNnApiDelegate>(nnapi_options);
       int delegationStatus = mTfliteInterpreter->ModifyGraphWithDelegate(mTfliteNnapiDelegate.get());
       *nnapiErrno = mTfliteNnapiDelegate->GetNnApiErrno();
-      if ((delegationStatus == kTfLiteOk) &&
-          (*nnapiErrno == ANEURALNETWORKS_NO_ERROR)) {
-        int nnapiPartitions =
-          CountPartitionsDelegatedTo(mTfliteInterpreter.get(), mTfliteNnapiDelegate.get());
-        if (nnapiPartitions == 0) {
-          __android_log_print(
-              ANDROID_LOG_ERROR, LOG_TAG,
-              "NNAPI Delegate (%s) for model %s was delegated with %d partitions delegated to NNAPI!!",
-              nnapi_device_name, modelfile, nnapiPartitions);
-
-              return false;
-        } else {
-          __android_log_print(
-              ANDROID_LOG_INFO, LOG_TAG,
-              "NNAPI Delegate (%s) for model %s initialized successfully with %d partitions delegated to NNAPI",
-              nnapi_device_name, modelfile, nnapiPartitions);
-        }
-      } else {
+      if (delegationStatus != kTfLiteOk ||
+          *nnapiErrno != ANEURALNETWORKS_NO_ERROR) {
         __android_log_print(
-              ANDROID_LOG_ERROR, LOG_TAG,
-              "Failed to initialize NNAPI Delegate for model %s, nnapi_errno is %d",
-              modelfile, *nnapiErrno);
-          return false;
+            ANDROID_LOG_ERROR, LOG_TAG,
+            "Failed to initialize NNAPI Delegate for model %s, nnapi_errno is %d",
+            modelfile, *nnapiErrno);
+        return false;
       }
-
     } break;
     case TFLITE_GPU: {
 #if defined(NN_BENCHMARK_ENABLE_GPU)
@@ -218,15 +162,6 @@
         __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
                             "Failed to initialize GPU Delegate");
         return false;
-      } else {
-        int gpuPartitions =
-          CountPartitionsDelegatedTo(mTfliteInterpreter.get(), mGpuDelegate.get());
-        if (gpuPartitions == 0) {
-              ANDROID_LOG_INFO, LOG_TAG,
-                "GPU Delegate (%s) for model %s initialized successfully with %d partitions delegated",
-                nnapi_device_name, modelfile, gpuPartitions);
-          return false;
-        }
       }
 #else  // !defined(NN_BENCHMARK_ENABLE_GPU)
         __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
@@ -447,8 +382,7 @@
 }
 
 // If cacheDir is not nullptr, compilation caching will be used with NNAPI.
-bool BenchmarkModel::runCompilation(const char* cacheDir, bool useNnapiSl) {
-  std::unique_ptr<tflite::StatefulNnApiDelegate> delegate;
+bool BenchmarkModel::runCompilation(const char* cacheDir) {
   std::unique_ptr<tflite::Interpreter> interpreter;
   tflite::ops::builtin::BuiltinOpResolver resolver;
   tflite::InterpreterBuilder(*mTfliteModel, resolver)(&interpreter);
@@ -467,36 +401,14 @@
       nnapi_options.cache_dir = cacheDir;
       nnapi_options.model_token = mModelFile.c_str();
     }
-    if (useNnapiSl) {
-      __android_log_print(ANDROID_LOG_INFO, LOG_TAG,
-                          "Use NNAPI SL in compilation caching benchmark.");
-      if (!mNnApiSl) {
-        __android_log_print(ANDROID_LOG_ERROR,
-                            LOG_TAG,
-                            "NNAPI SL is null pointer when running compilation caching benchmark.");
-        return false;
-      }
-      delegate = std::make_unique<tflite::StatefulNnApiDelegate>(mNnApiSl, nnapi_options);
-    } else {
-      delegate = std::make_unique<tflite::StatefulNnApiDelegate>(nnapi_options);
-    }
-    int delegationStatus = interpreter->ModifyGraphWithDelegate(delegate.get());
-    auto nnapiErrno = delegate->GetNnApiErrno();
+    tflite::StatefulNnApiDelegate delegate(nnapi_options);
+    int delegationStatus = interpreter->ModifyGraphWithDelegate(&delegate);
+    auto nnapiErrno = delegate.GetNnApiErrno();
     if (delegationStatus != kTfLiteOk || nnapiErrno != ANEURALNETWORKS_NO_ERROR) {
       __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
                           "Failed to initialize NNAPI Delegate for model %s, nnapi_errno is %d",
                           mModelFile.c_str(), nnapiErrno);
       return false;
-    } else {
-      int nnapiPartitions =
-        CountPartitionsDelegatedTo(interpreter.get(), delegate.get());
-      if (nnapiPartitions == 0) {
-        __android_log_print(
-            ANDROID_LOG_ERROR, LOG_TAG,
-            "NNAPI Delegate (%s) for model %s was delegated with %d partitions delegated to NNAPI!!",
-            mNnApiDeviceName.c_str(), mModelFile.c_str(), nnapiPartitions);
-            return false;
-      }
     }
   }
   return true;
@@ -534,13 +446,13 @@
   std::string mTempDir;
 };
 
-bool BenchmarkModel::getCompilationCacheSize(int* cacheSizeBytes, bool useNnapiSl) {
+bool BenchmarkModel::getCompilationCacheSize(int* cacheSizeBytes) {
   if (cacheSizeBytes == nullptr) return false;
 
   // Create cache files.
   ScopedTempDirectory tempDir(mCacheDir.value());
   tempDir.recreate();
-  const bool success = runCompilation(tempDir.get(), useNnapiSl);
+  const bool success = runCompilation(tempDir.get());
   if (!success) {
     __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Save to cache failed");
     return false;
@@ -576,7 +488,6 @@
 
 bool BenchmarkModel::benchmarkSingleTypeOfCompilation(CompilationBenchmarkType type,
                                                       int maxNumIterations, float timeout,
-                                                      bool useNnapiSl,
                                                       std::vector<float>* results) {
   if (results != nullptr) {
     results->clear();
@@ -586,7 +497,7 @@
   // Initialize cache files to benchmark cache hit.
   if (type == CompilationBenchmarkType::PREPARE_FROM_CACHE) {
     tempDir.recreate();
-    const bool success = runCompilation(tempDir.get(), useNnapiSl);
+    const bool success = runCompilation(tempDir.get());
     if (!success) {
       __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Save to cache failed");
       return false;
@@ -615,7 +526,7 @@
 
     kTraceFunc.ATrace_beginSection("[NN_LA_PC]BenchmarkModel::benchmarkCompilation");
     const long long startTime = currentTimeInUsec();
-    const bool success = runCompilation(cacheDir, useNnapiSl);
+    const bool success = runCompilation(cacheDir);
     const long long endTime = currentTimeInUsec();
     kTraceFunc.ATrace_endSection();
     if (!success) {
@@ -641,38 +552,27 @@
                                                                 int maxNumIterations,
                                                                 float warmupTimeout,
                                                                 float runTimeout,
-                                                                bool useNnapiSl,
                                                                 std::vector<float>* results) {
   kTraceFunc.ATrace_beginSection(
-      "[NN_LA_PWM]BenchmarkModel::benchmarkSingleTypeOfCompilationWithWarmup");
-  bool success = benchmarkSingleTypeOfCompilation(type,
-                                                  maxNumIterations,
-                                                  warmupTimeout,
-                                                  useNnapiSl,
-                                                  nullptr);
+          "[NN_LA_PWM]BenchmarkModel::benchmarkSingleTypeOfCompilationWithWarmup");
+  bool success = benchmarkSingleTypeOfCompilation(type, maxNumIterations, warmupTimeout, nullptr);
   kTraceFunc.ATrace_endSection();
   if (!success) return false;
 
   kTraceFunc.ATrace_beginSection(
-      "[NN_LA_PBM]BenchmarkModel::benchmarkSingleTypeOfCompilationWithWarmup");
-  success = benchmarkSingleTypeOfCompilation(type,
-                                             maxNumIterations,
-                                             runTimeout,
-                                             useNnapiSl,
-                                             results);
+          "[NN_LA_PBM]BenchmarkModel::benchmarkSingleTypeOfCompilationWithWarmup");
+  success = benchmarkSingleTypeOfCompilation(type, maxNumIterations, runTimeout, results);
   kTraceFunc.ATrace_endSection();
   return success;
 }
 
 bool BenchmarkModel::benchmarkCompilation(int maxNumIterations, float warmupTimeout,
-                                          float runTimeout, bool useNnapiSl,
-                                          CompilationBenchmarkResult* result) {
+                                          float runTimeout, CompilationBenchmarkResult* result) {
   if (result == nullptr) return false;
 
   // Benchmark compile without cache.
   bool success = benchmarkSingleTypeOfCompilationWithWarmup(
-          CompilationBenchmarkType::WITHOUT_CACHE, maxNumIterations,
-          warmupTimeout, runTimeout, useNnapiSl,
+          CompilationBenchmarkType::WITHOUT_CACHE, maxNumIterations, warmupTimeout, runTimeout,
           &result->compileWithoutCacheTimeSec);
   if (!success) {
     __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
@@ -681,7 +581,7 @@
   }
 
   // Get compilation cache size.
-  success = getCompilationCacheSize(&result->cacheSizeBytes, useNnapiSl);
+  success = getCompilationCacheSize(&result->cacheSizeBytes);
   if (!success) {
     __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Failed to retrieve compilation cache size");
     return false;
@@ -692,8 +592,7 @@
     // Benchmark saving to cache.
     auto& saveToCacheTimeSec = result->saveToCacheTimeSec.emplace();
     success = benchmarkSingleTypeOfCompilationWithWarmup(
-            CompilationBenchmarkType::SAVE_TO_CACHE, maxNumIterations,
-            warmupTimeout, runTimeout, useNnapiSl,
+            CompilationBenchmarkType::SAVE_TO_CACHE, maxNumIterations, warmupTimeout, runTimeout,
             &saveToCacheTimeSec);
     if (!success) {
       __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Failed to benchmark saving to cache");
@@ -703,9 +602,8 @@
     // Benchmark preparing from cache.
     auto& prepareFromCacheTimeSec = result->prepareFromCacheTimeSec.emplace();
     success = benchmarkSingleTypeOfCompilationWithWarmup(
-            CompilationBenchmarkType::PREPARE_FROM_CACHE, maxNumIterations,
-            warmupTimeout, runTimeout, useNnapiSl,
-            &prepareFromCacheTimeSec);
+            CompilationBenchmarkType::PREPARE_FROM_CACHE, maxNumIterations, warmupTimeout,
+            runTimeout, &prepareFromCacheTimeSec);
     if (!success) {
       __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, "Failed to benchmark preparing from cache");
       return false;
diff --git a/jni/run_tflite.h b/jni/run_tflite.h
index 79fc954..71c7747 100644
--- a/jni/run_tflite.h
+++ b/jni/run_tflite.h
@@ -21,7 +21,6 @@
 #include "tensorflow/lite/delegates/nnapi/nnapi_delegate.h"
 #include "tensorflow/lite/interpreter.h"
 #include "tensorflow/lite/model.h"
-#include "tensorflow/lite/nnapi/sl/include/SupportLibrary.h"
 
 #include <memory>
 #include <unistd.h>
@@ -99,8 +98,7 @@
   static BenchmarkModel* create(const char* modelfile, int tfliteBackend,
                                 bool enable_intermediate_tensors_dump,
                                 int* nnapiErrno, const char* nnapi_device_name,
-                                bool mmapModel, const char* nnapi_cache_dir,
-                                const tflite::nnapi::NnApiSupportLibrary* nnApiSl = nullptr);
+                                bool mmapModel, const char* nnapi_cache_dir);
 
   bool resizeInputTensors(std::vector<int> shape);
   bool setInput(const uint8_t* dataPtr, size_t length);
@@ -112,10 +110,7 @@
                  int seqInferencesMaxCount, float timeout, int flags,
                  std::vector<InferenceResult>* result);
 
-  bool benchmarkCompilation(int maxNumIterations,
-                            float warmupTimeout,
-                            float runTimeout,
-                            bool useNnapiSl,
+  bool benchmarkCompilation(int maxNumIterations, float warmupTimeout, float runTimeout,
                             CompilationBenchmarkResult* result);
 
   bool dumpAllLayers(const char* path,
@@ -129,31 +124,24 @@
             /* flag to choose between memory mapping the model and initializing
                 the model from programs memory*/
             bool mmapModel,
-            const char* nnapi_cache_dir,
-            const tflite::nnapi::NnApiSupportLibrary* nnApiSl = nullptr);
+            const char* nnapi_cache_dir);
 
   void getOutputError(const uint8_t* dataPtr, size_t length,
                       InferenceResult* result, int output_index);
   void saveInferenceOutput(InferenceResult* result, int output_index);
 
-  bool runCompilation(const char* cacheDir, bool useNnapiSl);
-  bool benchmarkSingleTypeOfCompilation(CompilationBenchmarkType type,
-                                        int maxNumIterations,
-                                        float timeout,
-                                        bool useNnapiSl,
-                                        std::vector<float>* results);
+  bool runCompilation(const char* cacheDir);
+  bool benchmarkSingleTypeOfCompilation(CompilationBenchmarkType type, int maxNumIterations,
+                                        float timeout, std::vector<float>* results);
   bool benchmarkSingleTypeOfCompilationWithWarmup(CompilationBenchmarkType type,
-                                                  int maxNumIterations,
-                                                  float warmupTimeout,
-                                                  float runTimeout,
-                                                  bool useNnapiSl,
-                                                  std::vector<float>* results);
-  bool getCompilationCacheSize(int* cacheSizeBytes, bool useNnapiSl);
+                                                  int maxNumIterations, float warmupTimeout,
+                                                  float runTimeout, std::vector<float>* results);
+  bool getCompilationCacheSize(int* cacheSizeBytes);
 
   std::string mModelBuffer;
   std::unique_ptr<tflite::FlatBufferModel> mTfliteModel;
-  std::unique_ptr<tflite::StatefulNnApiDelegate> mTfliteNnapiDelegate;
   std::unique_ptr<tflite::Interpreter> mTfliteInterpreter;
+  std::unique_ptr<tflite::StatefulNnApiDelegate> mTfliteNnapiDelegate;
   // Store indices of output tensors, used to dump intermediate tensors
   std::vector<int> outputs;
 
@@ -161,7 +149,6 @@
   std::string mModelFile;
   std::optional<std::string> mCacheDir;
   std::string mNnApiDeviceName;
-  const tflite::nnapi::NnApiSupportLibrary* mNnApiSl = nullptr;
 #if defined(NN_BENCHMARK_ENABLE_GPU)
   TfLiteDelegate* mGpuDelegate;
 #endif  // defined(NN_BENCHMARK_ENABLE_GPU)
diff --git a/jni/support_library_jni.cpp b/jni/support_library_jni.cpp
deleted file mode 100644
index 07f1728..0000000
--- a/jni/support_library_jni.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Copyright 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "run_tflite.h"
-
-#include <jni.h>
-#include <string>
-#include <iomanip>
-#include <sstream>
-#include <fcntl.h>
-
-#include <android/log.h>
-
-#include "tensorflow/lite/nnapi/nnapi_implementation.h"
-
-#define LOG_TAG "NN_BENCHMARK"
-
-
-// This method loads the NNAPI SL from the given path.
-// Is called by a synchronized method in NNTestBase that will cache the
-// result. We expect this to be called only once per JVM and the handle
-// to be released when the JVM is shut down.
-extern "C" JNIEXPORT jlong JNICALL
-Java_com_android_nn_benchmark_core_sl_SupportLibraryDriverHandler_loadNnApiSlHandle(
-    JNIEnv *env, jobject /* clazz */, jstring _nnapiSlDriverPath) {
-  if (_nnapiSlDriverPath != NULL) {
-    const char *nnapiSlDriverPath =
-        env->GetStringUTFChars(_nnapiSlDriverPath, NULL);
-    std::unique_ptr<const tflite::nnapi::NnApiSupportLibrary> tmp =
-        tflite::nnapi::loadNnApiSupportLibrary(nnapiSlDriverPath);
-    if (!tmp) {
-      __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
-                          "Failed to load NNAPI SL driver from '%s'",
-                          nnapiSlDriverPath);
-      return false;
-    }
-    __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Loaded NNAPI SL");
-    return (jlong)(uintptr_t)tmp.release();
-  }
-
-  return 0L;
-}
diff --git a/sl_prebuilt/Android.bp.template b/sl_prebuilt/Android.bp.template
deleted file mode 100644
index 1d37685..0000000
--- a/sl_prebuilt/Android.bp.template
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package {
-    // http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // the below license kinds from "test_mlts_benchmark_license":
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["test_mlts_benchmark_license"],
-}
-
-// Definition of NNAPI SL Libraries for QC distribution
-
-cc_prebuilt_library_shared {
-    name:  "libnnapi_sl_driver",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libnnapi_sl_driver.so"],
-        },
-    },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnGpu",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnGpu.so" ],
-       },
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnHtp",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnHtp.so" ],
-       },
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnHtpPrepare",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnHtpPrepare.so" ],
-       },
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnHtpV68Skel",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnHtpV68Skel.so" ],
-       },
-   },
-   strip: {
-     none: true
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnHtpV68Stub",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnHtpV68Stub.so" ],
-       },
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnHtpV69Skel",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnHtpV69Skel.so" ],
-       },
-   },
-   strip: {
-     none: true
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libQnnHtpV69Stub",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libQnnHtpV69Stub.so" ],
-       },
-   },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libUnnhalAccGpu",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libUnnhalAccGpu.so" ],
-        },
-    },
-}
-
-cc_prebuilt_library_shared {
-    name:  "libUnnhalAccHtp",
-    check_elf_files: false,
-    target: {
-        android_arm64: {
-            srcs: [  "libUnnhalAccHtp.so" ],
-        },
-    },
-}
-
-
diff --git a/sl_prebuilt/assets/dsp_loaded_libraries.txt b/sl_prebuilt/assets/dsp_loaded_libraries.txt
deleted file mode 100644
index 427e554..0000000
--- a/sl_prebuilt/assets/dsp_loaded_libraries.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-libQnnHtp.so
-libQnnHtpPrepare.so
-libUnnhalAccHtp.so
-libQnnHtpV68Skel.so
-libQnnHtpV68Stub.so
-libQnnHtpV69Skel.so
-libQnnHtpV69Stub.so
diff --git a/src/com/android/nn/benchmark/app/BenchmarkTestBase.java b/src/com/android/nn/benchmark/app/BenchmarkTestBase.java
index 294e439..859d14d 100644
--- a/src/com/android/nn/benchmark/app/BenchmarkTestBase.java
+++ b/src/com/android/nn/benchmark/app/BenchmarkTestBase.java
@@ -16,12 +16,6 @@
 
 package com.android.nn.benchmark.app;
 
-import static org.junit.Assume.assumeTrue;
-
-import com.android.nn.benchmark.core.NNTestBase;
-import com.android.nn.benchmark.core.NnApiDelegationFailure;
-import com.android.nn.benchmark.core.Processor;
-
 import android.app.Activity;
 import android.content.BroadcastReceiver;
 import android.content.Context;
@@ -39,7 +33,6 @@
 import com.android.nn.benchmark.core.TestModels;
 import com.android.nn.benchmark.core.TestModels.TestModelEntry;
 
-import java.util.stream.Collectors;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.runner.RunWith;
@@ -62,7 +55,6 @@
     // One iteration means running the tests continuous for 1s.
     private NNBenchmark mActivity;
     protected final TestModelEntry mModel;
-    protected final String mAcceleratorName;
 
     // The default 0.3s warmup and 1.0s runtime give reasonably repeatable results (run-to-run
     // variability of ~20%) when run under performance settings (fixed CPU cores enabled and at
@@ -84,21 +76,13 @@
     protected static final float COMPILATION_RUNTIME_SECONDS = 10.f;
     protected static final int COMPILATION_MAX_ITERATIONS = 100;
 
-    public BenchmarkTestBase(TestModelEntry model, String acceleratorName) {
+    public BenchmarkTestBase(TestModelEntry model) {
         super(NNBenchmark.class);
         mModel = model;
-        mAcceleratorName = acceleratorName;
     }
 
     protected void setUseNNApi(boolean useNNApi) {
         mActivity.setUseNNApi(useNNApi);
-        if (useNNApi) {
-            final boolean useNnApiSupportLibrary = NNTestBase.shouldUseNnApiSupportLibrary();
-            final boolean extractNnApiSupportLibrary = NNTestBase.shouldExtractNnApiSupportLibrary();
-            Log.i(NNBenchmark.TAG, "Configuring usage of NNAPI SL to " + useNnApiSupportLibrary);
-            mActivity.setUseNnApiSupportLibrary(useNnApiSupportLibrary);
-            mActivity.setExtractNnApiSupportLibrary(extractNnApiSupportLibrary);
-        }
     }
 
     protected void setNnApiAcceleratorName(String acceleratorName) {
@@ -114,25 +98,11 @@
                 COMPILATION_RUNTIME_SECONDS, COMPILATION_MAX_ITERATIONS);
     }
 
-    private boolean isModelSupported() {
-        try {
-            return Processor.isTestModelSupportedByAccelerator(mActivity, mModel, mAcceleratorName);
-        } catch (NnApiDelegationFailure delegationFailure) {
-            throw new Error(
-                String.format("Failure checking if model %s is supported on accelerator %s ",
-                    mModel.mModelName, mAcceleratorName), delegationFailure);
-        }
-    }
-
     // Initialize the parameter for ImageProcessingActivityJB.
     protected void prepareTest() {
         injectInstrumentation(InstrumentationRegistry.getInstrumentation());
         mActivity = getActivity();
         mActivity.prepareInstrumentationTest();
-        if (mAcceleratorName != null) {
-            assumeTrue(isModelSupported());
-            mActivity.setNnApiAcceleratorName(mAcceleratorName);
-        }
         setUseNNApi(true);
     }
 
@@ -295,13 +265,8 @@
         getInstrumentation().sendStatus(Activity.RESULT_OK, bmValue.toBundle(testName));
     }
 
-    @Parameters(name = "{0} model on accelerator {1}")
-    public static List<Object[]> modelsOnAccelerators() {
-        Log.d(NNBenchmark.TAG, "Calculating list of models");
-        List<Object[]> result = AcceleratorSpecificTestSupport.maybeAddAcceleratorsToTestConfig(
-            TestModels.modelsList().stream().map(model -> new Object[] {model}).collect(Collectors.toList())
-        );
-        Log.d(NNBenchmark.TAG, "Returning list of models of size " + result.size());
-        return result;
+    @Parameters(name = "{0}")
+    public static List<TestModelEntry> modelsList() {
+        return TestModels.modelsList();
     }
 }
diff --git a/src/com/android/nn/benchmark/app/NNBenchmark.java b/src/com/android/nn/benchmark/app/NNBenchmark.java
index a0e6f28..ea563c2 100644
--- a/src/com/android/nn/benchmark/app/NNBenchmark.java
+++ b/src/com/android/nn/benchmark/app/NNBenchmark.java
@@ -48,9 +48,6 @@
 
     private int mTestList[];
 
-    private boolean mUseNnApiSupportLibrary = false;
-    private boolean mExtractNnApiSupportLibrary = false;
-
     private Processor mProcessor;
     private final ExecutorService executorService = Executors.newSingleThreadExecutor();
 
@@ -80,16 +77,6 @@
                 warmupTimeSeconds, runTimeSeconds, maxIterations);
     }
 
-    public void setUseNnApiSupportLibrary(boolean value) {
-        mUseNnApiSupportLibrary = value;
-        mProcessor.setUseNnApiSupportLibrary(mUseNnApiSupportLibrary);
-    }
-
-    public void setExtractNnApiSupportLibrary(boolean value) {
-        mExtractNnApiSupportLibrary = value;
-        mProcessor.setExtractNnApiSupportLibrary(value);
-    }
-
     @SuppressLint("SetTextI18n")
     @Override
     protected void onCreate(Bundle savedInstanceState) {
@@ -144,8 +131,6 @@
             mProcessor.setTogglePause(i.getBooleanExtra(EXTRA_ENABLE_PAUSE, false));
             mProcessor.setTfLiteBackend(!i.getBooleanExtra(EXTRA_DISABLE_NNAPI, false) ? TfLiteBackend.NNAPI : TfLiteBackend.CPU);
             mProcessor.setMaxRunIterations(i.getIntExtra(EXTRA_MAX_ITERATIONS, 0));
-            mProcessor.setUseNnApiSupportLibrary(mUseNnApiSupportLibrary);
-            mProcessor.setExtractNnApiSupportLibrary(mExtractNnApiSupportLibrary);
             executorService.submit(mProcessor);
         } else {
             Log.v(TAG, "No test to run, doing nothing");
diff --git a/src/com/android/nn/benchmark/app/NNCrystalBallTest.java b/src/com/android/nn/benchmark/app/NNCrystalBallTest.java
index 2cabb66..998dd42 100644
--- a/src/com/android/nn/benchmark/app/NNCrystalBallTest.java
+++ b/src/com/android/nn/benchmark/app/NNCrystalBallTest.java
@@ -23,7 +23,6 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import android.util.Log;
 
 /**
  * NNAPI benchmark test.
@@ -42,17 +41,13 @@
 @RunWith(Parameterized.class)
 public class NNCrystalBallTest extends BenchmarkTestBase {
 
-    public NNCrystalBallTest(TestModels.TestModelEntry model, String acceleratorName) {
-        super(model, acceleratorName);
+    public NNCrystalBallTest(TestModels.TestModelEntry model) {
+        super(model);
     }
 
     private void test(boolean useNnapi, boolean useCompleteInputSet) throws IOException {
         setUseNNApi(useNnapi);
         setCompleteInputSet(useCompleteInputSet);
-        if (useNnapi && mAcceleratorName != null) {
-            Log.i(NNBenchmark.TAG, "Using accelerator " + mAcceleratorName);
-            setNnApiAcceleratorName(mAcceleratorName);
-        }
         enableCompilationCachingBenchmarks();
         TestAction ta = new TestAction(mModel, WARMUP_REPEATABLE_SECONDS,
             useCompleteInputSet ? COMPLETE_SET_TIMEOUT_SECOND : RUNTIME_REPEATABLE_SECONDS);
diff --git a/src/com/android/nn/benchmark/app/NNInferenceStressTest.java b/src/com/android/nn/benchmark/app/NNInferenceStressTest.java
index e1313b7..270c4fe 100644
--- a/src/com/android/nn/benchmark/app/NNInferenceStressTest.java
+++ b/src/com/android/nn/benchmark/app/NNInferenceStressTest.java
@@ -41,15 +41,19 @@
 
     private static final float WARMUP_SECONDS = 0; // No warmup.
     private static final float RUNTIME_SECONDS = Duration.ofHours(1).getSeconds();
-    private static final long LONG_STRESS_TEST_DURATION_SECONDS = Duration.ofSeconds(60).getSeconds();
+    private static final long LONG_STRESS_TEST_DURATION_SECONDS = Duration.ofHours(4).getSeconds();
 
-    public NNInferenceStressTest(TestModels.TestModelEntry model, String acceleratorName) {
-        super(model, acceleratorName);
+    public NNInferenceStressTest(TestModels.TestModelEntry model) {
+        super(model);
     }
 
-    @Parameters(name = "{0} model on accelerator {1}")
-    public static List<Object[]> modelsList() {
-        return NNModelLoadingStressTest.modelsList();
+    @Parameters(name = "{0}")
+    public static List<TestModels.TestModelEntry> modelsList() {
+        return TestModels.modelsList().stream()
+                .map(TestModels.TestModelEntry::withDisabledEvaluation)
+                .collect(Collectors.collectingAndThen(
+                        Collectors.toList(),
+                        Collections::unmodifiableList));
     }
 
     @Test
diff --git a/src/com/android/nn/benchmark/app/NNModelLoadingStressTest.java b/src/com/android/nn/benchmark/app/NNModelLoadingStressTest.java
index b46a1e9..dd446d6 100644
--- a/src/com/android/nn/benchmark/app/NNModelLoadingStressTest.java
+++ b/src/com/android/nn/benchmark/app/NNModelLoadingStressTest.java
@@ -42,21 +42,17 @@
 
     @Rule public Stopwatch stopwatch = new Stopwatch() {};
 
-    public NNModelLoadingStressTest(TestModels.TestModelEntry model, String acceleratorName) {
-        super(model, acceleratorName);
+    public NNModelLoadingStressTest(TestModels.TestModelEntry model) {
+        super(model);
     }
 
-    @Parameters(name = "{0} model on accelerator {1}")
-    public static List<Object[]> modelsList() {
-        return BenchmarkTestBase.modelsOnAccelerators().stream()
-            .map( modelAndAccelerator -> {
-                TestModels.TestModelEntry modelEntry =
-                    (TestModels.TestModelEntry)modelAndAccelerator[0];
-                return new Object[] { modelEntry.withDisabledEvaluation(), modelAndAccelerator[1] };
-            })
-            .collect(Collectors.collectingAndThen(
-                Collectors.toList(),
-                Collections::unmodifiableList));
+    @Parameters(name = "{0}")
+    public static List<TestModels.TestModelEntry> modelsList() {
+        return TestModels.modelsList().stream()
+                .map(TestModels.TestModelEntry::withDisabledEvaluation)
+                .collect(Collectors.collectingAndThen(
+                        Collectors.toList(),
+                        Collections::unmodifiableList));
     }
 
     @Test
diff --git a/src/com/android/nn/benchmark/app/NNScoringTest.java b/src/com/android/nn/benchmark/app/NNScoringTest.java
index 0447925..d0aad02 100644
--- a/src/com/android/nn/benchmark/app/NNScoringTest.java
+++ b/src/com/android/nn/benchmark/app/NNScoringTest.java
@@ -36,7 +36,6 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import android.util.Log;
 
 /**
  * Tests that run all models/datasets/backend that are required for scoring the device.
@@ -53,8 +52,8 @@
     private static File csvPath;
     private static CSVWriter csvWriter;
 
-    public NNScoringTest(TestModels.TestModelEntry model, String acceleratorName) {
-        super(model, acceleratorName);
+    public NNScoringTest(TestModels.TestModelEntry model) {
+        super(model);
     }
 
     @Override
@@ -70,10 +69,6 @@
         setUseNNApi(useNnapi);
         setCompleteInputSet(useCompleteInputSet);
         enableCompilationCachingBenchmarks();
-        if (useNnapi && mAcceleratorName != null) {
-            Log.i(NNBenchmark.TAG, "Using accelerator " + mAcceleratorName);
-            setNnApiAcceleratorName(mAcceleratorName);
-        }
         TestAction ta = new TestAction(mModel, WARMUP_REPEATABLE_SECONDS,
             useCompleteInputSet ? COMPLETE_SET_TIMEOUT_SECOND : RUNTIME_REPEATABLE_SECONDS);
         runTest(ta, mModel.getTestName());
diff --git a/src/com/android/nn/benchmark/app/NNTest.java b/src/com/android/nn/benchmark/app/NNTest.java
index 8466bd4..c42a9c9 100644
--- a/src/com/android/nn/benchmark/app/NNTest.java
+++ b/src/com/android/nn/benchmark/app/NNTest.java
@@ -39,7 +39,7 @@
 public class NNTest extends BenchmarkTestBase {
 
     public NNTest(TestModels.TestModelEntry model) {
-        super(model, /*acceleratorName=*/null);
+        super(model);
     }
 
     @Test
diff --git a/src/com/android/nn/benchmark/app/TFLiteTest.java b/src/com/android/nn/benchmark/app/TFLiteTest.java
index d64c076..e701a06 100644
--- a/src/com/android/nn/benchmark/app/TFLiteTest.java
+++ b/src/com/android/nn/benchmark/app/TFLiteTest.java
@@ -28,7 +28,7 @@
 public class TFLiteTest extends BenchmarkTestBase {
 
     public TFLiteTest(TestModelEntry model) {
-        super(model, /*acceleratorName=*/null);
+        super(model);
     }
 
     @Override
diff --git a/src/com/android/nn/benchmark/core/NNTestBase.java b/src/com/android/nn/benchmark/core/NNTestBase.java
index fe0bddd..cf61d8c 100644
--- a/src/com/android/nn/benchmark/core/NNTestBase.java
+++ b/src/com/android/nn/benchmark/core/NNTestBase.java
@@ -20,34 +20,20 @@
 import android.content.Context;
 import android.content.res.AssetManager;
 import android.os.Build;
-import android.system.Os;
-import android.system.ErrnoException;
 import android.util.Log;
 import android.util.Pair;
 import android.widget.TextView;
-import androidx.test.InstrumentationRegistry;
-import com.android.nn.benchmark.core.sl.QualcommSupportLibraryDriverHandler;
-import com.android.nn.benchmark.core.sl.SupportLibraryDriverHandler;
-import java.io.BufferedReader;
+
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.Random;
 import java.util.stream.Collectors;
-import dalvik.system.BaseDexClassLoader;
-import android.content.res.AssetFileDescriptor;
-import android.os.ParcelFileDescriptor;
-import android.os.ParcelFileDescriptor.AutoCloseInputStream;
-import java.util.jar.JarFile;
-import java.util.jar.JarEntry;
 
 public class NNTestBase implements AutoCloseable {
     protected static final String TAG = "NN_TESTBASE";
@@ -75,8 +61,7 @@
             boolean enableIntermediateTensorsDump,
             String nnApiDeviceName,
             boolean mmapModel,
-            String nnApiCacheDir,
-            long nnApiLibHandle) throws NnApiDelegationFailure;
+            String nnApiCacheDir) throws NnApiDelegationFailure;
 
     private synchronized native void destroyModel(long modelHandle);
 
@@ -90,8 +75,7 @@
             int flags);
 
     private synchronized native CompilationBenchmarkResult runCompilationBenchmark(
-        long modelHandle, int maxNumIterations, float warmupTimeoutSec, float runTimeoutSec,
-        boolean useNnapiSl);
+            long modelHandle, int maxNumIterations, float warmupTimeoutSec, float runTimeoutSec);
 
     private synchronized native void dumpAllLayers(
             long modelHandle,
@@ -145,29 +129,6 @@
     private String mTemporaryModelFilePath;
     private boolean mSampleResults;
 
-    // If set to true the test will look for the NNAPI SL binaries in the app resources,
-    // copy them into the app cache dir and configure the TfLite test to load NNAPI
-    // from the library.
-    private boolean mUseNnApiSupportLibrary = false;
-    private boolean mExtractNnApiSupportLibrary = false;
-
-    static final String USE_NNAPI_SL_PROPERTY = "useNnApiSupportLibrary";
-    static final String EXTRACT_NNAPI_SL_PROPERTY = "extractNnApiSupportLibrary";
-
-    private static boolean getBooleanTestParameter(String key, boolean defaultValue) {
-      // All instrumentation arguments are passed as String so I have to convert the value here.
-      return Boolean.parseBoolean(
-          InstrumentationRegistry.getArguments().getString(key, "" + defaultValue));
-    }
-
-    public static boolean shouldUseNnApiSupportLibrary() {
-      return getBooleanTestParameter(USE_NNAPI_SL_PROPERTY, false);
-    }
-
-    public static boolean shouldExtractNnApiSupportLibrary() {
-        return getBooleanTestParameter(EXTRACT_NNAPI_SL_PROPERTY, false);
-    }
-
     public NNTestBase(String modelName, String modelFile, int[] inputShape,
             InferenceInOutSequence.FromAssets[] inputOutputAssets,
             InferenceInOutSequence.FromDataset[] inputOutputDatasets,
@@ -208,9 +169,6 @@
       setTfLiteBackend(TfLiteBackend.NNAPI);
     }
 
-    public  void setUseNnApiSupportLibrary(boolean value) {mUseNnApiSupportLibrary = value;}
-    public  void setExtractNnApiSupportLibrary(boolean value) {mExtractNnApiSupportLibrary = value;}
-
     public void setNNApiDeviceName(String value) {
         if (mTfLiteBackend != TfLiteBackend.NNAPI) {
             Log.e(TAG, "Setting device name has no effect when not using NNAPI");
@@ -224,20 +182,6 @@
 
     public final boolean setupModel(Context ipcxt) throws IOException, NnApiDelegationFailure {
         mContext = ipcxt;
-        long nnApiLibHandle = 0;
-        if (mUseNnApiSupportLibrary) {
-          // TODO: support different drivers providers maybe with a flag
-          QualcommSupportLibraryDriverHandler qcSlhandler = new QualcommSupportLibraryDriverHandler();
-          nnApiLibHandle = qcSlhandler.getOrLoadNnApiSlHandle(mContext, mExtractNnApiSupportLibrary);
-          if (nnApiLibHandle == 0) {
-            Log.e(TAG, String
-                .format("Unable to find NNAPI SL entry point '%s' in embedded libraries path.",
-                    SupportLibraryDriverHandler.NNAPI_SL_LIB_NAME));
-            throw new NnApiDelegationFailure(String
-                .format("Unable to find NNAPI SL entry point '%s' in embedded libraries path.",
-                    SupportLibraryDriverHandler.NNAPI_SL_LIB_NAME));
-          }
-        }
         if (mTemporaryModelFilePath != null) {
             deleteOrWarn(mTemporaryModelFilePath);
         }
@@ -245,7 +189,7 @@
         String nnApiCacheDir = mContext.getCodeCacheDir().toString();
         mModelHandle = initModel(
                 mTemporaryModelFilePath, mTfLiteBackend.ordinal(), mEnableIntermediateTensorsDump,
-                mNNApiDeviceName.orElse(null), mMmapModel, nnApiCacheDir, nnApiLibHandle);
+                mNNApiDeviceName.orElse(null), mMmapModel, nnApiCacheDir);
         if (mModelHandle == 0) {
             Log.e(TAG, "Failed to init the model");
             return false;
@@ -424,8 +368,7 @@
             throw new UnsupportedModelException("Unsupported model");
         }
         CompilationBenchmarkResult result = runCompilationBenchmark(
-            mModelHandle, maxIterations, warmupTimeoutSec, runTimeoutSec,
-            shouldUseNnApiSupportLibrary());
+                mModelHandle, maxIterations, warmupTimeoutSec, runTimeoutSec);
         if (result == null) {
             throw new BenchmarkException("Failed to run compilation benchmark");
         }
@@ -475,7 +418,11 @@
 
             try (InputStream in = assetManager.open(modelAssetName);
                  FileOutputStream out = new FileOutputStream(outFile)) {
-                copyFull(in, out);
+                byte[] byteBuffer = new byte[1024];
+                int readBytes = -1;
+                while ((readBytes = in.read(byteBuffer)) != -1) {
+                    out.write(byteBuffer, 0, readBytes);
+                }
             }
         } catch (IOException e) {
             Log.e(TAG, "Failed to copy asset file: " + modelAssetName, e);
@@ -483,14 +430,6 @@
         }
     }
 
-    public static void copyFull(InputStream in, OutputStream out) throws IOException {
-        byte[] byteBuffer = new byte[1024];
-        int readBytes = -1;
-        while ((readBytes = in.read(byteBuffer)) != -1) {
-            out.write(byteBuffer, 0, readBytes);
-        }
-    }
-
     @Override
     public void close() {
         destroy();
diff --git a/src/com/android/nn/benchmark/core/NnApiDelegationFailure.java b/src/com/android/nn/benchmark/core/NnApiDelegationFailure.java
index d2d07b8..ed0cea2 100644
--- a/src/com/android/nn/benchmark/core/NnApiDelegationFailure.java
+++ b/src/com/android/nn/benchmark/core/NnApiDelegationFailure.java
@@ -27,11 +27,6 @@
         mNnApiErrno = nnApiErrno;
     }
 
-    public NnApiDelegationFailure(String message) {
-        super(message);
-        mNnApiErrno = 0;
-    }
-
     public int getNnApiErrno() {
         return mNnApiErrno;
     }
diff --git a/src/com/android/nn/benchmark/core/Processor.java b/src/com/android/nn/benchmark/core/Processor.java
index 87a4455..f8aaa04 100644
--- a/src/com/android/nn/benchmark/core/Processor.java
+++ b/src/com/android/nn/benchmark/core/Processor.java
@@ -23,7 +23,6 @@
 import android.util.Log;
 import android.util.Pair;
 
-import com.android.nn.benchmark.core.TestModels.TestModelEntry;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
@@ -71,13 +70,6 @@
     private float mCompilationBenchmarkRunTimeSeconds;
     private int mCompilationBenchmarkMaxIterations;
 
-    // Used to avoid accessing the Instrumentation Arguments when the crash tests are spawning
-    // a separate process.
-    private String mModelFilterRegex;
-
-    private boolean mUseNnApiSupportLibrary;
-    private boolean mExtractNnApiSupportLibrary;
-
     public Processor(Context context, Processor.Callback callback, int[] testList) {
         mContext = context;
         mCallback = callback;
@@ -91,9 +83,6 @@
         mMaxRunIterations = 0;
         mBenchmarkCompilationCaching = false;
         mBackend = TfLiteBackend.CPU;
-        mModelFilterRegex = null;
-        mUseNnApiSupportLibrary = false;
-        mExtractNnApiSupportLibrary = false;
     }
 
     public void setUseNNApi(boolean useNNApi) {
@@ -136,13 +125,6 @@
         mMaxRunIterations = value;
     }
 
-    public void setModelFilterRegex(String value) {
-        this.mModelFilterRegex = value;
-    }
-
-    public void setUseNnApiSupportLibrary(boolean value) { mUseNnApiSupportLibrary = value; }
-    public void setExtractNnApiSupportLibrary(boolean value) { mExtractNnApiSupportLibrary = value; }
-
     public void enableCompilationCachingBenchmarks(
             float warmupTimeSeconds, float runTimeSeconds, int maxIterations) {
         mBenchmarkCompilationCaching = true;
@@ -181,10 +163,7 @@
             throws NnApiDelegationFailure {
         try (NNTestBase tb = testModelEntry.createNNTestBase(TfLiteBackend.NNAPI,
                 /*enableIntermediateTensorsDump=*/false,
-                /*mmapModel=*/ false,
-                NNTestBase.shouldUseNnApiSupportLibrary(),
-                NNTestBase.shouldExtractNnApiSupportLibrary()
-            )) {
+                /*mmapModel=*/ false)) {
             tb.setNNApiDeviceName(acceleratorName);
             return tb.setupModel(context);
         } catch (IOException e) {
@@ -210,7 +189,7 @@
             oldTestBase.destroy();
         }
         NNTestBase tb = t.createNNTestBase(mBackend, /*enableIntermediateTensorsDump=*/false,
-            mMmapModel, mUseNnApiSupportLibrary, mExtractNnApiSupportLibrary);
+                mMmapModel);
         if (mBackend == TfLiteBackend.NNAPI) {
             tb.setNNApiDeviceName(mAcceleratorName);
         }
@@ -342,7 +321,6 @@
     }
 
     private void benchmarkAllModels() throws IOException, BenchmarkException {
-        final List<TestModelEntry> modelsList = TestModels.modelsList(mModelFilterRegex);
         // Loop over the tests we want to benchmark
         for (int ct = 0; ct < mTestList.length; ct++) {
             if (!mRun.get()) {
@@ -360,7 +338,7 @@
             }
 
             TestModels.TestModelEntry testModel =
-                    modelsList.get(mTestList[ct]);
+                    TestModels.modelsList().get(mTestList[ct]);
 
             int testNumber = ct + 1;
             mCallback.onStatusUpdate(testNumber, mTestList.length,
diff --git a/src/com/android/nn/benchmark/core/TestModels.java b/src/com/android/nn/benchmark/core/TestModels.java
index d91e013..9eb90f5 100644
--- a/src/com/android/nn/benchmark/core/TestModels.java
+++ b/src/com/android/nn/benchmark/core/TestModels.java
@@ -19,9 +19,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
-import androidx.test.InstrumentationRegistry;
-import java.util.stream.Collectors;
-import android.util.Log;
 
 /** Information about available benchmarking models */
 public class TestModels {
@@ -74,32 +71,27 @@
             mInDataSize = inDataSize;
         }
 
-        // Used by VTS tests.
         public NNTestBase createNNTestBase() {
             return new NNTestBase(mModelName, mModelFile, mInputShape, mInOutAssets, mInOutDatasets,
                     mEvaluator, mMinSdkVersion);
         }
 
         public NNTestBase createNNTestBase(TfLiteBackend tfLiteBackend, boolean enableIntermediateTensorsDump) {
-            return createNNTestBase(tfLiteBackend, enableIntermediateTensorsDump, /*mmapModel=*/false,
-                /*useNnApiSl=*/false, /*extractNnApiSl=*/false);
+            return createNNTestBase(tfLiteBackend, enableIntermediateTensorsDump, /*mmapModel=*/false);
         }
 
         // Used by CTS tests.
         public NNTestBase createNNTestBase(boolean useNNAPI, boolean enableIntermediateTensorsDump) {
             TfLiteBackend tfLiteBackend = useNNAPI ? TfLiteBackend.NNAPI : TfLiteBackend.CPU;
-            return createNNTestBase(tfLiteBackend, enableIntermediateTensorsDump,
-                /*mmapModel=*/false, /*useNnApiSl=*/false, /*extractNnApiSl=*/false);
+            return createNNTestBase(tfLiteBackend, enableIntermediateTensorsDump, /*mmapModel=*/false);
         }
 
         public NNTestBase createNNTestBase(TfLiteBackend tfLiteBackend, boolean enableIntermediateTensorsDump,
-                boolean mmapModel, boolean useNnApiSl, boolean extractNnApiSl) {
+                boolean mmapModel) {
             NNTestBase test = createNNTestBase();
             test.setTfLiteBackend(tfLiteBackend);
             test.enableIntermediateTensorsDump(enableIntermediateTensorsDump);
             test.setMmapModel(mmapModel);
-            test.setUseNnApiSupportLibrary(useNnApiSl);
-            test.setExtractNnApiSupportLibrary(extractNnApiSl);
             return test;
         }
 
@@ -137,46 +129,12 @@
         return frozenEntries.get() != null;
     }
 
-    static final String MODEL_FILTER_PROPERTY = "nnBenchmarkModelFilter";
-
-    public static String getModelFilterRegex() {
-        // All instrumentation arguments are passed as String so I have to convert the value here.
-        return InstrumentationRegistry.getArguments().getString(MODEL_FILTER_PROPERTY, "");
-    }
-
-    /**
-     * Returns the list of models eventually by a user specified instrumentation filter regex.
-     */
-    static public List<TestModelEntry> modelsList() {
-        return modelsList(getModelFilterRegex());
-    }
-
-    /**
-     * Returns the list of models eventually by a user specified instrumentation filter.
-     */
-    static public List<TestModelEntry> modelsList(String modelFilterRegex) {
-        if (modelFilterRegex == null || modelFilterRegex.isEmpty()) {
-            Log.i("NN_BENCHMARK", "No model filter, returning all models");
-            return fullModelsList();
-        }
-        Log.i("NN_BENCHMARK", "Filtering model with filter " + modelFilterRegex);
-        List<TestModelEntry> result = fullModelsList().stream()
-                .filter( modelEntry ->
-                    modelEntry.mModelName.matches(modelFilterRegex)
-                )
-                .collect(Collectors.toList());
-
-        Log.i("NN_BENCHMARK", "Returning models: " + result);
-
-        return result;
-    }
-
     /**
      * Fetch list of test models.
      *
      * If this method was called at least once, then it's impossible to register new models.
      */
-    static public List<TestModelEntry> fullModelsList() {
+    static public List<TestModelEntry> modelsList() {
         frozenEntries.compareAndSet(null, sTestModelEntryList);
         return frozenEntries.get();
     }
diff --git a/src/com/android/nn/benchmark/core/sl/QualcommSupportLibraryDriverHandler.java b/src/com/android/nn/benchmark/core/sl/QualcommSupportLibraryDriverHandler.java
deleted file mode 100644
index 3edaef1..0000000
--- a/src/com/android/nn/benchmark/core/sl/QualcommSupportLibraryDriverHandler.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package com.android.nn.benchmark.core.sl;
-
-import android.content.Context;
-import android.system.ErrnoException;
-import android.system.Os;
-import android.util.Log;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.List;
-import java.util.stream.Collectors;
-
-public class QualcommSupportLibraryDriverHandler extends SupportLibraryDriverHandler {
-  private static final String NNAPI_DSP_SL_LIBRARIES_ASSET_PATH = "dsp_loaded_libraries.txt";
-  private static final String DSP_LOAD_PATH_ENV_VAR = "ADSP_LIBRARY_PATH";
-
-  @Override
-  public void prepareDriver(Context context, String nnSupportLibFilePath) throws IOException {
-    boolean isApkPath = nnSupportLibFilePath.contains("apk!");
-
-    String dspLibsFolder = null;
-    Log.i(TAG, "Preparing NNAPI SL");
-    if (isApkPath) {
-      dspLibsFolder = extractDSPLibraries(context);
-    } else {
-      dspLibsFolder = new File(nnSupportLibFilePath).getParent();
-    }
-
-    if (dspLibsFolder != null) {
-      try {
-        Os.setenv(DSP_LOAD_PATH_ENV_VAR, dspLibsFolder, /*overwrite=*/true);
-        Log.i(TAG, String.format("Overwritten system env variable %s with %s",
-            DSP_LOAD_PATH_ENV_VAR, dspLibsFolder));
-      } catch (ErrnoException errnoException) {
-        throw new IOException(String.format("Unable to overwrite system env variable %s with %s",
-            DSP_LOAD_PATH_ENV_VAR, dspLibsFolder), errnoException);
-      }
-    }
-  }
-
-  private String extractDSPLibraries(Context context)
-      throws IOException {
-    try {
-      BufferedReader slLibraryListReader
-          = new BufferedReader(
-          new InputStreamReader(
-              context.getAssets().open(NNAPI_DSP_SL_LIBRARIES_ASSET_PATH)));
-      final String nnLibTargetFolder = context.getCodeCacheDir().toString();
-      final List<String> libsToExtract = slLibraryListReader.lines().collect(Collectors.toList());
-      if (libsToExtract.isEmpty()) {
-        Log.i(TAG, "No SL library to extract.");
-        return null;
-      }
-      for (final String libraryFile : libsToExtract) {
-        if (!extractNnApiSlLibTo(context, libraryFile, nnLibTargetFolder)) {
-          throw new FileNotFoundException(String.format("Unable to extract file %s", libraryFile));
-        }
-      }
-      return nnLibTargetFolder;
-    } catch (IOException e) {
-      Log.e(TAG, "Unable to find list of SL libraries to extract from APK under assets.", e);
-      throw e;
-    }
-  }
-}
diff --git a/src/com/android/nn/benchmark/core/sl/SupportLibraryDriverHandler.java b/src/com/android/nn/benchmark/core/sl/SupportLibraryDriverHandler.java
deleted file mode 100644
index 5c2f232..0000000
--- a/src/com/android/nn/benchmark/core/sl/SupportLibraryDriverHandler.java
+++ /dev/null
@@ -1,147 +0,0 @@
-package com.android.nn.benchmark.core.sl;
-
-import android.content.Context;
-import android.util.Log;
-import com.android.nn.benchmark.core.NNTestBase;
-import dalvik.system.BaseDexClassLoader;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-import java.util.stream.Collectors;
-
-/**
- * Abstracts the initialization required to enable a NNAPI Support Library for a given vendor.
- **/
-public abstract class SupportLibraryDriverHandler {
-
-  static {
-    System.loadLibrary("support_library_jni");
-  }
-
-  protected static final String TAG = "NN_TESTBASE";
-
-  private static final String NNAPI_SL_LIBRARIES_LIST_ASSET_PATH = "sl_prebuilt_filelist.txt";
-  public static final String NNAPI_SL_LIB_NAME = "libnnapi_sl_driver.so";
-
-  private static native long loadNnApiSlHandle(String nnApiSlPath);
-
-  // Guarded by this
-  private static long nnapiSlHandle = 0;
-
-  public synchronized long getOrLoadNnApiSlHandle(Context context, boolean extractNnApiSupportLibrary)
-      throws IOException {
-    if (nnapiSlHandle == 0) {
-      Log.i(TAG, "Initializing NNAPI SL.");
-
-      String nnSupportLibFilePath = null;
-      Log.i(TAG, "Preparing NNAPI SL");
-      if (extractNnApiSupportLibrary) {
-        nnSupportLibFilePath = extractAllAndGetNnApiSlPath(context, NNAPI_SL_LIB_NAME);
-      } else {
-        nnSupportLibFilePath = getNnApiSlPathFromApkLibraries(context, NNAPI_SL_LIB_NAME);
-      }
-
-      prepareDriver(context, nnSupportLibFilePath);
-
-      if (nnSupportLibFilePath != null) {
-        nnapiSlHandle = loadNnApiSlHandle(nnSupportLibFilePath);
-        if (nnapiSlHandle == 0) {
-          Log.e(TAG, String
-              .format("Unable load NNAPI SL from '%s'.", nnSupportLibFilePath));
-        } else {
-          Log.i(TAG, String
-              .format("Successfully loaded NNAPI SL from '%s'.", nnSupportLibFilePath));
-        }
-      } else {
-        Log.e(TAG, String
-            .format("Unable to find NNAPI SL entry point '%s' in embedded libraries path.",
-                NNAPI_SL_LIB_NAME));
-      }
-    }
-    return nnapiSlHandle;
-  }
-
-  private static InputStream getInputStreamFromApk(String apkPath, String filePath) throws IOException {
-    Log.i(TAG, String.format("Getting input stream from APK '%s' and file '%s'.", apkPath, filePath));
-
-    JarFile jarFile = new JarFile(apkPath);
-    JarEntry jarEntry = jarFile.getJarEntry(filePath);
-    return jarFile.getInputStream(jarEntry);
-  }
-
-  private static String extractAllAndGetNnApiSlPath(Context context, String entryPointName)
-      throws IOException {
-    try {
-      BufferedReader slLibraryListReader
-          = new BufferedReader(
-          new InputStreamReader(
-              context.getAssets().open(NNAPI_SL_LIBRARIES_LIST_ASSET_PATH)));
-      String result = null;
-      final String nnLibTargetFolder = context.getCodeCacheDir().toString();
-      for (final String libraryFile : slLibraryListReader.lines().collect(Collectors.toList())) {
-        try {
-          boolean copied = extractNnApiSlLibTo(context, libraryFile, nnLibTargetFolder);
-          if (copied && libraryFile.equals(entryPointName)) {
-            result = new File(nnLibTargetFolder, libraryFile).getAbsolutePath();
-          }
-        } catch (FileNotFoundException unableToExtractFile) {
-          return null;
-        }
-      }
-      return result;
-    } catch (IOException e) {
-      Log.e(TAG, "Unable to find list of SL libraries under assets.", e);
-      throw e;
-    }
-  }
-
-  protected static boolean extractNnApiSlLibTo(Context context, String libraryFile, String targetFolder)
-      throws IOException {
-    String sourcePath = getNnApiSlPathFromApkLibraries(context, libraryFile);
-    if (sourcePath == null) {
-      Log.w(TAG, String.format("Unable to find SL library '%s' to extract assuming is not part of this chipset distribution.", libraryFile));
-      return false;
-    }
-
-    String[] apkAndLibraryPaths = sourcePath.split("!");
-    if (apkAndLibraryPaths.length != 2) {
-      final String errorMsg = String.format("Unable to extract %s.", sourcePath);
-      Log.e(TAG, errorMsg);
-      throw new FileNotFoundException(errorMsg);
-    }
-
-    File targetPath = new File(targetFolder, libraryFile);
-    try(InputStream in = getInputStreamFromApk(apkAndLibraryPaths[0],
-        // Removing leading '/'
-        apkAndLibraryPaths[1].substring(1));
-        OutputStream out = new FileOutputStream(targetPath)
-    ) {
-      NNTestBase.copyFull(in, out);
-    }
-
-    Log.i(TAG, String.format("Copied '%s' to '%s'.", sourcePath, targetPath));
-
-    return true;
-  }
-
-  private static String getNnApiSlPathFromApkLibraries(Context context, String resourceName) {
-    BaseDexClassLoader dexClassLoader = (BaseDexClassLoader) context.getClassLoader();
-    // Removing the "lib" prefix and ".so" suffix.
-    String libShortName = resourceName.substring(3, resourceName.length() - 3);
-    String result = dexClassLoader.findLibrary(libShortName);
-    if (result != null) {
-      return result;
-    }
-    return dexClassLoader.findLibrary(resourceName);
-  }
-
-  // Vendor-specifi preparation steps
-  protected abstract void prepareDriver(Context context, String nnSupportLibFilePath) throws IOException;
-}
diff --git a/src/com/android/nn/benchmark/util/DumpIntermediateTensors.java b/src/com/android/nn/benchmark/util/DumpIntermediateTensors.java
index 438384a..5c4197c 100644
--- a/src/com/android/nn/benchmark/util/DumpIntermediateTensors.java
+++ b/src/com/android/nn/benchmark/util/DumpIntermediateTensors.java
@@ -94,9 +94,7 @@
                     TfLiteBackend backend = useNNAPI ? TfLiteBackend.NNAPI : TfLiteBackend.CPU;
                     TestModelEntry modelEntry = TestModels.getModelByName(modelName);
                     try (NNTestBase testBase = modelEntry.createNNTestBase(
-                            backend, /*enableIntermediateTensorsDump*/true, /*mmapModel*/false,
-                        NNTestBase.shouldUseNnApiSupportLibrary(),
-                        NNTestBase.shouldExtractNnApiSupportLibrary())) {
+                            backend, /*enableIntermediateTensorsDump*/true, /*mmapModel*/false)) {
                         testBase.setupModel(this);
                         File outputDir = new File(getFilesDir() + "/" + DUMP_DIR +
                                 "/" + modelName, useNNAPIDir);
diff --git a/src/com/android/nn/benchmark/app/AcceleratorSpecificTestSupport.java b/src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
similarity index 74%
rename from src/com/android/nn/benchmark/app/AcceleratorSpecificTestSupport.java
rename to src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
index 0bdac1f..68e35d9 100644
--- a/src/com/android/nn/benchmark/app/AcceleratorSpecificTestSupport.java
+++ b/src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package com.android.nn.benchmark.app;
+package com.android.nn.crashtest.app;
 
 import android.content.Context;
 import android.util.Log;
@@ -32,7 +32,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.concurrent.Callable;
@@ -93,21 +92,6 @@
         return accelerators;
     }
 
-    // This method returns an empty list if no accelerator name has been specified.
-    static List<String> getOptionalTargetAcceleratorNames() {
-        List<String> accelerators = new ArrayList<>();
-        String acceleratorFilter = getTestParameter(ACCELERATOR_FILTER_PROPERTY, "");
-        if (acceleratorFilter.isEmpty()) {
-            return Collections.emptyList();
-        }
-        accelerators.addAll(NNTestBase.availableAcceleratorNames().stream().filter(
-            name -> name.matches(acceleratorFilter)).collect(
-            Collectors.toList()));
-        if (getBooleanTestParameter(INCLUDE_NNAPI_SELECTED_ACCELERATOR_PROPERTY, false)) {
-            accelerators.add(null); // running tests with no specified target accelerator too
-        }
-        return accelerators;
-    }
 
     static List<Object[]> perAcceleratorTestConfig(List<Object[]> testConfig) {
         return testConfig.stream()
@@ -122,28 +106,6 @@
                 .collect(Collectors.toList());
     }
 
-    // Generates a per-accelerator list of test configurations if an accelerator filter has been
-    // specified. Will return the origin list with an extra `null` parameter for the accelerator
-    // name if not.
-    static List<Object[]> maybeAddAcceleratorsToTestConfig(List<Object[]> testConfig) {
-        return testConfig.stream()
-            .flatMap(currConfigurationParams -> {
-                List<String> accelerators = getOptionalTargetAcceleratorNames();
-                if (accelerators.isEmpty()) {
-                    accelerators = Collections.singletonList((String)null);
-                }
-                return accelerators.stream().map(
-                    accelerator -> {
-                        Object[] result =
-                            Arrays.copyOf(currConfigurationParams,
-                                currConfigurationParams.length + 1);
-                        result[currConfigurationParams.length] = accelerator;
-                        return result;
-                    });
-            })
-            .collect(Collectors.toList());
-    }
-
     class DriverLivenessChecker implements Callable<Boolean> {
         final Processor mProcessor;
         private final AtomicBoolean mRun = new AtomicBoolean(true);
@@ -164,8 +126,6 @@
             mProcessor.setTfLiteBackend(TfLiteBackend.NNAPI);
             mProcessor.setCompleteInputSet(false);
             mProcessor.setNnApiAcceleratorName(acceleratorName);
-            mProcessor.setUseNnApiSupportLibrary(NNTestBase.shouldUseNnApiSupportLibrary());
-            mProcessor.setExtractNnApiSupportLibrary(NNTestBase.shouldExtractNnApiSupportLibrary());
             mTestModelEntry = testModelEntry;
         }
 
@@ -185,7 +145,7 @@
                         return false;
                     }
                 } catch (IOException | BenchmarkException e) {
-                    Log.e(TAG, String.format("Error running model %s", mTestModelEntry.mModelName), e);
+                    Log.e(TAG, String.format("Error running model %s", mTestModelEntry.mModelName));
                     return false;
                 }
             }
diff --git a/src/com/android/nn/crashtest/app/NNClientEarlyTerminationTest.java b/src/com/android/nn/crashtest/app/NNClientEarlyTerminationTest.java
index 691f4c0..b8d4188 100644
--- a/src/com/android/nn/crashtest/app/NNClientEarlyTerminationTest.java
+++ b/src/com/android/nn/crashtest/app/NNClientEarlyTerminationTest.java
@@ -24,8 +24,6 @@
 
 import androidx.test.InstrumentationRegistry;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
-import com.android.nn.benchmark.core.NNTestBase;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.benchmark.core.TestModels;
 
@@ -48,7 +46,7 @@
 @RunWith(Parameterized.class)
 public class NNClientEarlyTerminationTest extends
         ActivityInstrumentationTestCase2<NNParallelTestActivity> implements
-    AcceleratorSpecificTestSupport {
+        AcceleratorSpecificTestSupport {
 
     private static final String TAG = "NNClientEarlyTermination";
     private static final Duration MAX_SEPARATE_PROCESS_EXECUTION_TIME = Duration.ofSeconds(70);
@@ -129,11 +127,11 @@
     }
 
     private Intent compileSupportedModelsOnNThreadsFor(int threadCount, Duration testDuration)
-        throws NnApiDelegationFailure {
+            throws NnApiDelegationFailure {
         Intent intent = new Intent();
         intent.putExtra(
-            NNParallelTestActivity.EXTRA_TEST_LIST, IntStream.range(0,
-                TestModels.modelsList().size()).toArray());
+                NNParallelTestActivity.EXTRA_TEST_LIST, IntStream.range(0,
+                        TestModels.modelsList().size()).toArray());
         intent.putExtra(NNParallelTestActivity.EXTRA_THREAD_COUNT, threadCount);
         intent.putExtra(NNParallelTestActivity.EXTRA_TEST_DURATION_MILLIS, testDuration.toMillis());
         intent.putExtra(NNParallelTestActivity.EXTRA_RUN_IN_SEPARATE_PROCESS, true);
@@ -141,10 +139,6 @@
         intent.putExtra(NNParallelTestActivity.EXTRA_ACCELERATOR_NAME, mAcceleratorName);
         intent.putExtra(NNParallelTestActivity.EXTRA_IGNORE_UNSUPPORTED_MODELS, true);
         intent.putExtra(NNParallelTestActivity.EXTRA_RUN_MODEL_COMPILATION_ONLY, true);
-        intent.putExtra(NNParallelTestActivity.EXTRA_USE_NNAPI_SL,
-            NNTestBase.shouldUseNnApiSupportLibrary());
-        intent.putExtra(NNParallelTestActivity.EXTRA_EXTRACT_NNAPI_SL,
-            NNTestBase.shouldExtractNnApiSupportLibrary());
         return intent;
     }
 }
diff --git a/src/com/android/nn/crashtest/app/NNMemoryMappedModelCompilationTest.java b/src/com/android/nn/crashtest/app/NNMemoryMappedModelCompilationTest.java
index 466c67d..b652396 100644
--- a/src/com/android/nn/crashtest/app/NNMemoryMappedModelCompilationTest.java
+++ b/src/com/android/nn/crashtest/app/NNMemoryMappedModelCompilationTest.java
@@ -24,7 +24,6 @@
 
 import androidx.test.InstrumentationRegistry;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.benchmark.core.TestModels;
 
@@ -49,7 +48,7 @@
 @RunWith(Parameterized.class)
 public class NNMemoryMappedModelCompilationTest extends
         ActivityInstrumentationTestCase2<NNParallelTestActivity> implements
-    AcceleratorSpecificTestSupport {
+        AcceleratorSpecificTestSupport {
 
     private static final String TAG = "NNMemoryMappedModelCompilation";
     private static final Duration MAX_SEPARATE_PROCESS_EXECUTION_TIME = Duration.ofSeconds(70);
diff --git a/src/com/android/nn/crashtest/app/NNMultipleProcessInferenceTest.java b/src/com/android/nn/crashtest/app/NNMultipleProcessInferenceTest.java
index ce92967..ba0e51e 100644
--- a/src/com/android/nn/crashtest/app/NNMultipleProcessInferenceTest.java
+++ b/src/com/android/nn/crashtest/app/NNMultipleProcessInferenceTest.java
@@ -18,7 +18,6 @@
 
 import android.content.Intent;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.crashtest.core.test.RunModelsInMultipleProcesses;
 
diff --git a/src/com/android/nn/crashtest/app/NNMultipleProcessModelLoadTest.java b/src/com/android/nn/crashtest/app/NNMultipleProcessModelLoadTest.java
index 54d27c2..860cf02 100644
--- a/src/com/android/nn/crashtest/app/NNMultipleProcessModelLoadTest.java
+++ b/src/com/android/nn/crashtest/app/NNMultipleProcessModelLoadTest.java
@@ -18,7 +18,6 @@
 
 import android.content.Intent;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.crashtest.core.test.RunModelsInMultipleProcesses;
 
diff --git a/src/com/android/nn/crashtest/app/NNMultipleProcessTest.java b/src/com/android/nn/crashtest/app/NNMultipleProcessTest.java
index bd78a53..4c81b93 100644
--- a/src/com/android/nn/crashtest/app/NNMultipleProcessTest.java
+++ b/src/com/android/nn/crashtest/app/NNMultipleProcessTest.java
@@ -25,7 +25,6 @@
 
 import androidx.test.InstrumentationRegistry;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.app.BenchmarkTestBase;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.benchmark.core.TestModels;
diff --git a/src/com/android/nn/crashtest/app/NNParallelInferenceTest.java b/src/com/android/nn/crashtest/app/NNParallelInferenceTest.java
index 6a84bc5..b6f66a9 100644
--- a/src/com/android/nn/crashtest/app/NNParallelInferenceTest.java
+++ b/src/com/android/nn/crashtest/app/NNParallelInferenceTest.java
@@ -26,9 +26,7 @@
 
 import androidx.test.InstrumentationRegistry;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.app.BenchmarkTestBase;
-import com.android.nn.benchmark.core.NNTestBase;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.benchmark.core.TestModels;
 
@@ -130,10 +128,6 @@
             intent.putExtra(NNParallelTestActivity.EXTRA_ACCELERATOR_NAME, acceleratorName);
             intent.putExtra(NNParallelTestActivity.EXTRA_IGNORE_UNSUPPORTED_MODELS, true);
         }
-        intent.putExtra(NNParallelTestActivity.EXTRA_USE_NNAPI_SL,
-            NNTestBase.shouldUseNnApiSupportLibrary());
-        intent.putExtra(NNParallelTestActivity.EXTRA_EXTRACT_NNAPI_SL,
-            NNTestBase.shouldExtractNnApiSupportLibrary());
         return intent;
     }
 }
diff --git a/src/com/android/nn/crashtest/app/NNParallelTestActivity.java b/src/com/android/nn/crashtest/app/NNParallelTestActivity.java
index 277e1b6..e7c9d95 100644
--- a/src/com/android/nn/crashtest/app/NNParallelTestActivity.java
+++ b/src/com/android/nn/crashtest/app/NNParallelTestActivity.java
@@ -33,7 +33,6 @@
 import android.widget.TextView;
 
 import com.android.nn.benchmark.app.R;
-import com.android.nn.benchmark.core.TestModels;
 import com.android.nn.crashtest.core.CrashTestCoordinator;
 import com.android.nn.crashtest.core.test.RunModelsInParallel;
 
@@ -54,8 +53,6 @@
     public static final String EXTRA_IGNORE_UNSUPPORTED_MODELS = "ignore_unsupported_models";
     public static final String EXTRA_RUN_MODEL_COMPILATION_ONLY = "run_model_compilation_only";
     public static final String EXTRA_MEMORY_MAP_MODEL = "memory_map_model";
-    public static final String EXTRA_USE_NNAPI_SL = "use_nnapi_sl";
-    public static final String EXTRA_EXTRACT_NNAPI_SL = "extract_nnapi_sl";
 
     // Not using AtomicBoolean to have the concept of unset status
     private CrashTestCoordinator mCoordinator;
@@ -112,8 +109,6 @@
         boolean ignoreUnsupportedModels = intent.getBooleanExtra(EXTRA_IGNORE_UNSUPPORTED_MODELS,
                 false);
         boolean mmapModel = intent.getBooleanExtra(EXTRA_MEMORY_MAP_MODEL, false);
-        boolean useNnapiSl = intent.getBooleanExtra(EXTRA_USE_NNAPI_SL, false);
-        boolean extractNnapiSl = intent.getBooleanExtra(EXTRA_EXTRACT_NNAPI_SL, false);
 
         final boolean runModelCompilationOnly = intent.getBooleanExtra(
                 EXTRA_RUN_MODEL_COMPILATION_ONLY, false);
@@ -121,8 +116,7 @@
         mCoordinator.startTest(RunModelsInParallel.class,
             RunModelsInParallel.intentInitializer(testList, threadCount,
                 Duration.ofMillis(testDurationMillis), mTestName, acceleratorName,
-                ignoreUnsupportedModels, runModelCompilationOnly, mmapModel,
-                TestModels.getModelFilterRegex(), useNnapiSl, extractNnapiSl),
+                ignoreUnsupportedModels, runModelCompilationOnly, mmapModel),
             mTestStatus, runInSeparateProcess, mTestName);
 
         mStopTestButton.setEnabled(true);
diff --git a/src/com/android/nn/crashtest/app/NNPerformanceDegradationTest.java b/src/com/android/nn/crashtest/app/NNPerformanceDegradationTest.java
index 8595edd..b91050f 100644
--- a/src/com/android/nn/crashtest/app/NNPerformanceDegradationTest.java
+++ b/src/com/android/nn/crashtest/app/NNPerformanceDegradationTest.java
@@ -22,7 +22,6 @@
 
 import androidx.test.InstrumentationRegistry;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.app.BenchmarkTestBase;
 import com.android.nn.crashtest.core.test.PerformanceDegradationTest;
 
@@ -39,7 +38,7 @@
 @RunWith(Parameterized.class)
 public class NNPerformanceDegradationTest extends
         ActivityInstrumentationTestCase2<NNPerformanceDegradationTestActivity> implements
-    AcceleratorSpecificTestSupport {
+        AcceleratorSpecificTestSupport {
     public static final String TAG = PerformanceDegradationTest.TAG;
 
 
diff --git a/src/com/android/nn/crashtest/app/NNRandomGraphTest.java b/src/com/android/nn/crashtest/app/NNRandomGraphTest.java
index 56c80f0..b477949 100644
--- a/src/com/android/nn/crashtest/app/NNRandomGraphTest.java
+++ b/src/com/android/nn/crashtest/app/NNRandomGraphTest.java
@@ -25,7 +25,6 @@
 
 import androidx.test.InstrumentationRegistry;
 
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
 import com.android.nn.benchmark.app.BenchmarkTestBase;
 import com.android.nn.benchmark.core.NnApiDelegationFailure;
 import com.android.nn.benchmark.core.TestModels;
diff --git a/src/com/android/nn/crashtest/core/test/PerformanceDegradationTest.java b/src/com/android/nn/crashtest/core/test/PerformanceDegradationTest.java
index 77197b9..c203441 100644
--- a/src/com/android/nn/crashtest/core/test/PerformanceDegradationTest.java
+++ b/src/com/android/nn/crashtest/core/test/PerformanceDegradationTest.java
@@ -26,7 +26,7 @@
 import com.android.nn.benchmark.core.Processor;
 import com.android.nn.benchmark.core.TestModels;
 import com.android.nn.benchmark.core.TfLiteBackend;
-import com.android.nn.benchmark.app.AcceleratorSpecificTestSupport;
+import com.android.nn.crashtest.app.AcceleratorSpecificTestSupport;
 import com.android.nn.crashtest.core.CrashTest;
 import com.android.nn.crashtest.core.CrashTestCoordinator;
 
diff --git a/src/com/android/nn/crashtest/core/test/RunModelsInParallel.java b/src/com/android/nn/crashtest/core/test/RunModelsInParallel.java
index 15fc27c..9f01128 100644
--- a/src/com/android/nn/crashtest/core/test/RunModelsInParallel.java
+++ b/src/com/android/nn/crashtest/core/test/RunModelsInParallel.java
@@ -51,9 +51,6 @@
     private static final String IGNORE_UNSUPPORTED_MODELS = "ignore_unsupported_models";
     private static final String RUN_MODEL_COMPILATION_ONLY = "run_model_compilation_only";
     private static final String MEMORY_MAP_MODEL = "memory_map_model";
-    private static final String MODEL_FILTER = "model_filter";
-    private static final String USE_NNAPI_SL = "use_nnapi_sl";
-    private static final String EXTRACT_NNAPI_SL = "extract_nnapi_sl";
 
     private final Set<Processor> activeTests = new HashSet<>();
     private final List<Boolean> mTestCompletionResults = Collections.synchronizedList(
@@ -70,14 +67,11 @@
     private CountDownLatch mParallelTestComplete;
     private ProgressListener mProgressListener;
     private boolean mMmapModel;
-    private boolean mUseNnapiSl;
-    private boolean mExtractNnapiSl;
 
     static public CrashTestIntentInitializer intentInitializer(int[] models, int threadCount,
-        Duration duration, String testName, String acceleratorName,
-        boolean ignoreUnsupportedModels,
-        boolean runModelCompilationOnly, boolean mmapModel, String modelFilter, boolean useNnapiSl,
-        boolean extractNnapiSl) {
+            Duration duration, String testName, String acceleratorName,
+            boolean ignoreUnsupportedModels,
+            boolean runModelCompilationOnly, boolean mmapModel) {
         return intent -> {
             intent.putExtra(MODELS, models);
             intent.putExtra(DURATION, duration.toMillis());
@@ -87,9 +81,6 @@
             intent.putExtra(IGNORE_UNSUPPORTED_MODELS, ignoreUnsupportedModels);
             intent.putExtra(RUN_MODEL_COMPILATION_ONLY, runModelCompilationOnly);
             intent.putExtra(MEMORY_MAP_MODEL, mmapModel);
-            intent.putExtra(MODEL_FILTER, modelFilter);
-            intent.putExtra(USE_NNAPI_SL, useNnapiSl);
-            intent.putExtra(EXTRACT_NNAPI_SL, extractNnapiSl);
         };
     }
 
@@ -105,8 +96,6 @@
                 IGNORE_UNSUPPORTED_MODELS, false);
         mRunModelCompilationOnly = configParams.getBooleanExtra(RUN_MODEL_COMPILATION_ONLY, false);
         mMmapModel = configParams.getBooleanExtra(MEMORY_MAP_MODEL, false);
-        mUseNnapiSl = configParams.getBooleanExtra(USE_NNAPI_SL, false);
-        mExtractNnapiSl = configParams.getBooleanExtra(EXTRACT_NNAPI_SL, false);
         mContext = context;
         mProgressListener = progressListener.orElseGet(() -> (Optional<String> message) -> {
             Log.v(CrashTest.TAG, message.orElse("."));
@@ -149,8 +138,6 @@
         result.setIgnoreUnsupportedModels(mIgnoreUnsupportedModels);
         result.setRunModelCompilationOnly(mRunModelCompilationOnly);
         result.setMmapModel(mMmapModel);
-        result.setUseNnApiSupportLibrary(mUseNnapiSl);
-        result.setExtractNnApiSupportLibrary(mExtractNnapiSl);
         return result;
     }
 
diff --git a/tail_crash_test_logs.sh b/tail_crash_test_logs.sh
index 53af187..64bef3f 100755
--- a/tail_crash_test_logs.sh
+++ b/tail_crash_test_logs.sh
@@ -68,7 +68,7 @@
   esac
 done
 
-CRASH_TEST_LOG_TAGS+=("NN_BENCHMARK" "NN_TESTBASE")
+CRASH_TEST_LOG_TAGS+=("NN_BENCHMARK")
 while IFS='' read -r tag; do
   CRASH_TEST_LOG_TAGS+=("$tag");
 done <<< $(find src/com/android/nn/crashtest -name '*.java' -exec grep "TAG =" {} \; \
@@ -102,7 +102,7 @@
 fi
 
 # Fatal message for everything else to show crash dumps
-LOG_TAG_FILTER="${LOG_TAG_FILTER} QCSL:V tflite:E *:F"
+LOG_TAG_FILTER="${LOG_TAG_FILTER} *:F"
 
 export ANDROID_LOG_TAGS="${LOG_TAG_FILTER}"
 
diff --git a/tools/convert_binary_to_img.py b/tools/convert_binary_to_img.py
deleted file mode 100755
index 843e02f..0000000
--- a/tools/convert_binary_to_img.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/python3
-""" Convert binary back to image. Use float32 as default.
-
-usage: convert_binary_to_img.py [-h] -i INPUT -s height width depth
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -i INPUT, --input INPUT
-                        Path to input binary file. File extension needs to be .input.
-  -s height width depth, --shape height width depth
-                        Output image shape. e.g. 224 224 3
-Example usage:
-python3 convert_binary_to_img.py -i image_file.input -s 224 224 3
-"""
-
-import argparse
-import os
-import sys
-
-import numpy as np
-from PIL import Image
-
-def convert_file(filename: str, h: int, w: int, d: int):
-    """Converts the input binary file back to image with shape following the input parameters.
-
-    Parameters
-    ----------
-    h : int, height
-    w : int, width
-    d : int, depth
-    """
-    with open(filename, 'rb') as f:
-        arr = np.frombuffer(f.read(), dtype=np.float32)
-    print(f'Reshape buffer from {arr.shape} to {(h, w, d)}.')
-    arr = arr.reshape((h, w, d))
-    arr = (arr + 1) * 128
-    im = Image.fromarray(arr.astype(np.uint8))
-    destination = filename.replace('input', 'jpg')
-    print(f'Image generated to {destination}.')
-    im.save(destination)
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-i','--input', type=str, required=True, help='Path to input binary file. File extension needs to be .input.')
-    parser.add_argument('-s','--shape', type=int, required=True, nargs=3, help='Output image shape. e.g. 224 224 3', metavar=('height', 'width', 'depth'))
-    args = parser.parse_args()
-
-    convert_file(args.input, *args.shape)
diff --git a/tools/gen_mobilenet_input.py b/tools/gen_mobilenet_input.py
index d3cced3..ab0241c 100755
--- a/tools/gen_mobilenet_input.py
+++ b/tools/gen_mobilenet_input.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python3
+#!/usr/bin/python3
 """ Generate inputs for NNAPI benchamrks using a image
 
 Usage: