# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import unittest

from .export_llama import build_model


class LlamaTest(unittest.TestCase):
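    """Exercise export_llama.build_model with different export options."""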
    def test_quantized_llama(self):
        # Build the model with int8 quantization (-qmode int8).
        _ = build_model(
            modelname="model",
            extra_opts="--fairseq2 -qmode int8",
            par_local_output=True,
            resource_pkg_name=__name__,
        )

    def test_half_llama(self):
        # Build the model in half precision (-d fp16).
        _ = build_model(
            modelname="model",
            extra_opts="--fairseq2 -d fp16",
            par_local_output=True,
            resource_pkg_name=__name__,
        )


# Disabled: half-precision build delegated to XNNPACK (-X).
#    def test_half_xnnpack_llama(self):
#        output_path = build_model(
#            modelname="model",
#            extra_opts="--fairseq2 -d fp16 -X",
#            par_local_output=True,
#            resource_pkg_name=__name__,
#        )