model.py
import json
import numpy
import asyncio
import triton_python_backend_utils as pb_utils

class TritonPythonModel:
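    """BLS dispatcher for the himangy translation models.

    Each tokenized input sentence is forwarded to the
    himangy-<source>-<target> model chosen from its language ID pair, and
    the translated sentences are returned in the OUTPUT_TEXT tensor.
    """
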
    def initialize(self, args):
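        # Look up the numpy dtype of the OUTPUT_TEXT tensor from the model config.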
        self.target_dtype = pb_utils.triton_string_to_numpy(
            pb_utils.get_output_config_by_name(
                json.loads(args["model_config"]), "OUTPUT_TEXT"
            )["data_type"]
        )

    async def execute(self, requests):
        # For every sentence in every request, issue a BLS call to the
        # himangy-<source>-<target> model selected by the language ID pair.
        per_request_awaitables = []
        for request in requests:
            tokenized_sentences = pb_utils.get_input_tensor_by_name(
                request, "INPUT_TEXT_TOKENIZED"
            ).as_numpy()
            input_language_ids = pb_utils.get_input_tensor_by_name(
                request, "INPUT_LANGUAGE_ID"
            ).as_numpy()
            output_language_ids = pb_utils.get_input_tensor_by_name(
                request, "OUTPUT_LANGUAGE_ID"
            ).as_numpy()
            per_request_awaitables.append(
                [
                    pb_utils.InferenceRequest(
                        model_name=f"himangy-{input_language_id[0].decode('utf-8')}-{output_language_id[0].decode('utf-8')}",
                        requested_output_names=["OUTPUT_SENT"],
                        inputs=[
                            pb_utils.Tensor(
                                "INPUT_SENT_TOKENIZED",
                                numpy.array(
                                    [[input_text_tokenized[0].decode("utf-8")]],
                                    dtype="object",
                                ),
                            )
                        ],
                    ).async_exec()
                    for input_text_tokenized, input_language_id, output_language_id in zip(
                        tokenized_sentences, input_language_ids, output_language_ids
                    )
                ]
            )

        responses = []
        for awaitables in per_request_awaitables:
            # Await the per-sentence translations of this request concurrently,
            # decode each OUTPUT_SENT string and pack the batch into OUTPUT_TEXT.
            results = await asyncio.gather(*awaitables)
            translations = [
                [
                    pb_utils.get_output_tensor_by_name(result, "OUTPUT_SENT")
                    .as_numpy()[0, 0]
                    .decode("utf-8")
                ]
                for result in results
            ]
            responses.append(
                pb_utils.InferenceResponse(
                    output_tensors=[
                        pb_utils.Tensor(
                            "OUTPUT_TEXT",
                            numpy.array(translations, dtype=self.target_dtype),
                        )
                    ]
                )
            )
        return responses

    def finalize(self):
        pass