EdgeClient.inferences.perform_inference

Perform an inference on some data against a particular model version.

EdgeClient.inferences.perform_inference(model_identifier: str, model_version: str, input_sources: List[InputSource], explain=False, tags=None)

This method runs the given input sources against a specific version of a model and returns the resulting inference.

Parameters

| Parameter | Type | Description | Example |
| --- | --- | --- | --- |
| model_identifier | str | The model identifier. | 'ed542963de' |
| model_version | str | The model version string, in semantic version format. | '1.0.1' |
| input_sources | List[InputSource] | A list of input sources of type InputSource. | [InputSource(key="input.txt", text="Today is a great day.")] |
| explain | bool | If the model supports explainability, flag this job to return an explanation of the predictions. | True |
| tags | Mapping[str, str] | An arbitrary set of key/value tags to associate with this inference. | |

Returns

An Inference object returned from the Inference API. The final example below sketches one way to read results from this object.

Examples

# Submit a piece of text to the model.
from modzy.edge import InputSource
inference = edge_client.inferences.perform_inference(
    "text-classifier",
    "1.0.0",
    [
        InputSource(
            key="input.txt",
            text="A sample bit of text to run an inference on."
        )
    ],
    explain=False,
    tags={
        "a tag key": "some tag value",
        "another tag key": "another tag value",
    },
)

# Submit a piece of text to the model as a binary string.
from modzy.edge import InputSource
inference = edge_client.inferences.perform_inference(
    "text-classifier",
    "1.0.0",
    [
        InputSource(
            key="input.txt",
            data=b"QSBzYW1wbGUgYml0IG9mIHRleHQgdG8gcnVuIGFuIGluZmVyZW5jZSBvbi4K"
        )
    ],
    explain=False,
    tags={
        "a tag key": "some tag value",
        "another tag key": "another tag value",
    },
)

# Submit a piece of text to the model from a file in AWS S3.
import os

from modzy.edge import InputSource, S3InputSource
inference = edge_client.inferences.perform_inference(
    "text-classifier",
    "1.0.0",
    [
        InputSource(
            key="input.txt",
            s3=S3InputSource(
                region="us-east-1",
                bucket="my-bucket",
                path="path/to/my-file.txt",
                access_key_id=os.getenv("AWS_ACCESS_KEY_ID"),
                access_key_secret=os.getenv("AWS_SECRET_ACCESS_KEY")
            )
        )
    ],
    explain=False,
    tags={
        "a tag key": "some tag value",
        "another tag key": "another tag value",
    },
)

# Submit a piece of text to the model from a file in a S3-compatible storage provider
# like MinIO or NetApp StorageGrid.
import os

from modzy.edge import InputSource, S3InputSource
inference = edge_client.inferences.perform_inference(
    "text-classifier",
    "1.0.0",
    [
        InputSource(
            key="input.txt",
            s3=S3InputSource(
                endpoint="https://my-storage-provider.example.com",
                bucket="my-bucket",
                path="path/to/my-file.txt",
                access_key_id=os.getenv("AWS_ACCESS_KEY_ID"),
                access_key_secret=os.getenv("AWS_SECRET_ACCESS_KEY")
            )
        )
    ],
    explain=False,
    tags={
        "a tag key": "some tag value",
        "another tag key": "another tag value",
    },
)

# Submit a piece of text to the model from a file in Azure BlobStorage.
import os

from modzy.edge import InputSource, AzureInputSource
inference = edge_client.inferences.perform_inference(
    "text-classifier",
    "1.0.0",
    [
        InputSource(
            key="input.txt",
            azure=AzureInputSource(
                container="my-blob-container",
                path="path/to/my-file.txt",
                storage_account="my-azure-storage-account-name"
                storage_account_key=os.getenv("AZURE_STORAGE_KEY")
            )
        )
    ],
    explain=False,
    tags={
        "a tag key": "some tag value",
        "another tag key": "another tag value",
    },
)
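
The examples above each return an Inference object; the sketch below shows one way to read the model's outputs from it. The "results.json" output key and the result.outputs / .data attribute path are assumptions for a typical text classification model, not guarantees, so check your model's output keys for the exact names.

# Read the model's outputs from the returned Inference object (the output key
# and attribute path below are assumptions; adjust them to match your model).
import json

from modzy.edge import InputSource

inference = edge_client.inferences.perform_inference(
    "text-classifier",
    "1.0.0",
    [
        InputSource(
            key="input.txt",
            text="A sample bit of text to run an inference on."
        )
    ],
)
results = json.loads(inference.result.outputs["results.json"].data)
print(results)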