def convert_params()

in aidial_adapter_bedrock/llm/model/ai21.py


from typing import Any, Dict

# ModelParameters and DEFAULT_MAX_TOKENS_AI21 are provided elsewhere in the adapter.


def convert_params(params: ModelParameters) -> Dict[str, Any]:
    """Convert OpenAI-style model parameters into AI21 request parameters."""
    ret: Dict[str, Any] = {}

    if params.max_tokens is not None:
        ret["maxTokens"] = params.max_tokens
    else:
        # The default for max tokens is 16, which is too small for most use
        # cases, so fall back to a more reasonable default.
        ret["maxTokens"] = DEFAULT_MAX_TOKENS_AI21

    if params.temperature is not None:
        # AI21 temperature ranges from 0.0 to 1.0, while OpenAI temperature
        # ranges from 0.0 to 2.0, so scale down by 2x to match the AI21 range.
        ret["temperature"] = params.temperature / 2.0

    if params.top_p is not None:
        ret["topP"] = params.top_p

    if params.stop:
        ret["stopSequences"] = params.stop

    # NOTE: AI21 has a "numResults" parameter; however, we emulate multiple
    # results via multiple calls to support all models uniformly.

    return ret
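
A minimal usage sketch of the conversion, assuming ModelParameters exposes only the fields read above. The stand-in dataclass and the DEFAULT_MAX_TOKENS_AI21 value below are illustrative assumptions, not the adapter's actual definitions:

from dataclasses import dataclass, field
from typing import List, Optional

DEFAULT_MAX_TOKENS_AI21 = 512  # illustrative value, not necessarily the adapter's


@dataclass
class StubModelParameters:
    # Hypothetical stand-in mirroring only the fields convert_params reads.
    max_tokens: Optional[int] = None
    temperature: Optional[float] = None
    top_p: Optional[float] = None
    stop: List[str] = field(default_factory=list)


body = convert_params(StubModelParameters(temperature=1.0, stop=["\n\n"]))
# With the values above this yields:
# {"maxTokens": 512, "temperature": 0.5, "stopSequences": ["\n\n"]}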