{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "adotBkqZSh5g"
      },
      "outputs": [],
      "source": [
        "# Install litellm into the *kernel's* environment.\n",
        "# %pip (rather than !pip) guarantees the package lands where this notebook runs.\n",
        "%pip install litellm"
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "# Demo: call three providers (OpenAI, Cohere, Replicate) through litellm's\n",
        "# unified completion() interface.\n",
        "import os\n",
        "\n",
        "from litellm import completion\n",
        "\n",
        "## set ENV variables\n",
        "# NOTE: placeholder values only -- never hardcode real API keys in a notebook;\n",
        "# prefer getpass.getpass() or a secrets manager, and redact before sharing.\n",
        "os.environ[\"OPENAI_API_KEY\"] = \"openai key\"\n",
        "os.environ[\"COHERE_API_KEY\"] = \"cohere key\"\n",
        "os.environ[\"REPLICATE_API_KEY\"] = \"replicate key\"\n",
        "messages = [{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}]\n",
        "\n",
        "# openai call\n",
        "response = completion(model=\"gpt-3.5-turbo\", messages=messages)\n",
        "\n",
        "# cohere call (each call overwrites `response`; rename if you need all three)\n",
        "response = completion(\"command-nightly\", messages)\n",
        "\n",
        "# replicate call\n",
        "response = completion(\"replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1\", messages)"
      ],
      "metadata": {
        "id": "LeOqznSgSj-z"
      },
      "execution_count": null,
      "outputs": []
    }
  ]
}