
Commit

Add Azure OpenAI docs
slundberg committed Jun 2, 2023
1 parent a3b0312 commit 0f7be47
Showing 5 changed files with 191 additions and 78 deletions.
2 changes: 1 addition & 1 deletion guidance/__init__.py
@@ -1,4 +1,4 @@
__version__ = "0.0.60"
__version__ = "0.0.61"

import types
import sys
3 changes: 1 addition & 2 deletions guidance/llms/__init__.py
@@ -1,8 +1,7 @@
from ._openai import OpenAI
from ._openai import OpenAI, MSALOpenAI, AzureOpenAI
from ._transformers import Transformers
from ._mock import Mock
from ._llm import LLM, LLMSession, SyncSession
from ._azure_openai import AzureOpenAI
from ._deep_speed import DeepSpeed
from . import transformers
from . import caches
75 changes: 0 additions & 75 deletions guidance/llms/_azure_openai.py

This file was deleted.

80 changes: 80 additions & 0 deletions guidance/llms/_openai.py
@@ -553,3 +553,83 @@ async def __call__(self, prompt, stop=None, stop_regex=None, temperature=None, n
return [llm_cache[key]]

return llm_cache[key]


import os
import atexit
import json
import platformdirs
from ._openai import OpenAI

class AzureOpenAI(OpenAI):
    def __init__(self, *args, **kwargs):
        raise NotImplementedError("The AzureOpenAI class has been merged with the OpenAI class for Azure usage. Please use the OpenAI class instead: https://guidance.readthedocs.io/en/latest/example_notebooks/api_examples/llms/OpenAI.html")

class MSALOpenAI(OpenAI):
    """ Microsoft Authentication Library (MSAL) OpenAI style integration.
    Warning: This class is not finalized and may change in the future.
    """

    llm_name: str = "azure_openai"

    def __init__(self, model=None, client_id=None, authority=None, caching=True, max_retries=5, max_calls_per_min=60, token=None,
                 endpoint=None, scopes=None, temperature=0.0, chat_mode="auto"):

        assert endpoint is not None, "An endpoint must be specified!"

        # build a standard OpenAI LLM object
        super().__init__(
            model=model, caching=caching, max_retries=max_retries, max_calls_per_min=max_calls_per_min,
            token=token, endpoint=endpoint, temperature=temperature, chat_mode=chat_mode
        )

        self.client_id = client_id
        self.authority = authority
        self.scopes = scopes

        from msal import PublicClientApplication, SerializableTokenCache
        self._token_cache = SerializableTokenCache()
        self._token_cache_path = os.path.join(platformdirs.user_cache_dir("guidance"), "_azure_openai.token")
        self._app = PublicClientApplication(client_id=self.client_id, authority=self.authority, token_cache=self._token_cache)
        if os.path.exists(self._token_cache_path):
            with open(self._token_cache_path, 'r') as f:
                self._token_cache.deserialize(f.read())

        self._rest_headers["X-ModelType"] = self.model_name

    @property
    def token(self):
        return self._get_token()

    @token.setter
    def token(self, value):
        pass # ignored for now

    def _get_token(self):
        accounts = self._app.get_accounts()
        result = None

        if accounts:
            # Assuming the end user chose this one
            chosen = accounts[0]

            # Now let's try to find a token in cache for this account
            result = self._app.acquire_token_silent(self.scopes, account=chosen)

        if not result:
            # No suitable token exists in cache, so let's get a new one from AAD.
            flow = self._app.initiate_device_flow(scopes=self.scopes)

            if "user_code" not in flow:
                raise ValueError(
                    "Failed to create device flow. Err: %s" % json.dumps(flow, indent=4))

            print(flow["message"])

            result = self._app.acquire_token_by_device_flow(flow)

        # save the acquired token
        with open(self._token_cache_path, "w") as f:
            f.write(self._token_cache.serialize())

        return result["access_token"]
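
The MSALOpenAI class added above authenticates through an interactive AAD device-code flow rather than an API key. A minimal usage sketch follows, assuming a hypothetical Azure AD app registration (client ID, authority, and scope) and a hypothetical endpoint URL; the class is explicitly marked as not finalized, so the constructor arguments may change.

import guidance

# all identifiers below are hypothetical placeholders for illustration only
llm = guidance.llms.MSALOpenAI(
    'text-davinci-003',                                         # model name
    client_id='00000000-0000-0000-0000-000000000000',           # AAD app (client) ID
    authority='https://login.microsoftonline.com/your-tenant',  # AAD authority
    scopes=['api://your-app/.default'],                         # scopes requested for the access token
    endpoint='https://example-endpoint.openai.azure.com',       # Azure OpenAI endpoint (required)
)

# the first call prints the device-code message from _get_token() and waits for sign-in
program = guidance("""My favorite flavor is{{gen 'flavor' max_tokens=10 stop="."}}""", llm=llm)
program()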
109 changes: 109 additions & 0 deletions notebooks/api_examples/llms/OpenAI.ipynb
@@ -0,0 +1,109 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# `OpenAI` API examples\n",
"\n",
"This notebook contains examples of how to use the `OpenAI` LLM."
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Basic OpenAI usage"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div id=\"guidance-stop-button-836b0e69-9215-48a7-be0f-33edbb2a754b\" style=\"cursor: pointer; margin: 0px; display: none; float: right; padding: 3px; border-radius: 4px 4px 4px 4px; border: 0px solid rgba(127, 127, 127, 1); padding-left: 10px; padding-right: 10px; font-size: 13px; background-color: rgba(127, 127, 127, 0.25);\">Stop program</div><div id=\"guidance-content-836b0e69-9215-48a7-be0f-33edbb2a754b\"><pre style='margin: 0px; padding: 0px; padding-left: 8px; margin-left: -8px; border-radius: 0px; border-left: 1px solid rgba(127, 127, 127, 0.2); white-space: pre-wrap; font-family: ColfaxAI, Arial; font-size: 15px; line-height: 23px;'>My favorite flavor is<span style='background-color: rgba(0, 165, 0, 0.25); opacity: 1.0; display: inline;' title='{{gen &#x27;flavor&#x27; max_tokens=10 stop=&quot;.&quot;}}'> chocolate</span></pre></div>\n",
"<script type=\"text/javascript\">(()=>{var t={296:(t,e,n)=>{var i=NaN,o=\"[object Symbol]\",r=/^\\s+|\\s+$/g,a=/^[-+]0x[0-9a-f]+$/i,s=/^0b[01]+$/i,c=/^0o[0-7]+$/i,d=parseInt,u=\"object\"==typeof n.g&&n.g&&n.g.Object===Object&&n.g,l=\"object\"==typeof self&&self&&self.Object===Object&&self,f=u||l||Function(\"return this\")(),h=Object.prototype.toString,p=Math.max,m=Math.min,g=function(){return f.Date.now()};function b(t){var e=typeof t;return!!t&&(\"object\"==e||\"function\"==e)}function y(t){if(\"number\"==typeof t)return t;if(function(t){return\"symbol\"==typeof t||function(t){return!!t&&\"object\"==typeof t}(t)&&h.call(t)==o}(t))return i;if(b(t)){var e=\"function\"==typeof t.valueOf?t.valueOf():t;t=b(e)?e+\"\":e}if(\"string\"!=typeof t)return 0===t?t:+t;t=t.replace(r,\"\");var n=s.test(t);return n||c.test(t)?d(t.slice(2),n?2:8):a.test(t)?i:+t}t.exports=function(t,e,n){var i,o,r,a,s,c,d=0,u=!1,l=!1,f=!0;if(\"function\"!=typeof t)throw new TypeError(\"Expected a function\");function h(e){var n=i,r=o;return i=o=void 0,d=e,a=t.apply(r,n)}function v(t){var n=t-c;return void 0===c||n>=e||n<0||l&&t-d>=r}function _(){var t=g();if(v(t))return w(t);s=setTimeout(_,function(t){var n=e-(t-c);return l?m(n,r-(t-d)):n}(t))}function w(t){return s=void 0,f&&i?h(t):(i=o=void 0,a)}function j(){var t=g(),n=v(t);if(i=arguments,o=this,c=t,n){if(void 0===s)return function(t){return d=t,s=setTimeout(_,e),u?h(t):a}(c);if(l)return s=setTimeout(_,e),h(c)}return void 0===s&&(s=setTimeout(_,e)),a}return e=y(e)||0,b(n)&&(u=!!n.leading,r=(l=\"maxWait\"in n)?p(y(n.maxWait)||0,e):r,f=\"trailing\"in n?!!n.trailing:f),j.cancel=function(){void 0!==s&&clearTimeout(s),d=0,i=c=o=s=void 0},j.flush=function(){return void 0===s?a:w(g())},j}},777:t=>{var e,n,i=Math.max,o=(e=function(t,e){return function(t,e,n){if(\"function\"!=typeof t)throw new TypeError(\"Expected a function\");return setTimeout((function(){t.apply(void 0,n)}),1)}(t,0,e)},n=i(void 0===n?e.length-1:n,0),function(){for(var t=arguments,o=-1,r=i(t.length-n,0),a=Array(r);++o<r;)a[o]=t[n+o];o=-1;for(var s=Array(n+1);++o<n;)s[o]=t[o];return s[n]=a,function(t,e,n){switch(n.length){case 0:return t.call(e);case 1:return t.call(e,n[0]);case 2:return t.call(e,n[0],n[1]);case 3:return t.call(e,n[0],n[1],n[2])}return t.apply(e,n)}(e,this,s)});t.exports=o}},e={};function n(i){var o=e[i];if(void 0!==o)return o.exports;var r=e[i]={exports:{}};return t[i](r,r.exports,n),r.exports}n.n=t=>{var e=t&&t.__esModule?()=>t.default:()=>t;return n.d(e,{a:e}),e},n.d=(t,e)=>{for(var i in e)n.o(e,i)&&!n.o(t,i)&&Object.defineProperty(t,i,{enumerable:!0,get:e[i]})},n.g=function(){if(\"object\"==typeof globalThis)return globalThis;try{return this||new Function(\"return this\")()}catch(t){if(\"object\"==typeof window)return window}}(),n.o=(t,e)=>Object.prototype.hasOwnProperty.call(t,e),(()=>{\"use strict\";const t=t=>{const e=new Set;do{for(const n of Reflect.ownKeys(t))e.add([t,n])}while((t=Reflect.getPrototypeOf(t))&&t!==Object.prototype);return e};function e(e,{include:n,exclude:i}={}){const o=t=>{const e=e=>\"string\"==typeof e?t===e:e.test(t);return n?n.some(e):!i||!i.some(e)};for(const[n,i]of t(e.constructor.prototype)){if(\"constructor\"===i||!o(i))continue;const t=Reflect.getOwnPropertyDescriptor(n,i);t&&\"function\"==typeof t.value&&(e[i]=e[i].bind(e))}return e}var i=n(777),o=n.n(i),r=n(296),a=n.n(r);class s{constructor(t,n){e(this),this.interfaceId=t,this.callbackMap={},this.data={},this.pendingData={},this.jcomm=new 
c(\"guidance_interface_target_\"+this.interfaceId,this.updateData,\"open\"),this.debouncedSendPendingData500=a()(this.sendPendingData,500),this.debouncedSendPendingData1000=a()(this.sendPendingData,1e3),n&&o()(n)}send(t,e){this.addPendingData(t,e),this.sendPendingData()}sendEvent(t){for(const e of Object.keys(t))this.addPendingData(e,t[e]);this.sendPendingData()}debouncedSendEvent500(t){for(const e of Object.keys(t))this.addPendingData(e,t[e]);this.debouncedSendPendingData500()}debouncedSend500(t,e){this.addPendingData(t,e),this.debouncedSendPendingData500()}debouncedSend1000(t,e){this.addPendingData(t,e),this.debouncedSendPendingData1000()}addPendingData(t,e){Array.isArray(t)||(t=[t]);for(const n in t)this.pendingData[t[n]]=e}updateData(t){t=JSON.parse(t.data);for(const e in t)this.data[e]=t[e];for(const e in t)e in this.callbackMap&&this.callbackMap[e](this.data[e])}subscribe(t,e){this.callbackMap[t]=e,o()((e=>this.callbackMap[t](this.data[t])))}sendPendingData(){this.jcomm.send_data(this.pendingData),this.pendingData={}}}class c{constructor(t,e,n=\"open\"){this._fire_callback=this._fire_callback.bind(this),this._register=this._register.bind(this),this.jcomm=void 0,this.callback=e,void 0!==window.Jupyter?\"register\"===n?Jupyter.notebook.kernel.comm_manager.register_target(t,this._register):(this.jcomm=Jupyter.notebook.kernel.comm_manager.new_comm(t),this.jcomm.on_msg(this._fire_callback)):void 0!==window._mgr&&(\"register\"===n?window._mgr.widgetManager.proxyKernel.registerCommTarget(t,this._register):(this.jcomm=window._mgr.widgetManager.proxyKernel.createComm(t),this.jcomm.open({},\"\"),this.jcomm.onMsg=this._fire_callback))}send_data(t){void 0!==this.jcomm?this.jcomm.send(t):console.error(\"Jupyter comm module not yet loaded! So we can't send the message.\")}_register(t,e){this.jcomm=t,this.jcomm.on_msg(this._fire_callback)}_fire_callback(t){this.callback(t.content.data)}}class d{constructor(t,n){e(this),this.id=t,this.comm=new s(t),this.comm.subscribe(\"append\",this.appendData),this.comm.subscribe(\"replace\",this.replaceData),this.comm.subscribe(\"event\",this.eventOccurred),this.element=document.getElementById(\"guidance-content-\"+t),this.stop_button=document.getElementById(\"guidance-stop-button-\"+t),this.stop_button.onclick=()=>this.comm.send(\"event\",\"stop\")}appendData(t){t&&(this.stop_button.style.display=\"inline-block\",this.element.innerHTML+=t)}replaceData(t){t&&(this.stop_button.style.display=\"inline-block\",this.element.innerHTML=t)}eventOccurred(t){\"complete\"===t&&(this.stop_button.style.display=\"none\")}}window._guidanceDisplay=function(t,e){return new d(t,e)}})()})();; window._guidanceDisplay(\"836b0e69-9215-48a7-be0f-33edbb2a754b\");</script>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import guidance\n",
"\n",
"# this relies on the environment variable OPENAI_API_KEY being set\n",
"llm = guidance.llms.OpenAI('text-davinci-003')\n",
"\n",
"program = guidance(\"\"\"My favorite flavor is{{gen 'flavor' max_tokens=10 stop=\".\"}}\"\"\", llm=llm)\n",
"program()"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Basic Azure OpenAI usage"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"import guidance\n",
"\n",
"# here we explicitly pass in the API key\n",
"llm = guidance.llms.OpenAI(\n",
" 'text-davinci-003',\n",
" api_type='azure',\n",
" api_key='sk-...',\n",
" api_base='https://example-endpoint.openai.azure.com',\n",
" api_version='2023-05-15',\n",
" deployment_id='...'\n",
")\n",
"\n",
"program = guidance(\"\"\"My favorite flavor is{{gen 'flavor' max_tokens=10 stop=\".\"}}\"\"\", llm=llm)\n",
"program()"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"<hr style=\"height: 1px; opacity: 0.5; border: none; background: #cccccc;\">\n",
"<div style=\"text-align: center; opacity: 0.5\">Have an idea for more helpful examples? Pull requests that add to this documentation notebook are encouraged!</div>"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "adatest",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.4"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
