Calling DeepSeek from langchain-go


1. Checking the official documentation

(Screenshot: the LLM integrations listed in the official langchain-go documentation)

Only Ollama, OpenAI and Mistral are listed, so I went to read the code.

2. Reading the code

Start from llm, err := openai.New(url, model, token).

(Screenshot: the source of openai.New)

The New function accepts variadic Option parameters, so let's take a look at what options are available:
const (
	tokenEnvVarName        = "OPENAI_API_KEY"      //nolint:gosec
	modelEnvVarName        = "OPENAI_MODEL"        //nolint:gosec
	baseURLEnvVarName      = "OPENAI_BASE_URL"     //nolint:gosec
	baseAPIBaseEnvVarName  = "OPENAI_API_BASE"     //nolint:gosec
	organizationEnvVarName = "OPENAI_ORGANIZATION" //nolint:gosec
)

type APIType openaiclient.APIType

const (
	APITypeOpenAI  APIType = APIType(openaiclient.APITypeOpenAI)
	APITypeAzure           = APIType(openaiclient.APITypeAzure)
	APITypeAzureAD         = APIType(openaiclient.APITypeAzureAD)
)

const (
	DefaultAPIVersion = "2023-05-15"
)

type options struct {
	token        string
	model        string
	baseURL      string
	organization string
	apiType      APIType
	httpClient   openaiclient.Doer

	responseFormat *ResponseFormat

	// required when APIType is APITypeAzure or APITypeAzureAD
	apiVersion     string
	embeddingModel string

	callbackHandler callbacks.Handler
}

// Option is a functional option for the OpenAI client.
type Option func(*options)

// ResponseFormat is the response format for the OpenAI client.
type ResponseFormat = openaiclient.ResponseFormat

// ResponseFormatJSON is the JSON response format.
var ResponseFormatJSON = &ResponseFormat{Type: "json_object"} //nolint:gochecknoglobals

// WithToken passes the OpenAI API token to the client. If not set, the token
// is read from the OPENAI_API_KEY environment variable.
func WithToken(token string) Option {
	return func(opts *options) {
		opts.token = token
	}
}

// WithModel passes the OpenAI model to the client. If not set, the model
// is read from the OPENAI_MODEL environment variable.
// Required when ApiType is Azure.
func WithModel(model string) Option {
	return func(opts *options) {
		opts.model = model
	}
}

// WithEmbeddingModel passes the OpenAI model to the client. Required when ApiType is Azure.
func WithEmbeddingModel(embeddingModel string) Option {
	return func(opts *options) {
		opts.embeddingModel = embeddingModel
	}
}

// WithBaseURL passes the OpenAI base url to the client. If not set, the base url
// is read from the OPENAI_BASE_URL environment variable. If still not set in ENV
// VAR OPENAI_BASE_URL, then the default value https://api.openai.com/v1 is used.
func WithBaseURL(baseURL string) Option {
	return func(opts *options) {
		opts.baseURL = baseURL
	}
}

// WithOrganization passes the OpenAI organization to the client. If not set, the
// organization is read from the OPENAI_ORGANIZATION.
func WithOrganization(organization string) Option {
	return func(opts *options) {
		opts.organization = organization
	}
}

// WithAPIType passes the api type to the client. If not set, the default value
// is APITypeOpenAI.
func WithAPIType(apiType APIType) Option {
	return func(opts *options) {
		opts.apiType = apiType
	}
}

// WithAPIVersion passes the api version to the client. If not set, the default value
// is DefaultAPIVersion.
func WithAPIVersion(apiVersion string) Option {
	return func(opts *options) {
		opts.apiVersion = apiVersion
	}
}

// WithHTTPClient allows setting a custom HTTP client. If not set, the default value
// is http.DefaultClient.
func WithHTTPClient(client openaiclient.Doer) Option {
	return func(opts *options) {
		opts.httpClient = client
	}
}

// WithCallback allows setting a custom Callback Handler.
func WithCallback(callbackHandler callbacks.Handler) Option {
	return func(opts *options) {
		opts.callbackHandler = callbackHandler
	}
}

// WithResponseFormat allows setting a custom response format.
func WithResponseFormat(responseFormat *ResponseFormat) Option {
	return func(opts *options) {
		opts.responseFormat = responseFormat
	}
}
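As an aside, the constants at the top of this file show that every setting can also come from the environment, exactly as the doc comments on the With... functions say. A minimal sketch of that path, assuming the variables below are exported before the program starts (the endpoint and model name are placeholders for whichever DeepSeek-compatible gateway you use):

package main

import (
	"log"

	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	// Assumes these are exported in the shell first, for example:
	//   export OPENAI_BASE_URL=https://api.lkeap.cloud.tencent.com/v1
	//   export OPENAI_MODEL=deepseek-v3
	//   export OPENAI_API_KEY=<your key>
	llm, err := openai.New() // no options: token, model and base URL are read from the env vars above
	if err != nil {
		log.Fatal(err)
	}
	_ = llm // use the client exactly as in the example below
}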
This shows where all the settings live and which environment variables are read, so we can use the various With... option functions in the openai package to point the client at the DeepSeek endpoint:
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	// I'm using Tencent's DeepSeek API here.
	// Note: pass only the API root; the client appends /chat/completions itself.
	url := openai.WithBaseURL("https://api.lkeap.cloud.tencent.com/v1")
	model := openai.WithModel("deepseek-v3")
	token := openai.WithToken("API-KEY")
	llm, err := openai.New(url, model, token)
	if err != nil {
		log.Fatal(err)
	}
	ctx := context.Background()
	completion, err := llm.Call(ctx, "The first man to walk on the moon",
		llms.WithTemperature(0.8),
		llms.WithStopWords([]string{"Armstrong"}))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion)
}
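llm.Call is the older single-prompt helper; the same client also exposes the chat-style GenerateContent API, which newer langchain-go examples tend to use. A sketch of the same request through that entry point, with the same assumed Tencent endpoint, model name and placeholder key as above:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

func main() {
	llm, err := openai.New(
		openai.WithBaseURL("https://api.lkeap.cloud.tencent.com/v1"),
		openai.WithModel("deepseek-v3"),
		openai.WithToken("API-KEY"),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Build a chat-style request: a system message plus a user message.
	content := []llms.MessageContent{
		llms.TextParts(llms.ChatMessageTypeSystem, "You are a concise assistant."),
		llms.TextParts(llms.ChatMessageTypeHuman, "Who was the first man to walk on the moon?"),
	}
	resp, err := llm.GenerateContent(context.Background(), content, llms.WithTemperature(0.8))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Choices[0].Content)
}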
