feat(llmx): Add OpenAI API support #48

Merged
gewuyou merged 8 commits from dev into test 2025-05-11 21:51:03 +08:00
37 changed files with 1081 additions and 139 deletions

View File

@ -0,0 +1,216 @@
name: CI/CD Pipeline
on:
push:
branches:
- master # Branch that triggers the build
env:
# ========== Environment variables ==========
DOCKER_REGISTRY_URL: ${{vars.DOCKER_REGISTRY_URL}} # Private Docker registry URL
INTERNAL_DOCKER_REGISTRY_URL: ${{vars.INTERNAL_DOCKER_REGISTRY_URL}}
PROJECT_NAME: llmx # Project name
MAIN_COMPOSE_FILE: docker/docker-compose.master.main.yml
AGENT_COMPOSE_FILE: docker/docker-compose.master.agent.yml
SERVER_PASSWORD: ${{ secrets.SERVER_PASSWORD }} # Repository password
JCNC_GITEA_URL: ${{vars.SERVER_GITEA_URL}} # Gitea URL
RUNNER_TOOL_CACHE: /opt/tools-cache # Tool cache directory
GRADLE_CACHE_KEY: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
SPRING_PROFILES_ACTIVE: master
INTERNAL_SERVER_HOST: ${{ vars.INTERNAL_SERVER_HOST }}
INTERNAL_SERVER_PROT: ${{ vars.INTERNAL_SERVER_PROT }}
SINGAPORE_SERVER_HOST: ${{ vars.SINGAPORE_SERVER_HOST }}
SSH_PROT: ${{ vars.SSH_PROT }}
jobs:
build-and-deploy:
runs-on: ubuntu-latest
container:
image: jcnc/act-runner:latest # Custom runner image
options: --user root # Run as root (Docker access required)
steps:
# ========== 1. Checkout ==========
- name: 🛒 Checkout source code
uses: ${{env.JCNC_GITEA_URL}}/actions/checkout@v4
with:
fetch-depth: 0 # Fetch full git history (needed by some plugins)
# ========== 2. Docker environment setup ==========
- name: 🐳 Install Docker Environment
run: |
echo "=== Checking Docker installation status ==="
if ! command -v docker >/dev/null; then
echo "❌ Docker is not installed, installing..."
curl -fsSL https://get.docker.com | sh | tee docker-install.log
echo "✅ Docker installation complete"
echo "✅ Docker Compose installation complete"
else
echo " Docker already installed, version: $(docker -v)"
echo " Docker Compose already installed, version: $(docker compose version)"
fi
# ========== 3. Gradle environment setup ==========
- name: 🔧 Prepare Gradle Environment
run: |
echo "Granting execute permission to gradlew..."
chmod +x gradlew
echo "Current directory structure:"
ls -al
# ========== 4. Restore cache ==========
- name: 📦 Use Cache
id: cache
uses: ${{env.JCNC_GITEA_URL}}/actions/cache/restore@v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.cache
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
restore-keys: |
${{ runner.os }}-gradle-
${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
env:
ACTIONS_RUNNER_DEBUG: true # Enable cache debug output
- name: ⚙️ Setup Gradle
uses: ${{env.JCNC_GITEA_URL}}/gradle/actions/setup-gradle@v4
with:
gradle-version: wrapper # Use the project's own gradle-wrapper
- name: 🔧 Install sshpass
run: |
apt-get update && apt-get install -y sshpass
- name: 📦 Copy Compose File to Internal Server (with password)
run: |
sshpass -p "${{ secrets.INTERNAL_SERVER_PASSWORD }}" \
scp -P ${{ env.INTERNAL_SERVER_PROT }} -o StrictHostKeyChecking=no \
${{ env.MAIN_COMPOSE_FILE }} \
root@${{ env.INTERNAL_SERVER_HOST }}:/home/luke/deploy/llmx/
- name: 📦 Copy Compose File to Singapore Server (with password)
run: |
sshpass -p "${{ secrets.SINGAPORE_SERVER_PASSWORD }}" \
scp -P ${{ env.SSH_PROT }} -o StrictHostKeyChecking=no \
${{ env.AGENT_COMPOSE_FILE }} \
root@${{ env.SINGAPORE_SERVER_HOST }}:/home/deploy/llmx/
# ========== 5. Build stage ==========
- name: 🏗️ Build with Jib
run: |
echo "Building Docker images..."
./gradlew jib --stacktrace --build-cache --info -Dorg.gradle.caching=true -Dorg.gradle.jvmargs="-Xmx2g -Xms2g -XX:MaxMetaspaceSize=1g" | tee build.log
echo "=== Image build results ==="
docker images | grep ${{ env.PROJECT_NAME }} || true
- name: 🛑 Stop Gradle Daemon
run: |
echo "停止Gradle守护进程..."
./gradlew --stop
echo "剩余Java进程:"
ps aux | grep java || true
- name: 🛰️ Tag & Push to Internal Registry
run: |
echo "标记并推送镜像到内部服务器..."
docker tag ${{env.DOCKER_REGISTRY_URL}}/llmx-core-service:latest ${{env.INTERNAL_DOCKER_REGISTRY_URL}}/llmx-core-service:latest
docker tag ${{env.DOCKER_REGISTRY_URL}}/llmx-impl-bailian:latest ${{env.INTERNAL_DOCKER_REGISTRY_URL}}/llmx-impl-bailian:latest
echo "${{ secrets.INTERNAL_DOCKER_REGISTRY_PASSWORD }}" | docker login ${{env.INTERNAL_DOCKER_REGISTRY_URL}} -u root --password-stdin
docker push ${{env.INTERNAL_DOCKER_REGISTRY_URL}}/llmx-core-service:latest
docker push ${{env.INTERNAL_DOCKER_REGISTRY_URL}}/llmx-impl-bailian:latest
docker logout ${{env.INTERNAL_DOCKER_REGISTRY_URL}}
- name: 🛰️ Tag & Push to Singapore Registry
run: |
echo "标记并推送镜像到内部服务器..."
docker tag ${{env.DOCKER_REGISTRY_URL}}/llmx-core-service:latest ${{env.SINGAPORE_DOCKER_REGISTRY_URL}}/llmx-core-service:latest
docker tag ${{env.DOCKER_REGISTRY_URL}}/llmx-impl-bailian:latest ${{env.SINGAPORE_DOCKER_REGISTRY_URL}}/llmx-impl-bailian:latest
echo "${{ secrets.INTERNAL_DOCKER_REGISTRY_PASSWORD }}" | docker login ${{env.SINGAPORE_DOCKER_REGISTRY_URL}} -u root --password-stdin
docker push ${{env.SINGAPORE_DOCKER_REGISTRY_URL}}/llmx-core-service:latest
docker push ${{env.SINGAPORE_DOCKER_REGISTRY_URL}}/llmx-impl-bailian:latest
docker logout ${{env.SINGAPORE_DOCKER_REGISTRY_URL}}
# ========== 6. Save cache ==========
- name: 📦 Save Cache
id: cache-save
uses: ${{env.JCNC_GITEA_URL}}/actions/cache/save@v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.cache
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
restore-keys: |
${{ runner.os }}-gradle-
${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
env:
ACTIONS_RUNNER_DEBUG: true # Enable cache debug output
- name: 🧼 Cleanup Dangling Images
run: |
echo "开始清理无标签镜像..."
docker image prune -f
remote-internal-deploy:
needs: build-and-deploy
runs-on: ubuntu-latest
container:
image: jcnc/act-runner:latest # Custom runner image
options: --user root # Run as root (Docker access required)
steps:
- name: ✈️ Deploy on Internal Server
uses: ${{env.JCNC_GITEA_URL}}/appleboy/ssh-action@v1
with:
host: ${{ env.INTERNAL_SERVER_HOST }}
port: ${{ env.INTERNAL_SERVER_PROT }}
username: root
password: ${{ secrets.INTERNAL_SERVER_PASSWORD }}
script: |
cd /home/luke/deploy/llmx
echo "准备部署环境..."
chmod +x docker-compose.master.yml
echo "当前Docker状态:"
docker ps -a
echo "清理旧容器..."
docker compose -f docker-compose.master.yml down --remove-orphans
echo "清理后Docker状态:"
docker ps -a
echo "拉取最新镜像..."
docker compose -f docker-compose.master.yml pull
echo "启动新服务..."
docker compose -f docker-compose.master.yml up -d
docker compose ps
echo "=== 服务状态检查 ==="
docker compose -f docker-compose.master.yml ps
echo "开始清理无标签镜像..."
docker image prune -f
echo "清理docker-compose.master.yml"
rm -rf docker-compose.master.yml
remote-singapore-deploy:
needs: build-and-deploy
runs-on: ubuntu-latest
container:
image: jcnc/act-runner:latest # Custom runner image
options: --user root # Run as root (Docker access required)
steps:
- name: ✈️ Deploy on Singapore Server
uses: ${{env.JCNC_GITEA_URL}}/appleboy/ssh-action@v1
with:
host: ${{ env.SINGAPORE_SERVER_HOST }}
port: ${{ env.SSH_PROT }}
username: root
password: ${{ secrets.SINGAPORE_SERVER_PASSWORD }}
script: |
cd /home/deploy/llmx
echo "准备部署环境..."
chmod +x docker-compose.master.yml
echo "当前Docker状态:"
docker ps -a
echo "清理旧容器..."
docker compose -f docker-compose.master.yml down --remove-orphans
echo "清理后Docker状态:"
docker ps -a
echo "拉取最新镜像..."
docker compose -f docker-compose.master.yml pull
echo "启动新服务..."
docker compose -f docker-compose.master.yml up -d
docker compose ps
echo "=== 服务状态检查 ==="
docker compose -f docker-compose.master.yml ps
echo "开始清理无标签镜像..."
docker image prune -f
echo "清理docker-compose.master.yml"
rm -rf docker-compose.master.yml

View File

@ -84,7 +84,6 @@ subprojects {
implementation(libs.okHttp)
// forgeBoot dependency
implementation(libs.forgeBoot.core.extension)
implementation(libs.forgeBoot.core.extension)
}
}
if(project.getPropertyByBoolean(ProjectFlags.USE_DAO_DEPENDENCE)){

View File

@ -0,0 +1,32 @@
services:
llmx-core-service-banana:
image: ${DOCKER_REGISTRY_URL}/llmx-core-service
container_name: llmx-core-service
ports:
- "9002:9002"
networks:
- llmx-net-master
environment:
SPRING_PROFILES_ACTIVE: ${SPRING_PROFILES_ACTIVE}
volumes:
- llmx-core-service-volume:/app/volume
restart: always
llmx-impl-bailian-banana:
image: ${DOCKER_REGISTRY_URL}/llmx-impl-bailian
container_name: llmx-impl-bailian
ports:
- "9003:9003"
networks:
- llmx-net-master
environment:
SPRING_PROFILES_ACTIVE: ${SPRING_PROFILES_ACTIVE}
volumes:
- llmx-impl-bailian-volume:/app/volume
restart: always
networks:
llmx-net-master:
driver: bridge
volumes:
llmx-core-service-volume:
llmx-impl-bailian-volume:

View File

@ -0,0 +1,48 @@
services:
llmx-core-service-apple:
image: ${DOCKER_REGISTRY_URL}/llmx-core-service
container_name: llmx-core-service
ports:
- "9002:9002"
networks:
- llmx-net-master
environment:
SPRING_PROFILES_ACTIVE: ${SPRING_PROFILES_ACTIVE}
volumes:
- llmx-core-service-volume:/app/volume
restart: always
llmx-database:
image: postgres:16-alpine # Long-term support release; 16 is recommended
container_name: llmx-database
restart: always
ports:
- "5432:5432"
networks:
- llmx-net-master
environment:
POSTGRES_DB: llmx_db
POSTGRES_USER: llmx
POSTGRES_PASSWORD: L4s6f9y3,
volumes:
- llmx-db-volume:/var/lib/postgresql/data
llmx-impl-bailian-apple:
image: ${DOCKER_REGISTRY_URL}/llmx-impl-bailian
container_name: llmx-impl-bailian
ports:
- "9003:9003"
networks:
- llmx-net-master
environment:
SPRING_PROFILES_ACTIVE: ${SPRING_PROFILES_ACTIVE}
volumes:
- llmx-impl-bailian-volume:/app/volume
restart: always
networks:
llmx-net-master:
driver: bridge
volumes:
llmx-core-service-volume:
llmx-impl-bailian-volume:
llmx-db-volume:

View File

@ -17,7 +17,7 @@ sealed class MultiModalContent {
/**
* Image data class representing the image content modality
* @param image base64-encoded image
* @param image base64-encoded image, or a URL
*/
data class Image(val image: String) : MultiModalContent()
}
@ -35,8 +35,8 @@ sealed class MultiModalContent {
data class ChatRequest(
val prompt: String? = "",
val model: String,
val messages: List<MultiModalMessage> =listOf(),
val options: Map<String, String> = mapOf()
val messages: List<MultiModalMessage> = listOf(),
val options: Map<String, String> = mapOf(),
)
/**
@ -46,7 +46,5 @@ data class ChatRequest(
*/
data class MultiModalMessage(
val role: String, // "system", "user", "assistant"
val content: List<MultiModalContent>
val content: List<MultiModalContent>,
)
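
For illustration, a minimal construction of the request model above, assuming the Text variant wraps a single string (as the OpenAI conversion extension later in this PR suggests); the model id and image URL are placeholders, not values from this change set:

import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.request.MultiModalContent
import org.jcnc.llmx.core.common.entities.request.MultiModalMessage

val request = ChatRequest(
    model = "qwen-vl-plus",                    // hypothetical model id, for illustration only
    messages = listOf(
        MultiModalMessage(
            role = "user",
            content = listOf(
                MultiModalContent.Text("Describe this picture."),
                // Per the doc comment above, either a base64 payload or a URL is accepted here
                MultiModalContent.Image("https://example.com/cat.png")
            )
        )
    ),
    options = mapOf("temperature" to "0.7")
)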

View File

@ -32,8 +32,8 @@ class ModelRouteManager(
val modelServiceMap = modelRouteMappingRepository.findAllByEnabled(true)
.associate { it.model to it.serviceName }
log.info("modelServiceMap: $modelServiceMap")
for ((model, serviceName) in modelServiceMap) {
if (model.startsWith(model)) {
for ((modelId, serviceName) in modelServiceMap) {
if (modelId == model) {
return serviceName
}
}

View File

@ -1,7 +1,2 @@
server:
port: 9002
spring:
cloud:
nacos:
discovery:
server-addr: 49.235.96.75:9001

View File

@ -0,0 +1,23 @@
spring:
cloud:
nacos:
username: nacos
password: L4s6f9y3,
server-addr: 49.235.96.75:8848
discovery:
server-addr: ${spring.cloud.nacos.server-addr}
username: ${spring.cloud.nacos.username}
password: ${spring.cloud.nacos.password}
group: llmx-${spring.profiles.active}
namespace: ab34d859-6f1a-4f28-ac6b-27a7410ab27b
config:
file-extension: yaml
namespace: ab34d859-6f1a-4f28-ac6b-27a7410ab27b
refresh-enabled: true
extension-configs:
- data-id: ${spring.application.name}-${spring.profiles.active}.yaml
refresh: true
group: ${spring.application.name}
- data-id: common-db.yaml
refresh: true
group: infra

View File

@ -5,6 +5,12 @@ apply {
}
dependencies {
compileOnly(libs.springBootStarter.web)
// Kotlin coroutines dependency
compileOnly(libs.kotlinx.coruntes.reactor)
// OkHttp dependency
compileOnly(libs.okHttp)
api(libs.org.reactivestreams.reactiveStreams)
api(project(Modules.Core.COMMON))
// forgeBoot dependency
implementation(libs.forgeBoot.core.extension)
}

View File

@ -0,0 +1,100 @@
package org.jcnc.llmx.core.spi.adapter
import com.fasterxml.jackson.databind.ObjectMapper
import com.gewuyou.forgeboot.core.extension.log
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.flow.FlowCollector
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.reactive.asPublisher
import okhttp3.Headers.Companion.toHeaders
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.RequestBody.Companion.toRequestBody
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.reactivestreams.Publisher
import org.springframework.http.MediaType
import java.io.BufferedReader
/**
* Abstract chat stream adapter
*
* @since 2025-05-11 20:43:16
* @author gewuyou
*/
abstract class AbstractChatStreamAdapter(
private val okHttpClient: OkHttpClient,
private val objectMapper: ObjectMapper,
) : ChatStreamAdapter {
/**
* Sends a streaming chat request
*
* Sends a chat request to the given URL and extracts content from the response stream
* A coroutine dispatcher controls the concurrency and asynchronous processing
*
* @param url target URL of the request
* @param headers request headers, including authentication
* @param requestBody request body; can be any type, depending on the API
* @param extractContent function that extracts a chat response part from the response stream
* @param dispatcher coroutine dispatcher used for asynchronous or concurrent work
* @return a Publisher used to subscribe to the streamed chat response
*/
override fun sendStreamChat(
url: String,
headers: Map<String, String>,
requestBody: Any,
extractContent: (String) -> ChatResponsePart,
dispatcher: CoroutineDispatcher,
): Publisher<ChatResponsePart> = flow {
val requestJson = objectMapper.writeValueAsString(requestBody)
log.info("📤 请求参数: $requestJson")
val request = buildRequest(url, headers, requestJson)
okHttpClient.newCall(request).execute().use { response ->
if (!response.isSuccessful) {
log.error("🚨 请求失败: ${response.code} ${response.message}")
throw RuntimeException("❌ 请求失败: HTTP ${response.code}")
}
val responseBody = response.body ?: throw RuntimeException("❌ 响应体为空")
responseBody.charStream().buffered().use { reader ->
val allContent = StringBuilder()
processResponse(reader, extractContent, allContent, dispatcher)
log.info("📦 完整内容: $allContent")
}
}
}.asPublisher()
/**
* Builds the HTTP request
*
* @param url target URL of the request
* @param headers request headers, including authentication
* @param json request body as a JSON string
* @return the built HTTP request object
*/
open fun buildRequest(url: String, headers: Map<String, String>, json: String): Request {
return Request.Builder()
.url(url)
.headers(headers.toHeaders())
.post(json.toRequestBody(MediaType.APPLICATION_JSON_VALUE.toMediaType()))
.build()
}
/**
* Abstract method for processing the response stream
*
* Subclasses implement this method to parse the response stream and emit chat response parts
*
* @param reader BufferedReader over the response stream
* @param extractContent function that extracts a chat response part from the response stream
* @param allContent accumulates the full content of the response stream
* @param dispatcher coroutine dispatcher used for asynchronous or concurrent work
*/
abstract suspend fun FlowCollector<ChatResponsePart>.processResponse(
reader: BufferedReader,
extractContent: (String) -> ChatResponsePart,
allContent: StringBuilder,
dispatcher: CoroutineDispatcher
)
}
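
For reference, a minimal sketch of a concrete subclass for a hypothetical provider that streams one bare JSON object per line (NDJSON rather than SSE); the package and class name are illustrative only, not part of this PR:

package org.jcnc.llmx.impl.example.adapter // hypothetical package, for illustration only

import com.fasterxml.jackson.databind.ObjectMapper
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.currentCoroutineContext
import kotlinx.coroutines.flow.FlowCollector
import kotlinx.coroutines.isActive
import kotlinx.coroutines.withContext
import okhttp3.OkHttpClient
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.jcnc.llmx.core.spi.adapter.AbstractChatStreamAdapter
import java.io.BufferedReader

class NdjsonChatStreamAdapter(
    okHttpClient: OkHttpClient,
    objectMapper: ObjectMapper,
) : AbstractChatStreamAdapter(okHttpClient, objectMapper) {

    override suspend fun FlowCollector<ChatResponsePart>.processResponse(
        reader: BufferedReader,
        extractContent: (String) -> ChatResponsePart,
        allContent: StringBuilder,
        dispatcher: CoroutineDispatcher,
    ) {
        // Read one line per iteration on the supplied dispatcher; stop on EOF or cancellation.
        while (currentCoroutineContext().isActive) {
            val line = withContext(dispatcher) { reader.readLine() } ?: break
            if (line.isBlank()) continue
            // Each line is assumed to be a complete JSON object the caller's extractor can parse.
            val part = extractContent(line)
            allContent.append(part.content)
            if (part.content.isNotBlank()) {
                emit(part)
            }
        }
    }
}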

View File

@ -0,0 +1,38 @@
package org.jcnc.llmx.core.spi.adapter
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.Dispatchers
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.reactivestreams.Publisher
/**
* Chat stream adapter
*
* This interface defines a function for sending a streaming chat request and processing the response stream
* Its main purpose is to interact with a chat API and handle chat messages as a stream, enabling efficient real-time communication
*
* @since 2025-05-11 20:19:29
* @author gewuyou
*/
interface ChatStreamAdapter {
/**
* Sends a streaming chat request
*
* Sends a chat request to the given URL and extracts content from the response stream
* A coroutine dispatcher controls the concurrency and asynchronous processing
*
* @param url target URL of the request
* @param headers request headers, including authentication
* @param requestBody request body; can be any type, depending on the API
* @param extractContent function that extracts a chat response part from the response stream
* @param dispatcher coroutine dispatcher used for asynchronous or concurrent work
* @return a Publisher used to subscribe to the streamed chat response
*/
fun sendStreamChat(
url: String,
headers: Map<String, String>,
requestBody: Any,
extractContent: (String) -> ChatResponsePart,
dispatcher: CoroutineDispatcher = Dispatchers.IO
): Publisher<ChatResponsePart>
}
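
A caller-side usage sketch, assuming ChatResponsePart's remaining fields have defaults (as its usages elsewhere in this PR suggest); the endpoint, header and body values are placeholders:

import kotlinx.coroutines.reactive.asFlow
import kotlinx.coroutines.runBlocking
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.jcnc.llmx.core.spi.adapter.ChatStreamAdapter

fun demo(adapter: ChatStreamAdapter) = runBlocking {
    val publisher = adapter.sendStreamChat(
        url = "https://api.example.com/v1/chat",                     // hypothetical endpoint
        headers = mapOf("Authorization" to "Bearer sk-placeholder"),
        requestBody = mapOf("model" to "demo-model", "stream" to true),
        extractContent = { json -> ChatResponsePart(content = json) } // trivial extractor for the sketch
        // dispatcher defaults to Dispatchers.IO
    )
    // The returned Publisher is consumed here as a coroutine Flow.
    publisher.asFlow().collect { part -> print(part.content) }
}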

View File

@ -8,6 +8,7 @@ import org.jcnc.llmx.core.common.entities.response.EmbeddingResponse
import org.reactivestreams.Publisher
import org.springframework.http.MediaType
import org.springframework.web.bind.annotation.PostMapping
import org.springframework.web.bind.annotation.RequestBody
import org.springframework.web.bind.annotation.RequestMapping
/**
@ -31,7 +32,7 @@ interface LLMProvider {
* @return a Flow through which partial chat response data, such as messages and status updates, can be received
*/
@PostMapping("/chat", produces = [MediaType.APPLICATION_NDJSON_VALUE])
fun chat(request: ChatRequest): Publisher<ChatResponsePart>
fun chat(@RequestBody request: ChatRequest): Publisher<ChatResponsePart>
/**
* Handles a multimodal chat request
* Receives a chat request via POST and returns parts of the chat response as NDJSON
@ -43,7 +44,7 @@ interface LLMProvider {
* It uses a reactive programming model, well suited to high-concurrency and high-volume responses
*/
@PostMapping("/multimodalityChat", produces = [MediaType.APPLICATION_NDJSON_VALUE])
fun multimodalityChat(request: ChatRequest): Publisher<ChatResponsePart>
fun multimodalityChat(@RequestBody request: ChatRequest): Publisher<ChatResponsePart>
/**
* Embedding method
* Allows the caller to send an embedding request to obtain embedding vectors generated by the LLM

View File

@ -2,18 +2,14 @@ package org.jcnc.llmx.impl.baiLian.adapter
import com.fasterxml.jackson.databind.ObjectMapper
import com.gewuyou.forgeboot.core.extension.log
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.currentCoroutineContext
import kotlinx.coroutines.flow.FlowCollector
import kotlinx.coroutines.flow.flow
import okhttp3.Headers.Companion.toHeaders
import okhttp3.MediaType.Companion.toMediaType
import kotlinx.coroutines.isActive
import kotlinx.coroutines.withContext
import okhttp3.OkHttpClient
import okhttp3.Request
import okhttp3.RequestBody.Companion.toRequestBody
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.jcnc.llmx.core.spi.adapter.AbstractChatStreamAdapter
import org.springframework.stereotype.Component
import java.io.BufferedReader
@ -30,71 +26,26 @@ import java.io.BufferedReader
*/
@Component
class DashScopeAdapter(
private val okHttpClient: OkHttpClient,
private val objectMapper: ObjectMapper,
) {
okHttpClient: OkHttpClient,
objectMapper: ObjectMapper,
) : AbstractChatStreamAdapter(okHttpClient, objectMapper) {
/**
* Sends a streaming chat request
* Abstract method for processing the response stream
*
* Builds and sends a chat request, then receives and processes the response as a stream
* Mainly used to interact with the DashScope API, extracting and publishing partial chat response content
* Processes the response stream received from the API, reading it line by line
* If a line starts with "data:", its content is extracted and parsed into a ChatResponsePart object
* which is then emitted into the flow
*
* @param url request URL
* @param headers request headers
* @param requestBody request body
* @param extractContent function that extracts content from the JSON response
* @param dispatcher coroutine dispatcher, defaults to the IO dispatcher
* @return a Flow that publishes partial chat response content
* @param reader BufferedReader over the response stream
* @param extractContent function that extracts a chat response part from the response stream
* @param allContent accumulates the full content of the response stream
* @param dispatcher coroutine dispatcher used for asynchronous or concurrent work
*/
fun sendStreamChat(
url: String,
headers: Map<String, String>,
requestBody: Any,
extractContent: (String) -> ChatResponsePart,
dispatcher: CoroutineDispatcher = Dispatchers.IO,
): Flow<ChatResponsePart> = flow {
val requestJson = objectMapper.writeValueAsString(requestBody)
log.info("📤 请求参数: {}", requestJson)
val request = buildRequest(url, headers, requestJson)
okHttpClient.newCall(request).execute().use { response ->
if (!response.isSuccessful) {
log.error("🚨 DashScope 请求失败: code ${response.code} message: ${response.message} body: ${response.body}")
throw RuntimeException("❌ DashScope 请求失败: HTTP ${response.code}")
}
val responseBody = response.body ?: throw RuntimeException("❌ DashScope 响应体为空")
responseBody.charStream().buffered().use { reader ->
val allContent = StringBuilder()
try {
processResponse(dispatcher, reader, extractContent, allContent)
log.info("📦 完整内容: {}", allContent)
} catch (e: Exception) {
log.error("🚨 读取 DashScope 响应流失败: {}", e.message, e)
throw e
}
}
}
}
/**
* Processes the response stream
*
* Reads the data lines of the HTTP response, parsing them and extracting content
* Each line is read in a loop and the supplied function extracts its content
*
* @param dispatcher coroutine dispatcher
* @param reader BufferedReader over the response body
* @param extractContent function that extracts content from the JSON response
* @param allContent StringBuilder that accumulates all extracted content
*/
private suspend fun FlowCollector<ChatResponsePart>.processResponse(
dispatcher: CoroutineDispatcher,
override suspend fun FlowCollector<ChatResponsePart>.processResponse(
reader: BufferedReader,
extractContent: (String) -> ChatResponsePart,
allContent: StringBuilder,
dispatcher: CoroutineDispatcher,
) {
while (currentCoroutineContext().isActive) {
val line = withContext(dispatcher) {
@ -116,22 +67,4 @@ class DashScopeAdapter(
}
}
}
/**
* Builds the request
*
* Builds an OkHttp Request object used to send the chat request
*
* @param url request URL
* @param headers request headers
* @param json request body as a JSON string
* @return the built Request object
*/
private fun buildRequest(url: String, headers: Map<String, String>, json: String): Request {
return Request.Builder()
.url(url)
.headers(headers.toHeaders())
.post(json.toRequestBody("application/json".toMediaType()))
.build()
}
}

View File

@ -34,12 +34,12 @@ class BaiLianProvider(
* @param request chat request object containing the information needed to start a chat, such as user and session identifiers
* @return a Flow through which partial chat response data, such as messages and status updates, can be received
*/
override fun chat(@RequestBody request: ChatRequest): Publisher<ChatResponsePart> {
return baiLianModelService.streamChat(request).asPublisher()
override fun chat(request: ChatRequest): Publisher<ChatResponsePart> {
return baiLianModelService.streamChat(request)
}
override fun multimodalityChat(@RequestBody request: ChatRequest): Publisher<ChatResponsePart> {
return baiLianModelService.streamMultimodalityChat(request).asPublisher()
override fun multimodalityChat(request: ChatRequest): Publisher<ChatResponsePart> {
return baiLianModelService.streamMultimodalityChat(request)
}
/**

View File

@ -14,7 +14,7 @@ import com.fasterxml.jackson.annotation.JsonProperty
* @param requestId request ID, used to trace and debug requests
*/
@JsonIgnoreProperties(ignoreUnknown = true)
data class DashScopeResponse(
data class DashScopeMultimodalityChatResponse(
val output: Output?,
val usage: Usage?,
@JsonProperty("request_id")

View File

@ -1,9 +1,8 @@
package org.jcnc.llmx.impl.baiLian.service
import kotlinx.coroutines.flow.Flow
import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.reactivestreams.Publisher
/**
* BaiLian model service interface
@ -17,14 +16,15 @@ interface BaiLianModelService {
* Streaming chat interaction
*
* @param request chat request object containing user input, context, etc.
* @return a Flow of partial chat responses, allowing incremental processing and consumption
* @return a Publisher used to subscribe to partial chat response data
*/
fun streamChat(request: ChatRequest): Flow<ChatResponsePart>
fun streamChat(request: ChatRequest): Publisher<ChatResponsePart>
/**
* Streaming multimodal chat interaction
*
* @param request chat request object containing user input, context, etc.
* @return a Flow of partial chat responses, allowing incremental processing and consumption
* @return a Publisher used to subscribe to partial chat response data
*/
fun streamMultimodalityChat(request: ChatRequest) : Flow<ChatResponsePart>
fun streamMultimodalityChat(request: ChatRequest): Publisher<ChatResponsePart>
}

View File

@ -10,8 +10,9 @@ import org.jcnc.llmx.core.common.entities.response.Usage
import org.jcnc.llmx.impl.baiLian.adapter.DashScopeAdapter
import org.jcnc.llmx.impl.baiLian.config.entities.DashScopeProperties
import org.jcnc.llmx.impl.baiLian.entities.response.DashScopeResponse
import org.jcnc.llmx.impl.baiLian.entities.response.DashScopeMultimodalityChatResponse
import org.jcnc.llmx.impl.baiLian.service.BaiLianModelService
import org.reactivestreams.Publisher
import org.springframework.stereotype.Service
import org.springframework.util.CollectionUtils
@ -35,7 +36,7 @@ class BaiLianModelServiceImpl(
* @param request chat request object containing user input, context, etc.
* @return a Flow of partial chat responses, allowing incremental processing and consumption
*/
override fun streamChat(request: ChatRequest): Flow<ChatResponsePart> {
override fun streamChat(request: ChatRequest): Publisher<ChatResponsePart> {
// Build the request URL
val url = "${dashScopeProperties.baseUrl}${dashScopeProperties.appId}/completion"
log.info("Request URL: $url")
@ -132,7 +133,7 @@ class BaiLianModelServiceImpl(
* @param request chat request object containing user input, context, etc.
* @return a Flow of partial chat responses, allowing incremental processing and consumption
*/
override fun streamMultimodalityChat(request: ChatRequest): Flow<ChatResponsePart> {
override fun streamMultimodalityChat(request: ChatRequest): Publisher<ChatResponsePart> {
// Build the request URL
val url = dashScopeProperties.multimodalityUrl
log.info("Request URL: $url")
@ -159,7 +160,7 @@ class BaiLianModelServiceImpl(
return dashScopeAdapter.sendStreamChat(
url, headers, body,
{ json: String ->
val response = objectMapper.readValue(json, DashScopeResponse::class.java)
val response = objectMapper.readValue(json, DashScopeMultimodalityChatResponse::class.java)
val choices = response
.output
?.choices

View File

@ -1,22 +1,2 @@
server:
port: 9003
spring:
application:
name: llmx-impl-bailian
cloud:
nacos:
discovery:
server-addr: 49.235.96.75:9001 # Nacos server address
# Alibaba Cloud configuration
aliyun:
# DashScope service configuration
dash:
# Access credential configuration
scope:
access-key-id: LTAI5tHiA2Ry3XTAfoSEJW6z # Alibaba Cloud access key ID
access-key-secret: K5sf4FxZZuUgLEFnyfepBfMqFGmDcD # Alibaba Cloud access key secret
endpoint: bailian.cn-beijing.aliyuncs.com # Alibaba Cloud service endpoint
workspace-id: llm-axfkuqft05uzbjpi # Workspace ID
api-key: sk-78af4dd964a94f4cb373851064dbdc12 # API key
app-id: 3fae0bbab2e54a90a37aa02cd12dd62c # Application ID
base-url: https://dashscope.aliyuncs.com/api/v1/apps/ # Base API URL

View File

@ -0,0 +1,20 @@
spring:
cloud:
nacos:
username: nacos
password: L4s6f9y3,
server-addr: 49.235.96.75:8848
discovery:
username: ${spring.cloud.nacos.username}
password: ${spring.cloud.nacos.password}
server-addr: ${spring.cloud.nacos.server-addr}
group: llmx-${spring.profiles.active}
namespace: ab34d859-6f1a-4f28-ac6b-27a7410ab27b
config:
file-extension: yaml
namespace: ab34d859-6f1a-4f28-ac6b-27a7410ab27b
refresh-enabled: true
extension-configs:
- data-id: ${spring.application.name}-${spring.profiles.active}.yaml
refresh: true
group: ${spring.application.name}

View File

@ -0,0 +1,60 @@
package org.jcnc.llmx.impl.openai.adapter
import com.fasterxml.jackson.databind.ObjectMapper
import com.gewuyou.forgeboot.core.extension.log
import kotlinx.coroutines.CoroutineDispatcher
import kotlinx.coroutines.currentCoroutineContext
import kotlinx.coroutines.flow.FlowCollector
import kotlinx.coroutines.isActive
import kotlinx.coroutines.withContext
import okhttp3.OkHttpClient
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.jcnc.llmx.core.spi.adapter.AbstractChatStreamAdapter
import org.springframework.stereotype.Component
import java.io.BufferedReader
/**
* OpenAI adapter
*
* @since 2025-05-11 18:01:15
* @author gewuyou
*/
@Component
class OpenAiAdapter(
okHttpClient: OkHttpClient,
objectMapper: ObjectMapper,
) : AbstractChatStreamAdapter(okHttpClient, objectMapper) {
override suspend fun FlowCollector<ChatResponsePart>.processResponse(
reader: BufferedReader,
extractContent: (String) -> ChatResponsePart,
allContent: StringBuilder,
dispatcher: CoroutineDispatcher,
) {
while (currentCoroutineContext().isActive) {
val line = withContext(dispatcher) {
reader.readLine()
} ?: break
if (line.startsWith("event:")) {
// Optional: the event type could be read here
continue
}
if (line.startsWith("data:")) {
val jsonPart = line.removePrefix("data:").trim()
try {
val part = extractContent(jsonPart)
allContent.append(part.content)
log.debug("✅ 提取内容: {}", part)
if (part.content.isNotBlank()) {
emit(part)
}
} catch (e: Exception) {
log.warn("⚠️ 无法解析 JSON: {}", jsonPart, e)
}
}
}
}
}

View File

@ -0,0 +1,60 @@
package org.jcnc.llmx.impl.openai.config
import com.gewuyou.forgeboot.core.extension.log
import okhttp3.OkHttpClient
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.core.env.Environment
import java.security.SecureRandom
import java.security.cert.X509Certificate
import java.util.concurrent.TimeUnit
import javax.net.ssl.SSLContext
import javax.net.ssl.TrustManager
import javax.net.ssl.X509TrustManager
/**
* Client configuration
*
* @since 2025-03-28 17:03:48
* @author gewuyou
*/
@Configuration
class ClientConfig {
/**
* OkHttpClient
*/
@Bean
fun createOkHttpClient(environment: Environment): OkHttpClient {
val activeProfiles = environment.activeProfiles.getOrNull(0)
return when (activeProfiles) {
"dev", "test" -> {
val trustAllCerts = arrayOf<TrustManager>(
object : X509TrustManager {
override fun checkClientTrusted(chain: Array<out X509Certificate>?, authType: String?) {}
override fun checkServerTrusted(chain: Array<out X509Certificate>?, authType: String?) {}
override fun getAcceptedIssuers(): Array<X509Certificate> = arrayOf()
}
)
val sslContext = SSLContext.getInstance("TLS")
sslContext.init(null, trustAllCerts, SecureRandom())
val sslSocketFactory = sslContext.socketFactory
log.warn("警告: 开启了不安全的https请求请不要在正式环境使用!")
OkHttpClient.Builder()
.sslSocketFactory(sslSocketFactory, trustAllCerts[0] as X509TrustManager)
.hostnameVerifier { _, _ -> true }
.connectTimeout(30, TimeUnit.SECONDS)
.readTimeout(30, TimeUnit.SECONDS)
.writeTimeout(30, TimeUnit.SECONDS)
.build()
}
else -> {
OkHttpClient.Builder()
.connectTimeout(30, TimeUnit.SECONDS)
.readTimeout(30, TimeUnit.SECONDS)
.writeTimeout(30, TimeUnit.SECONDS)
.build()
}
}
}
}

View File

@ -0,0 +1,15 @@
package org.jcnc.llmx.impl.openai.config
import org.jcnc.llmx.impl.openai.config.entities.OpenAiProperties
import org.springframework.boot.context.properties.EnableConfigurationProperties
import org.springframework.context.annotation.Configuration
/**
* OpenAI configuration
*
* @since 2025-05-11 16:57:14
* @author gewuyou
*/
@Configuration
@EnableConfigurationProperties(OpenAiProperties::class)
class OpenAiConfig

View File

@ -0,0 +1,15 @@
package org.jcnc.llmx.impl.openai.config.entities
import org.springframework.boot.context.properties.ConfigurationProperties
/**
* OpenAI properties
*
* @since 2025-05-11 16:56:15
* @author gewuyou
*/
@ConfigurationProperties(prefix = "openai")
class OpenAiProperties {
var apiKey: String = ""
var baseUrl: String = ""
}
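
Under Spring Boot's relaxed binding these fields map to keys along the following lines; the values are placeholders, not credentials from this PR:

openai:
  api-key: sk-placeholder
  base-url: https://api.openai.com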

View File

@ -0,0 +1,29 @@
package org.jcnc.llmx.impl.openai.config.service
import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.reactivestreams.Publisher
/**
* OpenAI model service
*
* @since 2025-05-11 17:25:15
* @author gewuyou
*/
interface OpenAiModelService {
/**
* Streaming chat interaction
*
* @param request chat request object containing user input, context, etc.
* @return a Publisher used to subscribe to partial chat response data
*/
fun streamChat(request: ChatRequest): Publisher<ChatResponsePart>
/**
* Streaming multimodal chat interaction
*
* @param request chat request object containing user input, context, etc.
* @return a Publisher used to subscribe to partial chat response data
*/
fun streamMultimodalityChat(request: ChatRequest): Publisher<ChatResponsePart>
}

View File

@ -0,0 +1,89 @@
package org.jcnc.llmx.impl.openai.config.service.impl
import com.fasterxml.jackson.databind.ObjectMapper
import com.gewuyou.forgeboot.core.extension.log
import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.jcnc.llmx.core.common.entities.response.Usage
import org.jcnc.llmx.impl.openai.adapter.OpenAiAdapter
import org.jcnc.llmx.impl.openai.config.entities.OpenAiProperties
import org.jcnc.llmx.impl.openai.config.service.OpenAiModelService
import org.jcnc.llmx.impl.openai.entities.response.FullResponse
import org.jcnc.llmx.impl.openai.entities.response.OutputTextDelta
import org.jcnc.llmx.impl.openai.entities.response.OutputTextDone
import org.jcnc.llmx.impl.openai.extension.toOpenAIMultimodalityChatRequest
import org.reactivestreams.Publisher
import org.springframework.stereotype.Service
/**
* OpenAI model service implementation
*
* @since 2025-05-11 17:26:23
* @author gewuyou
*/
@Service
class OpenAiModelServiceImpl(
private val openAiProperties: OpenAiProperties,
private val openAiAdapter: OpenAiAdapter,
private val objectMapper: ObjectMapper,
) : OpenAiModelService {
/**
* Streaming chat interaction
*
* @param request chat request object containing user input, context, etc.
* @return a Publisher used to subscribe to partial chat response data
*/
override fun streamChat(request: ChatRequest): Publisher<ChatResponsePart> {
TODO("Not yet implemented")
}
override fun streamMultimodalityChat(request: ChatRequest): Publisher<ChatResponsePart> {
val url = "${openAiProperties.baseUrl}/v1/responses"
val headers = mapOf(
"Authorization" to "Bearer ${openAiProperties.apiKey}",
"Content-Type" to "application/json"
)
val openAIRequest = request.toOpenAIMultimodalityChatRequest()
val body = mutableMapOf<String, Any>(
"stream" to true
).also {
it.putAll(openAIRequest)
}
return openAiAdapter.sendStreamChat(
url, headers, body,
extractContent = { json ->
val node = objectMapper.readTree(json)
val type = node["type"]?.asText()
when (type) {
"response.output_text.delta" -> {
val outputTextDelta = objectMapper.treeToValue(node, OutputTextDelta::class.java)
ChatResponsePart(content = outputTextDelta.delta)
}
"response.output_text.done" -> {
val outputTextDone = objectMapper.treeToValue(node, OutputTextDone::class.java)
ChatResponsePart(content = outputTextDone.text, done = true)
}
"response.completed" -> {
val completed = objectMapper.treeToValue(node, FullResponse::class.java)
val usage = completed.usage?.let {
Usage(
promptTokens = it.inputTokens ?: 0,
completionTokens = it.outputTokens ?: 0,
totalTokens = it.totalTokens ?: 0
)
}
ChatResponsePart(content = "", done = true, usage = usage)
}
else -> {
log.debug("忽略事件类型: {}", type)
ChatResponsePart(content = "")
}
}
}
)
}
}

View File

@ -0,0 +1,33 @@
package org.jcnc.llmx.impl.openai.controller
import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.request.EmbeddingRequest
import org.jcnc.llmx.core.common.entities.response.ChatResponsePart
import org.jcnc.llmx.core.common.entities.response.EmbeddingResponse
import org.jcnc.llmx.core.spi.provider.LLMProvider
import org.jcnc.llmx.impl.openai.config.service.OpenAiModelService
import org.reactivestreams.Publisher
import org.springframework.web.bind.annotation.RestController
/**
* OpenAI provider
*
* @since 2025-05-11 16:32:51
* @author gewuyou
*/
@RestController
class OpenAiProvider(
private val openAiModelService: OpenAiModelService
): LLMProvider {
override fun chat(request: ChatRequest): Publisher<ChatResponsePart> {
TODO("Not yet implemented")
}
override fun multimodalityChat(request: ChatRequest): Publisher<ChatResponsePart> {
return openAiModelService.streamMultimodalityChat(request)
}
override fun embedding(request: EmbeddingRequest): EmbeddingResponse {
TODO("Not yet implemented")
}
}

View File

@ -0,0 +1,140 @@
package org.jcnc.llmx.impl.openai.entities.response
import com.fasterxml.jackson.annotation.JsonProperty
/** Generic SSE event envelope type */
data class SseEvent<T>(
@JsonProperty("type")
val type: String,
@JsonProperty("data")
val data: T
)
/** Shared structure for response.created, response.in_progress and response.completed */
data class FullResponse(
@JsonProperty("id")
val id: String,
@JsonProperty("object")
val objectType: String,
@JsonProperty("created_at")
val createdAt: Long,
@JsonProperty("status")
val status: String,
@JsonProperty("model")
val model: String,
@JsonProperty("output")
val output: List<MessageItem>?,
@JsonProperty("service_tier")
val serviceTier: String?,
@JsonProperty("store")
val store: Boolean?,
@JsonProperty("temperature")
val temperature: Double?,
@JsonProperty("top_p")
val topP: Double?,
@JsonProperty("tool_choice")
val toolChoice: String?,
@JsonProperty("tools")
val tools: List<Any>?,
@JsonProperty("text")
val text: TextFormat?,
@JsonProperty("usage")
val usage: UsageInfo?
)
data class TextFormat(
@JsonProperty("format")
val format: FormatType
)
data class FormatType(
@JsonProperty("type")
val type: String
)
data class UsageInfo(
@JsonProperty("input_tokens")
val inputTokens: Int?,
@JsonProperty("output_tokens")
val outputTokens: Int?,
@JsonProperty("total_tokens")
val totalTokens: Int?
)
/** Shared structure for response.output_item.added and response.output_item.done */
data class OutputItemEvent(
@JsonProperty("output_index")
val outputIndex: Int,
@JsonProperty("item")
val item: MessageItem
)
data class MessageItem(
@JsonProperty("id")
val id: String,
@JsonProperty("type")
val type: String,
@JsonProperty("status")
val status: String,
@JsonProperty("content")
val content: List<MessageContent>,
@JsonProperty("role")
val role: String
)
data class MessageContent(
@JsonProperty("type")
val type: String,
@JsonProperty("text")
val text: String,
@JsonProperty("annotations")
val annotations: List<Any>?
)
/** response.output_text.delta */
data class OutputTextDelta(
@JsonProperty("item_id")
val itemId: String,
@JsonProperty("output_index")
val outputIndex: Int,
@JsonProperty("content_index")
val contentIndex: Int,
@JsonProperty("delta")
val delta: String
)
/** response.output_text.done */
data class OutputTextDone(
@JsonProperty("item_id")
val itemId: String,
@JsonProperty("output_index")
val outputIndex: Int,
@JsonProperty("content_index")
val contentIndex: Int,
@JsonProperty("text")
val text: String
)
/** response.content_part.done */
data class ContentPartDone(
@JsonProperty("item_id")
val itemId: String,
@JsonProperty("output_index")
val outputIndex: Int,
@JsonProperty("content_index")
val contentIndex: Int,
@JsonProperty("part")
val part: MessageContent
)
/** response.content_part.added */
data class ContentPartAdded(
@JsonProperty("item_id")
val itemId: String,
@JsonProperty("output_index")
val outputIndex: Int,
@JsonProperty("content_index")
val contentIndex: Int,
@JsonProperty("part")
val part: MessageContent
)
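
A small deserialization sketch for the delta event, assuming jackson-module-kotlin is on the classpath (typical for a Kotlin Spring Boot service); the JSON literal is hand-written to match the field names above, not a captured API response:

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.kotlin.registerKotlinModule

fun main() {
    val mapper = ObjectMapper().registerKotlinModule()
    // Hypothetical payload shaped after OutputTextDelta's @JsonProperty names
    val json = """{"item_id":"msg_1","output_index":0,"content_index":0,"delta":"Hel"}"""
    val delta = mapper.readValue(json, OutputTextDelta::class.java)
    println(delta.delta) // prints: Hel
}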

View File

@ -0,0 +1,36 @@
package org.jcnc.llmx.impl.openai.extension
import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.request.MultiModalContent
fun ChatRequest.toOpenAIMultimodalityChatRequest(): Map<String, Any> {
val input = this.messages.map { msg ->
mapOf(
"role" to msg.role,
"content" to msg.content.map { content ->
when (content) {
is MultiModalContent.Text -> mapOf(
"type" to "input_text",
"text" to content.text
)
is MultiModalContent.Image -> mapOf(
"type" to "input_image",
"image_url" to content.image
)
}
}
)
}
val base = mutableMapOf(
"model" to model,
"input" to input,
)
// Extra parameters (e.g. temperature, max_tokens)
base.putAll(options)
return base
}
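
A quick sketch of what the conversion yields for a request with one text part and one image part; the model id and URL are placeholders:

import org.jcnc.llmx.core.common.entities.request.ChatRequest
import org.jcnc.llmx.core.common.entities.request.MultiModalContent
import org.jcnc.llmx.core.common.entities.request.MultiModalMessage
import org.jcnc.llmx.impl.openai.extension.toOpenAIMultimodalityChatRequest

val body = ChatRequest(
    model = "gpt-4o",                         // placeholder model id
    messages = listOf(
        MultiModalMessage(
            role = "user",
            content = listOf(
                MultiModalContent.Text("What is in this image?"),
                MultiModalContent.Image("https://example.com/cat.png")
            )
        )
    ),
    options = mapOf("temperature" to "0.2")
).toOpenAIMultimodalityChatRequest()
// body is roughly:
// {model=gpt-4o,
//  input=[{role=user, content=[{type=input_text, text=What is in this image?},
//                              {type=input_image, image_url=https://example.com/cat.png}]}],
//  temperature=0.2}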

View File

@ -0,0 +1,2 @@
server:
port: 8083

View File

@ -0,0 +1,2 @@
server:
port: 9004

View File

@ -0,0 +1,2 @@
server:
port: 9004

View File

@ -1 +0,0 @@
spring.application.name=llmx-impl-openai

View File

@ -0,0 +1,3 @@
spring:
profiles:
active: dev

View File

@ -0,0 +1,22 @@
spring:
cloud:
nacos:
ip: 127.0.0.1
username: nacos
password: L4s6f9y3,
server-addr: 49.235.96.75:8848
discovery:
ip: ${spring.cloud.nacos.ip}
username: ${spring.cloud.nacos.username}
password: ${spring.cloud.nacos.password}
server-addr: ${spring.cloud.nacos.server-addr}
group: llmx-${spring.profiles.active}
namespace: a17d57ec-4fd9-44c7-a617-7f6003a0b332
config:
file-extension: yaml
namespace: a17d57ec-4fd9-44c7-a617-7f6003a0b332
refresh-enabled: true
extension-configs:
- data-id: ${spring.application.name}-${spring.profiles.active}.yaml
refresh: true
group: ${spring.application.name}

View File

@ -0,0 +1,20 @@
spring:
cloud:
nacos:
username: nacos
password: L4s6f9y3,
server-addr: 49.235.96.75:8848
discovery:
username: ${spring.cloud.nacos.username}
password: ${spring.cloud.nacos.password}
server-addr: ${spring.cloud.nacos.server-addr}
group: llmx-${spring.profiles.active}
namespace: ab34d859-6f1a-4f28-ac6b-27a7410ab27b
config:
file-extension: yaml
namespace: ab34d859-6f1a-4f28-ac6b-27a7410ab27b
refresh-enabled: true
extension-configs:
- data-id: ${spring.application.name}-${spring.profiles.active}.yaml
refresh: true
group: ${spring.application.name}

View File

@ -0,0 +1,20 @@
spring:
cloud:
nacos:
username: nacos
password: L4s6f9y3,
server-addr: llmx-nacos:8848
discovery:
username: ${spring.cloud.nacos.username}
password: ${spring.cloud.nacos.password}
server-addr: ${spring.cloud.nacos.server-addr}
group: llmx-${spring.profiles.active}
namespace: 54a289f7-5f4a-4c83-8a0a-199defa35458
config:
file-extension: yaml
namespace: 54a289f7-5f4a-4c83-8a0a-199defa35458
refresh-enabled: true
extension-configs:
- data-id: ${spring.application.name}-${spring.profiles.active}.yaml
refresh: true
group: ${spring.application.name}

View File

@ -0,0 +1,5 @@
spring:
application:
name: llmx-impl-openai
profiles:
active: dev