discourse-ai/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-show.js
Roman Rizzi 64641b6175
FEATURE: LLM Triage support for systemless models. (#757)
* FEATURE: LLM Triage support for systemless models.

This change adds support for OSS models that do not accept system messages. LlmTriage's system message field is no longer mandatory, and the post contents are now sent in a separate user message (sketched below).

* Models using Ollama can also disable system prompts
2024-08-21 11:41:55 -03:00
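
Conceptually, making the system message optional means the triage request can be assembled with or without it. Below is a minimal illustrative sketch of that idea, assuming a hypothetical `supportsSystemMessage` flag; the plugin's actual triage logic lives in its Ruby backend, not in this file.

// Illustrative sketch only; `buildTriageMessages` and `supportsSystemMessage`
// are hypothetical names, not part of the plugin.
function buildTriageMessages(llm, systemPrompt, postContents) {
  const messages = [];

  // The system message is optional: skip it when none is configured
  // or when the model has no system message support.
  if (systemPrompt && llm.supportsSystemMessage) {
    messages.push({ role: "system", content: systemPrompt });
  }

  // The post contents are always sent as their own user message.
  messages.push({ role: "user", content: postContents });

  return messages;
}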

JavaScript

import DiscourseRoute from "discourse/routes/discourse";

export default DiscourseRoute.extend({
  async model(params) {
    // Look up the requested LLM in the list loaded by the parent route.
    const allLlms = this.modelFor("adminPlugins.show.discourse-ai-llms");
    const id = parseInt(params.id, 10);
    const record = allLlms.findBy("id", id);

    // Ensure provider_params exists so provider-specific settings
    // (e.g. Ollama's option to disable system prompts) can be bound in the form.
    record.provider_params = record.provider_params || {};
    return record;
  },

  setupController(controller, model) {
    this._super(controller, model);
    // Expose the full LLM list to the controller alongside the selected record.
    controller.set(
      "allLlms",
      this.modelFor("adminPlugins.show.discourse-ai-llms")
    );
  },
});