UX: Follow plugin user interface guidelines. (#628)

Roman Rizzi 2024-05-16 14:28:57 -03:00 committed by GitHub
parent 1d786fbaaf
commit d8ebed8fb5
5 changed files with 72 additions and 71 deletions


@@ -4,6 +4,7 @@ import { Input } from "@ember/component";
import { action } from "@ember/object";
import { later } from "@ember/runloop";
import { inject as service } from "@ember/service";
+import BackButton from "discourse/components/back-button";
import DButton from "discourse/components/d-button";
import { popupAjaxError } from "discourse/lib/ajax-error";
import i18n from "discourse-common/helpers/i18n";
@@ -59,6 +60,10 @@ export default class AiLlmEditor extends Component {
}
<template>
+<BackButton
+@route="adminPlugins.show.discourse-ai-llms"
+@label="discourse_ai.llms.back"
+/>
<form class="form-horizontal ai-llm-editor">
<div class="control-group">
<label>{{i18n "discourse_ai.llms.display_name"}}</label>


@@ -1,4 +1,5 @@
import Component from "@glimmer/component";
+import { concat } from "@ember/helper";
import { LinkTo } from "@ember/routing";
import icon from "discourse-common/helpers/d-icon";
import i18n from "discourse-common/helpers/i18n";
@@ -6,13 +7,15 @@ import I18n from "discourse-i18n";
import AiLlmEditor from "./ai-llm-editor";
export default class AiLlmsListEditor extends Component {
-get hasNoLLMElements() {
-this.args.llms.length !== 0;
+get hasLLMElements() {
+return this.args.llms.length !== 0;
}
<template>
<section class="ai-llms-list-editor admin-detail pull-left">
+{{#if @currentLlm}}
+<AiLlmEditor @model={{@currentLlm}} @llms={{@llms}} />
+{{else}}
<div class="ai-llms-list-editor__header">
<h3>{{i18n "discourse_ai.llms.short_title"}}</h3>
{{#unless @currentLlm.isNew}}
@@ -26,36 +29,36 @@ export default class AiLlmsListEditor extends Component {
{{/unless}}
</div>
<div class="ai-llms-list-editor__container">
-{{#if this.hasNoLLMElements}}
-<div class="ai-llms-list-editor__empty_list">
-{{icon "robot"}}
-{{i18n "discourse_ai.llms.no_llms"}}
-</div>
-{{else}}
-<div class="content-list ai-llms-list-editor__content_list">
-<ul>
+{{#if this.hasLLMElements}}
+<table class="content-list ai-persona-list-editor">
+<thead>
+<tr>
+<th>{{i18n "discourse_ai.llms.display_name"}}</th>
+<th>{{i18n "discourse_ai.llms.provider"}}</th>
+<th></th>
+</tr>
+</thead>
+<tbody>
{{#each @llms as |llm|}}
-<li>
+<tr data-persona-id={{llm.id}} class="ai-llm-list__row">
+<td><strong>{{llm.display_name}}</strong></td>
+<td>{{i18n
+(concat "discourse_ai.llms.providers." llm.provider)
+}}</td>
+<td>
<LinkTo
@route="adminPlugins.show.discourse-ai-llms.show"
current-when="true"
+class="btn btn-text btn-small"
@model={{llm}}
->
-{{llm.display_name}}
-</LinkTo>
-</li>
+>{{i18n "discourse_ai.llms.edit"}}</LinkTo>
+</td>
+</tr>
{{/each}}
-</ul>
-</div>
+</tbody>
+</table>
{{/if}}
<div class="ai-llms-list-editor__current">
{{#if @currentLlm}}
<AiLlmEditor @model={{@currentLlm}} @llms={{@llms}} />
{{/if}}
</div>
</div>
</section>
</template>
}


@@ -40,6 +40,8 @@
}
.ai-persona-editor {
padding-left: 0.5em;
+.fk-d-tooltip__icon {
+padding-left: 0.25em;
+color: var(--primary-medium);


@@ -9,28 +9,17 @@
margin: 0;
}
}
&__container {
display: flex;
-flex-direction: row;
-align-items: center;
-gap: 20px;
width: 100%;
+align-items: stretch;
}
-&__empty_list,
-&__content_list {
-min-width: 300px;
-}
-&__empty_list {
-align-content: center;
-text-align: center;
-font-size: var(--font-up-1);
-}
.ai-llm-editor {
padding-left: 0.5em;
+.ai-llm-editor-input {
+width: 350px;
+}
+.fk-d-tooltip__icon {
+padding-left: 0.25em;
+color: var(--primary-medium);
+}
}


@@ -199,15 +199,17 @@ en:
short_title: "LLMs"
no_llms: "No LLMs yet"
new: "New Model"
display_name: "Name to display:"
name: "Model name:"
provider: "Service hosting the model:"
tokenizer: "Tokenizer:"
max_prompt_tokens: "Number of tokens for the prompt:"
url: "URL of the service hosting the model:"
api_key: "API Key of the service hosting the model:"
display_name: "Name to display"
name: "Model name"
provider: "Service hosting the model"
tokenizer: "Tokenizer"
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
api_key: "API Key of the service hosting the model"
save: "Save"
+edit: "Edit"
saved: "LLM Model Saved"
+back: "Back"
hints:
max_prompt_tokens: "Max numbers of tokens for the prompt. As a rule of thumb, this should be 50% of the model's context window."
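
The max_prompt_tokens hint above comes down to simple arithmetic. As a rough sketch only (not part of this commit; the helper name and the 8192-token context window are assumed example values), the rule of thumb works out like this:

// Hypothetical sketch of the "50% of the model's context window" rule of thumb
// described in the hint above; not part of this commit.
function suggestedMaxPromptTokens(contextWindowTokens) {
  return Math.floor(contextWindowTokens * 0.5);
}

// Example: a model with an assumed 8192-token context window
console.log(suggestedMaxPromptTokens(8192)); // => 4096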