智能专家3号
Some checks failed
Flask 提示词大师 - CI/CD 流水线 / 代码质量检查 (push) Has been cancelled
Flask 提示词大师 - CI/CD 流水线 / 单元测试 (push) Has been cancelled
Flask 提示词大师 - CI/CD 流水线 / 集成测试 (push) Has been cancelled
Flask 提示词大师 - CI/CD 流水线 / 构建Docker镜像 (push) Has been cancelled
Flask 提示词大师 - CI/CD 流水线 / 部署到测试环境 (push) Has been cancelled
Flask 提示词大师 - CI/CD 流水线 / 部署到生产环境 (push) Has been cancelled
Flask 提示词大师 - CI/CD 流水线 / 部署监控系统 (push) Has been cancelled

This commit is contained in:
rjb
2026-03-01 09:53:12 +08:00
parent db43954dd5
commit 34e3ee213e
10 changed files with 612 additions and 13 deletions

View File

@@ -5,15 +5,18 @@
"""
from flask import Blueprint, render_template, request, jsonify
from openai import OpenAI
import hashlib
import json
import logging
import os
import time
from src.flask_prompt_master import db
from src.flask_prompt_master.models.models import User, Prompt
from src.flask_prompt_master.models.history_models import PromptHistory, UserStatistics
logger = logging.getLogger(__name__)
expert_generate_2_bp = Blueprint('expert_generate_2', __name__)
_dedup_cache = {}
client = OpenAI(
api_key=os.environ.get('LLM_API_KEY') or 'sk-fdf7cc1c73504e628ec0119b7e11b8cc',
@@ -154,6 +157,15 @@ def expert_generate_2_api():
if not user_input:
return jsonify({'code': 400, 'message': '请输入您的需求', 'data': None})
uid = _get_user_id()
req_key = (uid, hashlib.md5(user_input.encode()).hexdigest())
now_ts = time.time()
if req_key in _dedup_cache and (now_ts - _dedup_cache[req_key]) < 8:
return jsonify({'code': 429, 'message': '请勿重复提交,请稍后再试', 'data': None})
_dedup_cache[req_key] = now_ts
if len(_dedup_cache) > 500:
_dedup_cache.clear()
# 第一阶段:意图分析
intent_response = client.chat.completions.create(
model="deepseek-chat",
@@ -206,7 +218,7 @@ def expert_generate_2_api():
if not generated_prompt:
return jsonify({'code': 500, 'message': '生成失败,请重试', 'data': None})
user_id = _get_user_id()
user_id = uid
try:
p = Prompt(input_text=user_input, generated_text=generated_prompt, user_id=user_id)
db.session.add(p)

View File

@@ -6,15 +6,18 @@
"""
from flask import Blueprint, render_template, request, jsonify
from openai import OpenAI
import hashlib
import json
import logging
import os
import time
from src.flask_prompt_master import db
from src.flask_prompt_master.models.models import User, Prompt
from src.flask_prompt_master.models.history_models import PromptHistory, UserStatistics
logger = logging.getLogger(__name__)
expert_generate_3_bp = Blueprint('expert_generate_3', __name__)
_dedup_cache = {}
_llm_client = OpenAI(
api_key=os.environ.get('LLM_API_KEY') or 'sk-fdf7cc1c73504e628ec0119b7e11b8cc',
@@ -153,6 +156,26 @@ def expert_generate_3_api():
raw_input = (payload.get('input_text') or '').strip()
if not raw_input:
return jsonify({'code': 400, 'message': '请输入您的需求', 'data': None})
temperature = payload.get('temperature')
temperature = float(temperature) if temperature is not None else 0.7
temperature = max(0.0, min(2.0, temperature))
max_tokens = payload.get('max_tokens')
max_tokens = int(max_tokens) if max_tokens is not None else 1000
max_tokens = max(100, min(4000, max_tokens))
timeout = payload.get('timeout')
timeout = int(timeout) if timeout is not None else 60
timeout = max(10, min(300, timeout))
uid = _resolve_user_id()
req_key = (uid, hashlib.md5(raw_input.encode()).hexdigest())
now_ts = time.time()
if req_key in _dedup_cache and (now_ts - _dedup_cache[req_key]) < 8:
return jsonify({'code': 429, 'message': '请勿重复提交,请稍后再试', 'data': None})
_dedup_cache[req_key] = now_ts
if len(_dedup_cache) > 500:
_dedup_cache.clear()
resp1 = _llm_client.chat.completions.create(
model="deepseek-chat",
messages=[
@@ -160,7 +183,7 @@ def expert_generate_3_api():
{"role": "user", "content": raw_input}
],
temperature=0.1,
timeout=60
timeout=timeout
)
intent_raw = (resp1.choices[0].message.content or "").strip()
intent_raw = intent_raw.replace('```json', '').replace('```', '').strip()
@@ -189,14 +212,13 @@ def expert_generate_3_api():
{"role": "system", "content": tpl.format(analysis=analysis_str)},
{"role": "user", "content": raw_input}
],
temperature=0.7,
max_tokens=1000,
timeout=60
temperature=temperature,
max_tokens=max_tokens,
timeout=timeout
)
result_prompt = (resp2.choices[0].message.content or "").strip()
if not result_prompt:
return jsonify({'code': 500, 'message': '生成失败,请重试', 'data': None})
uid = _resolve_user_id()
try:
db.session.add(Prompt(input_text=raw_input, generated_text=result_prompt, user_id=uid))
db.session.commit()

View File

@@ -80,13 +80,16 @@
<script>
document.getElementById('expertPromptForm').addEventListener('submit', async function(e) {
e.preventDefault();
var form = e.target;
if (form.dataset.submitting === '1') return;
form.dataset.submitting = '1';
var generateBtn = document.getElementById('generateBtn');
generateBtn.disabled = true;
var input_text = document.getElementById('input_text').value.trim();
var loadingIndicator = document.getElementById('loadingIndicator');
var resultCard = document.getElementById('resultCard');
var generateBtn = document.getElementById('generateBtn');
if (!input_text) { alert('请输入您的需求'); return; }
if (!input_text) { form.dataset.submitting = ''; generateBtn.disabled = false; alert('请输入您的需求'); return; }
loadingIndicator.classList.remove('d-none');
generateBtn.disabled = true;
resultCard.classList.add('d-none');
try {
var response = await fetch('/api/expert-generate-2/generate', {
@@ -114,6 +117,7 @@ document.getElementById('expertPromptForm').addEventListener('submit', async fun
} finally {
loadingIndicator.classList.add('d-none');
generateBtn.disabled = false;
form.dataset.submitting = '';
}
});
function copyToClipboard(text) {

View File

@@ -22,6 +22,32 @@
<textarea class="form-control" id="input_text" name="input_text" rows="4" required></textarea>
<div class="form-text">详细描述您的需求,系统将进行专业分析并生成高质量提示词</div>
</div>
<div class="mb-3">
<button class="btn btn-link btn-sm p-0 text-secondary" type="button" data-bs-toggle="collapse" data-bs-target="#advancedSettings" aria-expanded="false">
<i class="fas fa-sliders-h me-1"></i>高级参数
</button>
<div class="collapse mt-2" id="advancedSettings">
<div class="card card-body bg-light">
<div class="row g-2">
<div class="col-md-4">
<label for="paramTemperature" class="form-label small">temperature</label>
<input type="number" class="form-control form-control-sm" id="paramTemperature" value="0.7" min="0" max="2" step="0.1" title="生成随机性:0更确定,2更随机">
<div class="form-text small">0-2,默认0.7</div>
</div>
<div class="col-md-4">
<label for="paramMaxTokens" class="form-label small">max_tokens</label>
<input type="number" class="form-control form-control-sm" id="paramMaxTokens" value="1000" min="100" max="4000" step="100" title="最大生成 token 数">
<div class="form-text small">100-4000,默认1000</div>
</div>
<div class="col-md-4">
<label for="paramTimeout" class="form-label small">timeout</label>
<input type="number" class="form-control form-control-sm" id="paramTimeout" value="60" min="10" max="300" step="10" title="请求超时时间">
<div class="form-text small">10-300秒,默认60</div>
</div>
</div>
</div>
</div>
</div>
<button type="submit" class="btn btn-primary" id="generateBtn">
<i class="fas fa-magic"></i> 生成专家提示词
</button>
@@ -80,19 +106,30 @@
<script>
document.getElementById('expertForm3').addEventListener('submit', async function(e) {
e.preventDefault();
var form = e.target;
if (form.dataset.submitting === '1') return;
form.dataset.submitting = '1';
var generateBtn = document.getElementById('generateBtn');
generateBtn.disabled = true;
var input_text = document.getElementById('input_text').value.trim();
var loadingIndicator = document.getElementById('loadingIndicator');
var resultCard = document.getElementById('resultCard');
var generateBtn = document.getElementById('generateBtn');
if (!input_text) { alert('请输入您的需求'); return; }
var temperature = parseFloat(document.getElementById('paramTemperature').value) || 0.7;
var maxTokens = parseInt(document.getElementById('paramMaxTokens').value, 10) || 1000;
var timeout = parseInt(document.getElementById('paramTimeout').value, 10) || 60;
if (!input_text) { form.dataset.submitting = ''; generateBtn.disabled = false; alert('请输入您的需求'); return; }
loadingIndicator.classList.remove('d-none');
generateBtn.disabled = true;
resultCard.classList.add('d-none');
try {
var response = await fetch('/api/expert-generate-3/generate', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ input_text: input_text })
body: JSON.stringify({
input_text: input_text,
temperature: temperature,
max_tokens: maxTokens,
timeout: timeout
})
});
var data = await response.json();
if (data.code === 200) {
@@ -114,6 +151,7 @@ document.getElementById('expertForm3').addEventListener('submit', async function
} finally {
loadingIndicator.classList.add('d-none');
generateBtn.disabled = false;
form.dataset.submitting = '';
}
});
function copyToClipboard(text) {