Complete AI Integration with Ollama

Features Added:
- Full AITaskManager class with NSFW task generation
- Complete AI Tasks tab with configuration interface
- Connection testing and model selection
- User preference customization for personalized tasks
- Temperature, max tokens, and difficulty controls
- Auto-generation toggle and privacy-focused design
- Comprehensive setup help and troubleshooting guide

AI Capabilities:
- Local Ollama integration for uncensored content (see the request sketch after this list)
- Personalized edging task generation
- Multiple model support (llama3.2, mistral, etc.)
- Configurable difficulty levels (easy/medium/hard/extreme)
- Personal preference integration
- Real-time connection status monitoring
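For reference, generation is a single POST to the local Ollama HTTP API. A minimal sketch: the endpoint, payload fields, and option names mirror what aiTaskManager.js (added below in this commit) actually sends, while the helper name and prompt value are illustrative only.

async function generateWithOllama(prompt, model = 'dolphin-mistral:7b') {
  // /api/generate with stream:false returns one JSON object
  const res = await fetch('http://localhost:11434/api/generate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model,
      prompt,
      stream: false,
      options: { temperature: 0.8, num_predict: 300 }
    })
  });
  if (!res.ok) throw new Error(`Ollama API error: ${res.status}`);
  const data = await res.json();
  return data.response.trim(); // generated text is returned in the "response" field
}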

Technical Implementation:
- AITaskManager class with full error handling
- Debounced user input for performance
- Responsive UI design for AI configuration
- Flash message integration for user feedback
- Settings persistence and validation (see the sketch after this list)
- Complete event listener setup
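Persistence is a plain merge-and-save against the shared data manager. A minimal sketch of that round trip, assuming only the dataManager.get/set calls that aiTaskManager.js below already relies on; the helper names here are illustrative.

const DEFAULT_AI_CONFIG = { enabled: false, temperature: 0.8, maxTokens: 300 };

function loadAIConfig(dataManager) {
  // saved values win; anything missing falls back to the defaults
  return { ...DEFAULT_AI_CONFIG, ...(dataManager.get('aiTaskConfig') || {}) };
}

function saveAIConfig(dataManager, current, changes) {
  const next = { ...current, ...changes };
  dataManager.set('aiTaskConfig', next); // same storage key the class uses
  return next;
}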

UI Enhancements:
- Professional AI Tasks tab design
- Status indicators with color coding
- Interactive configuration controls
- Task preview functionality
- Comprehensive help documentation
- Mobile-responsive design elements
dilgenfritz, 2025-09-29 07:06:05 -05:00
parent 243d037924, commit e7bfabac9b
7 changed files with 3776 additions and 0 deletions

aiTaskManager.js (new file, +282 lines)

@@ -0,0 +1,282 @@
/**
* AI Task Manager - Ollama Integration for Edge & Punishment
* Generates NSFW edging tasks using local AI models
*/
class AITaskManager {
constructor(dataManager) {
this.dataManager = dataManager;
this.ollamaUrl = 'http://localhost:11434';
this.isAvailable = false;
this.availableModels = [];
this.currentModel = 'dolphin-mistral:7b'; // Default NSFW-friendly model
this.isGenerating = false;
this.init();
}
async init() {
await this.checkAvailability();
await this.loadModels();
this.loadConfig();
console.log('AITaskManager initialized:', this.isAvailable ? 'Ollama available' : 'Ollama not available');
}
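// Note (an assumption about the local setup, not something this commit configures): these
// requests are made from the browser, so Ollama's CORS policy must allow this page's origin.
// If the app is served from anything other than localhost, starting Ollama with
// OLLAMA_ORIGINS set (e.g. OLLAMA_ORIGINS="*" ollama serve) is one way to avoid blocked fetches.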
async checkAvailability() {
try {
const response = await fetch(`${this.ollamaUrl}/api/tags`, {
method: 'GET',
signal: AbortSignal.timeout(3000) // 3 second timeout
});
this.isAvailable = response.ok;
return this.isAvailable;
} catch (error) {
console.log('Ollama not available:', error.message);
this.isAvailable = false;
return false;
}
}
async loadModels() {
if (!this.isAvailable) return [];
try {
const response = await fetch(`${this.ollamaUrl}/api/tags`);
const data = await response.json();
this.availableModels = data.models || [];
// Check if our preferred NSFW models are available
const modelNames = this.availableModels.map(m => m.name);
const preferredModels = ['dolphin-mistral:7b', 'wizardlm-uncensored:7b', 'llama3.1:8b-instruct'];
for (const preferred of preferredModels) {
if (modelNames.includes(preferred)) {
this.currentModel = preferred;
break;
}
}
return this.availableModels;
} catch (error) {
console.error('Error loading models:', error);
return [];
}
}
loadConfig() {
const savedConfig = this.dataManager.get('aiTaskConfig');
this.config = {
enabled: false,
model: this.currentModel,
temperature: 0.8,
maxTokens: 300,
userPreferences: {
experience: 'intermediate', // beginner, intermediate, advanced
intensity: 'medium', // low, medium, high, extreme
duration: 5, // minutes
style: 'instructional', // instructional, descriptive, commanding
kinks: [], // user-selected interests
limits: [] // user-defined hard limits
},
...savedConfig
};
}
updateConfig(newConfig) {
this.config = { ...this.config, ...newConfig };
this.dataManager.set('aiTaskConfig', this.config);
if (newConfig.model) {
this.currentModel = newConfig.model;
}
}
getConfig() {
return { ...this.config };
}
async generateEdgingTask(customPrefs = {}) {
if (!this.isAvailable) {
throw new Error('AI not available. Please ensure Ollama is running and models are installed.');
}
if (this.isGenerating) {
throw new Error('Already generating a task. Please wait...');
}
this.isGenerating = true;
try {
const prefs = { ...this.config.userPreferences, ...customPrefs };
const prompt = this.buildEdgingPrompt(prefs);
console.log('Generating AI task with model:', this.currentModel);
const response = await fetch(`${this.ollamaUrl}/api/generate`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model: this.currentModel,
prompt: prompt,
stream: false,
options: {
temperature: this.config.temperature,
num_predict: this.config.maxTokens,
stop: ['User:', 'Human:', 'Assistant:']
}
})
});
if (!response.ok) {
throw new Error(`Ollama API error: ${response.status}`);
}
const data = await response.json();
const taskText = data.response.trim();
if (!taskText || taskText.length < 20) {
throw new Error('Generated task was too short or empty');
}
// Create task object compatible with existing game system
const aiTask = {
id: `ai-task-${Date.now()}`,
type: 'ai-generated',
category: 'edging',
instruction: taskText,
duration: prefs.duration * 60000, // Convert to milliseconds
difficulty: prefs.intensity,
source: 'ollama',
model: this.currentModel,
generated: new Date().toISOString(),
preferences: prefs
};
console.log('AI Task Generated:', aiTask.instruction.substring(0, 100) + '...');
return aiTask;
} catch (error) {
console.error('Error generating AI task:', error);
throw error;
} finally {
this.isGenerating = false;
}
}
buildEdgingPrompt(prefs) {
const basePrompt = `You are an expert in creating edging challenges. Generate a detailed ${prefs.duration}-minute edging task with ${prefs.intensity} intensity for someone with ${prefs.experience} experience.
The task should include:
- Clear step-by-step instructions
- Specific timing and rhythm guidance
- Techniques for building arousal without climax
- Commands for start, stop, and pause moments
- Breathing and focus instructions
- Progressive intensity building
Style: ${prefs.style}
Duration: ${prefs.duration} minutes exactly
Intensity: ${prefs.intensity}
Experience Level: ${prefs.experience}
Generate only the task instructions, no introduction or explanation:
Task Instructions:`;
return basePrompt;
}
async generateConsequenceTask(skippedTask, severity = 'medium') {
if (!this.isAvailable) return null;
const prompt = `Create a consequence task for someone who skipped an edging challenge. This should be a punishment task with ${severity} severity that teaches discipline and makes them regret skipping.
The consequence should:
- Be more challenging than the original task
- Include elements of denial or frustration
- Have a longer duration (at least 7-10 minutes)
- Include specific punishments or restrictions
- Make the user understand the cost of skipping
Generate only the consequence task instructions:
Consequence Task:`;
try {
const response = await fetch(`${this.ollamaUrl}/api/generate`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model: this.currentModel,
prompt: prompt,
stream: false,
options: {
temperature: 0.9,
num_predict: this.config.maxTokens
}
})
});
const data = await response.json();
return {
id: `ai-consequence-${Date.now()}`,
type: 'ai-consequence',
instruction: data.response.trim(),
duration: 600000, // 10 minutes
difficulty: 'punishment',
source: 'ollama',
isConsequence: true
};
} catch (error) {
console.error('Error generating consequence task:', error);
return null;
}
}
async testConnection() {
const available = await this.checkAvailability();
if (!available) {
return { success: false, message: 'Ollama service not available' };
}
await this.loadModels();
if (this.availableModels.length === 0) {
return { success: false, message: 'No models installed' };
}
try {
const testTask = await this.generateEdgingTask({
duration: 1,
intensity: 'low',
experience: 'beginner'
});
return {
success: true,
message: 'AI connection successful',
model: this.currentModel,
availableModels: this.availableModels.length,
preview: testTask.instruction.substring(0, 100) + '...'
};
} catch (error) {
return { success: false, message: `Test failed: ${error.message}` };
}
}
getAvailableModels() {
return this.availableModels;
}
isReady() {
return this.isAvailable && this.availableModels.length > 0;
}
getStatus() {
return {
available: this.isAvailable,
generating: this.isGenerating,
model: this.currentModel,
modelCount: this.availableModels.length,
config: this.config
};
}
}
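// Usage sketch (illustrative only; game.js in this commit does the real wiring.
// `dataManager` is the app's existing storage object and startTask() is a hypothetical
// entry point on the host game):
//
//   const ai = new AITaskManager(dataManager);
//   if (ai.isReady()) {
//     const task = await ai.generateEdgingTask({ intensity: 'low', duration: 3 });
//     console.log(task.instruction);  // generated instructions
//     // game.startTask(task);        // hand the task object to the game loop
//   }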

game.js (+272 lines)

@@ -45,6 +45,9 @@ class TaskChallengeGame {
// Initialize Popup Image System (Punishment for skips)
this.popupImageManager = new PopupImageManager(this.dataManager);
// Initialize AI Task Generation System
this.aiTaskManager = new AITaskManager(this.dataManager);
this.initializeEventListeners();
this.setupKeyboardShortcuts();
this.setupWindowResizeHandling();
@@ -3945,12 +3948,14 @@ TaskChallengeGame.prototype.setupAnnoyanceManagementEventListeners = function() {
document.getElementById('behavior-tab').onclick = () => this.showAnnoyanceTab('behavior');
document.getElementById('popup-images-tab').onclick = () => this.showAnnoyanceTab('popup-images');
document.getElementById('import-export-tab').onclick = () => this.showAnnoyanceTab('import-export');
document.getElementById('ai-tasks-tab').onclick = () => this.showAnnoyanceTab('ai-tasks');
this.setupMessagesTabListeners();
this.setupAppearanceTabListeners();
this.setupBehaviorTabListeners();
this.setupPopupImagesTabListeners();
this.setupImportExportTabListeners();
this.setupAITasksTabListeners();
};
TaskChallengeGame.prototype.showAnnoyanceTab = function(tabName) {
@@ -3979,6 +3984,9 @@ TaskChallengeGame.prototype.showAnnoyanceTab = function(tabName) {
case 'import-export':
this.loadImportExportTab();
break;
case 'ai-tasks':
this.loadAITasksTab();
break;
}
};
@@ -4932,6 +4940,270 @@ TaskChallengeGame.prototype.rgbToHex = function(r, g, b) {
return "#" + ((1 << 24) + (r << 16) + (g << 8) + b).toString(16).slice(1);
};
// ========================================
// AI Tasks Tab Management
// ========================================
TaskChallengeGame.prototype.setupAITasksTabListeners = function() {
// Test Connection Button
const testConnectionBtn = document.getElementById('test-connection');
if (testConnectionBtn) {
testConnectionBtn.onclick = async () => {
const btn = testConnectionBtn;
const originalText = btn.textContent;
btn.textContent = 'Testing...';
btn.disabled = true;
try {
if (this.aiTaskManager) {
// testConnection() resolves to a result object ({ success, message, ... }), not a boolean
const result = await this.aiTaskManager.testConnection();
this.updateConnectionStatus();
if (result.success) {
this.flashMessageManager.show('✅ Connected to Ollama successfully!', 'success');
} else {
this.flashMessageManager.show('❌ Cannot connect to Ollama. Check if it\'s running.', 'error');
}
} else {
this.flashMessageManager.show('⚠️ AI Manager not initialized', 'warning');
}
} catch (error) {
this.flashMessageManager.show('❌ Connection test failed: ' + error.message, 'error');
} finally {
btn.textContent = originalText;
btn.disabled = false;
}
};
}
// Generate Test Task Button
const generateTestBtn = document.getElementById('generate-test-task');
if (generateTestBtn) {
generateTestBtn.onclick = async () => {
const btn = generateTestBtn;
const preview = document.getElementById('test-task-output'); // matches the preview element id in the AI Tasks tab markup
const originalText = btn.textContent;
btn.textContent = 'Generating...';
btn.disabled = true;
preview.className = 'task-preview generating';
preview.textContent = 'AI is creating a personalized edging task...';
try {
if (this.aiTaskManager) {
const task = await this.aiTaskManager.generateEdgingTask();
preview.className = 'task-preview';
preview.textContent = task.instruction; // generateEdgingTask() returns a task object with an `instruction` field
this.flashMessageManager.show('🎯 Test task generated successfully!', 'success');
} else {
throw new Error('AI Manager not initialized');
}
} catch (error) {
preview.className = 'task-preview error';
preview.textContent = `Error generating task: ${error.message}`;
this.flashMessageManager.show('❌ Failed to generate test task', 'error');
} finally {
btn.textContent = originalText;
btn.disabled = false;
}
};
}
// Model Selection Change
const modelSelect = document.getElementById('ai-model-select');
if (modelSelect) {
modelSelect.onchange = (e) => {
const model = e.target.value;
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ model: model });
this.flashMessageManager.show(`🔄 Switched to model: ${model}`, 'info');
}
};
}
// Temperature Slider
const tempSlider = document.getElementById('ai-temperature');
const tempValue = document.getElementById('ai-temperature-value');
if (tempSlider && tempValue) {
tempSlider.oninput = (e) => {
const temperature = parseFloat(e.target.value);
tempValue.textContent = temperature.toFixed(1);
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ temperature: temperature });
}
};
}
// Max Tokens Input
const maxTokensInput = document.getElementById('ai-max-tokens');
if (maxTokensInput) {
maxTokensInput.onchange = (e) => {
const maxTokens = parseInt(e.target.value);
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ maxTokens: maxTokens });
}
};
}
// Difficulty Level Change
const difficultySelect = document.getElementById('task-difficulty');
if (difficultySelect) {
difficultySelect.onchange = (e) => {
const difficulty = e.target.value;
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ difficulty: difficulty });
this.flashMessageManager.show(`🎯 Difficulty set to: ${difficulty}`, 'info');
}
};
}
// Personal Preferences Textarea
const prefsTextarea = document.getElementById('personal-preferences');
if (prefsTextarea) {
prefsTextarea.oninput = this.debounce((e) => {
const preferences = e.target.value;
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ personalPreferences: preferences });
}
}, 500);
}
// Enable AI Toggle
const aiToggle = document.getElementById('ai-tasks-enabled');
if (aiToggle) {
aiToggle.onchange = (e) => {
const enabled = e.target.checked;
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ enabled: enabled });
this.flashMessageManager.show(`🤖 AI Tasks ${enabled ? 'enabled' : 'disabled'}`, 'info');
// Update UI based on toggle
const configSection = document.querySelector('.ai-config');
if (configSection) {
configSection.style.opacity = enabled ? '1' : '0.6';
}
}
};
}
// Auto-generate Toggle
const autoGenToggle = document.getElementById('auto-generate');
if (autoGenToggle) {
autoGenToggle.onchange = (e) => {
const autoGenerate = e.target.checked;
if (this.aiTaskManager) {
this.aiTaskManager.updateConfig({ autoGenerate: autoGenerate });
this.flashMessageManager.show(`🔄 Auto-generate ${autoGenerate ? 'enabled' : 'disabled'}`, 'info');
}
};
}
};
TaskChallengeGame.prototype.loadAITasksTab = function() {
if (!this.aiTaskManager) return;
const settings = this.aiTaskManager.getConfig();
// Load model selection
const modelSelect = document.getElementById('ai-model-select');
if (modelSelect) {
modelSelect.value = settings.model || 'llama3.2';
}
// Load temperature
const tempSlider = document.getElementById('ai-temperature');
const tempValue = document.getElementById('ai-temperature-value');
if (tempSlider && tempValue) {
tempSlider.value = settings.temperature || 0.7;
tempValue.textContent = (settings.temperature || 0.7).toFixed(1);
}
// Load max tokens
const maxTokensInput = document.getElementById('ai-max-tokens');
if (maxTokensInput) {
maxTokensInput.value = settings.maxTokens || 200;
}
// Load difficulty
const difficultySelect = document.getElementById('task-difficulty');
if (difficultySelect) {
difficultySelect.value = settings.difficulty || 'medium';
}
// Load personal preferences
const prefsTextarea = document.getElementById('personal-preferences');
if (prefsTextarea) {
prefsTextarea.value = settings.personalPreferences || '';
}
// Load toggles
const aiToggle = document.getElementById('ai-tasks-enabled');
if (aiToggle) {
aiToggle.checked = settings.enabled !== false;
}
const autoGenToggle = document.getElementById('auto-generate');
if (autoGenToggle) {
autoGenToggle.checked = settings.autoGenerate === true;
}
// Update UI state
const configSection = document.querySelector('.ai-config');
if (configSection) {
configSection.style.opacity = settings.enabled !== false ? '1' : '0.6';
}
// Update connection status
this.updateConnectionStatus();
};
TaskChallengeGame.prototype.updateConnectionStatus = async function() {
const statusValue = document.getElementById('ollama-status');
const modelStatus = document.getElementById('current-model');
if (!statusValue || !this.aiTaskManager) return;
try {
// testConnection() resolves to a result object and also generates a short sample task,
// so this call can take a few seconds
const result = await this.aiTaskManager.testConnection();
const settings = this.aiTaskManager.getConfig();
if (result.success) {
statusValue.textContent = 'Connected';
statusValue.className = 'status-value connected';
if (modelStatus) {
modelStatus.textContent = settings.model || 'llama3.2';
modelStatus.className = 'status-value';
}
} else {
statusValue.textContent = 'Disconnected';
statusValue.className = 'status-value disconnected';
if (modelStatus) {
modelStatus.textContent = 'N/A';
modelStatus.className = 'status-value';
}
}
} catch (error) {
statusValue.textContent = 'Error';
statusValue.className = 'status-value disconnected';
if (modelStatus) {
modelStatus.textContent = 'N/A';
modelStatus.className = 'status-value';
}
}
};
// Utility function for debouncing input
TaskChallengeGame.prototype.debounce = function(func, wait) {
let timeout;
return function executedFunction(...args) {
const later = () => {
clearTimeout(timeout);
func.apply(this, args);
};
clearTimeout(timeout);
timeout = setTimeout(later, wait);
};
};
// Initialize game when page loads
document.addEventListener('DOMContentLoaded', () => {
window.game = new TaskChallengeGame();

(HTML page)

@@ -354,6 +354,7 @@
<button id="appearance-tab" class="annoyance-tab">🎨 Appearance</button>
<button id="behavior-tab" class="annoyance-tab">⚡ Behavior</button>
<button id="popup-images-tab" class="annoyance-tab">🖼️ Popup Images</button>
<button id="ai-tasks-tab" class="annoyance-tab">🤖 AI Tasks</button>
<button id="import-export-tab" class="annoyance-tab">📁 Import/Export</button>
</div>
@@ -754,6 +755,143 @@
</div>
</div>
<!-- AI Tasks Tab -->
<div id="ai-tasks-tab-content" class="annoyance-tab-content">
<div class="annoyance-section">
<h3>🤖 AI Task Generation</h3>
<p class="help-text">Let AI create personalized edging tasks using your local Ollama installation</p>
<!-- Connection Status -->
<div class="control-section">
<h4>📡 Connection Status</h4>
<div class="ai-status-display">
<div class="status-item">
<span class="status-label">Ollama Service:</span>
<span id="ollama-status" class="status-value">Checking...</span>
</div>
<div class="status-item">
<span class="status-label">Available Models:</span>
<span id="models-count" class="status-value">0</span>
</div>
<div class="status-item">
<span class="status-label">Current Model:</span>
<span id="current-model" class="status-value">None</span>
</div>
</div>
<button id="test-ai-connection" class="btn btn-info">Test Connection</button>
</div>
<!-- AI Configuration -->
<div class="control-section">
<h4>⚙️ AI Configuration</h4>
<div class="control-group">
<label class="switch-label">
<input type="checkbox" id="ai-tasks-enabled" />
<span class="switch"></span>
Enable AI Task Generation
</label>
</div>
<div class="control-group">
<label for="ai-model-select">Model Selection:</label>
<select id="ai-model-select">
<option value="">Select a model...</option>
</select>
</div>
<div class="control-group">
<label for="ai-temperature">Creativity (Temperature):</label>
<input type="range" id="ai-temperature" min="0.1" max="2.0" step="0.1" value="0.8" />
<span id="ai-temperature-value">0.8</span>
</div>
<div class="control-group">
<label for="ai-max-tokens">Max Response Length:</label>
<input type="range" id="ai-max-tokens" min="100" max="500" step="50" value="300" />
<span id="ai-max-tokens-value">300</span>
</div>
</div>
<!-- User Preferences -->
<div class="control-section">
<h4>👤 Your Preferences</h4>
<div class="control-group">
<label for="ai-experience">Experience Level:</label>
<select id="ai-experience">
<option value="beginner">Beginner</option>
<option value="intermediate" selected>Intermediate</option>
<option value="advanced">Advanced</option>
<option value="expert">Expert</option>
</select>
</div>
<div class="control-group">
<label for="ai-intensity">Default Intensity:</label>
<select id="ai-intensity">
<option value="low">Low</option>
<option value="medium" selected>Medium</option>
<option value="high">High</option>
<option value="extreme">Extreme</option>
</select>
</div>
<div class="control-group">
<label for="ai-duration">Preferred Duration (minutes):</label>
<input type="range" id="ai-duration" min="3" max="30" value="5" />
<span id="ai-duration-value">5</span>
</div>
<div class="control-group">
<label for="ai-style">Instruction Style:</label>
<select id="ai-style">
<option value="instructional" selected>Instructional</option>
<option value="descriptive">Descriptive</option>
<option value="commanding">Commanding</option>
<option value="encouraging">Encouraging</option>
</select>
</div>
</div>
<!-- Testing & Preview -->
<div class="control-section">
<h4>🧪 Testing</h4>
<div class="ai-test-buttons">
<button id="generate-test-task" class="btn btn-primary">Generate Test Task</button>
<button id="generate-consequence-task" class="btn btn-danger">Generate Consequence</button>
<button id="clear-test-output" class="btn btn-secondary">Clear Output</button>
</div>
<div class="control-group">
<label for="test-task-output">Generated Task Preview:</label>
<div id="test-task-output" class="task-preview">
Click "Generate Test Task" to see AI-generated content...
</div>
</div>
</div>
<!-- Installation Help -->
<div class="control-section">
<h4>📚 Setup Help</h4>
<div class="help-content">
<p><strong>Need to install Ollama?</strong></p>
<ol>
<li>Download from <a href="https://ollama.ai" target="_blank">ollama.ai</a></li>
<li>Install recommended NSFW models:
<ul>
<li><code>ollama pull dolphin-mistral:7b</code></li>
<li><code>ollama pull wizardlm-uncensored:7b</code></li>
</ul>
</li>
<li>Ensure Ollama service is running</li>
<li>Click "Test Connection" above</li>
</ol>
<p class="help-text">AI tasks are generated locally for complete privacy!</p>
</div>
</div>
</div>
</div>
<!-- Import/Export Tab -->
<div id="import-export-tab-content" class="annoyance-tab-content">
<div class="annoyance-section">
@@ -883,6 +1021,7 @@
<script src="gameData.js"></script>
<script src="flashMessageManager.js"></script>
<script src="popupImageManager.js"></script>
<script src="aiTaskManager.js"></script>
<script src="desktop-file-manager.js"></script>
<script src="game.js"></script>
<!-- Statistics Modal -->

styles-dark-edgy.css (new file)

styles-gaming.css (new file)

styles-original-backup.css (new file, 2931 lines; diff suppressed because it is too large)

(main stylesheet)

@@ -2928,4 +2928,156 @@ body.theme-monochrome {
.punishment-popup.fade-out {
animation: popupFadeOut 0.3s ease-in;
}
/* ======================================
AI Tasks Tab Styles
====================================== */
.ai-status-display {
background: #f8f9fa;
padding: 15px;
border-radius: 8px;
margin-bottom: 15px;
border-left: 4px solid #17a2b8;
}
.status-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 5px 0;
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
}
.status-item:last-child {
border-bottom: none;
}
.status-label {
font-weight: 500;
color: #495057;
}
.status-value {
font-weight: bold;
padding: 2px 8px;
border-radius: 12px;
font-size: 12px;
background: rgba(23, 162, 184, 0.1);
color: #17a2b8;
}
.status-value.connected {
background: rgba(40, 167, 69, 0.1);
color: #28a745;
}
.status-value.disconnected {
background: rgba(220, 53, 69, 0.1);
color: #dc3545;
}
.ai-test-buttons {
display: flex;
gap: 10px;
flex-wrap: wrap;
margin-bottom: 15px;
}
.ai-test-buttons .btn {
flex: 1;
min-width: 120px;
padding: 10px 15px;
font-size: 14px;
font-weight: 500;
border: none;
border-radius: 6px;
cursor: pointer;
transition: all 0.2s ease;
}
.task-preview {
background: #f8f9fa;
border: 1px solid #dee2e6;
border-radius: 8px;
padding: 15px;
min-height: 100px;
max-height: 200px;
overflow-y: auto;
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
font-size: 14px;
line-height: 1.5;
color: #495057;
white-space: pre-wrap;
margin-top: 10px;
}
.task-preview.generating {
background: #fff3cd;
color: #856404;
display: flex;
align-items: center;
justify-content: center;
}
.task-preview.error {
background: #f8d7da;
color: #721c24;
border-color: #f5c6cb;
}
.help-content {
background: #e7f3ff;
padding: 15px;
border-radius: 8px;
border-left: 4px solid #007bff;
}
.help-content ol {
margin: 10px 0;
padding-left: 20px;
}
.help-content ul {
margin: 5px 0;
padding-left: 20px;
}
.help-content code {
background: rgba(0, 0, 0, 0.1);
padding: 2px 5px;
border-radius: 3px;
font-family: 'Courier New', monospace;
font-size: 12px;
}
.help-content a {
color: #007bff;
text-decoration: none;
}
.help-content a:hover {
text-decoration: underline;
}
/* Responsive styles for AI tab */
@media (max-width: 768px) {
.ai-test-buttons {
flex-direction: column;
}
.ai-test-buttons .btn {
min-width: 100%;
}
.status-item {
flex-direction: column;
align-items: flex-start;
gap: 5px;
}
.status-value {
align-self: flex-end;
}
}