|
|
import gradio as gr |
|
|
import os |
|
|
import json |
|
|
import requests |
|
|
from datetime import datetime |
|
|
import time |
|
|
from typing import List, Dict, Any, Generator, Tuple, Optional, Set |
|
|
import logging |
|
|
import re |
|
|
import tempfile |
|
|
from pathlib import Path |
|
|
import sqlite3 |
|
|
import hashlib |
|
|
import threading |
|
|
from contextlib import contextmanager |
|
|
from dataclasses import dataclass, field, asdict |
|
|
from collections import defaultdict |
|
|
import json |
|
|
from pathlib import Path |
|
|
import random |
|
|
|
|
|
|
|
|
# Configure root logging once at import time; all module loggers inherit this.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Module-level logger (standard per-module convention).
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
# Optional dependency: python-docx. DOCX export is feature-gated on
# DOCX_AVAILABLE so the rest of the app still runs without the package.
try:
    from docx import Document
    from docx.shared import Inches, Pt, RGBColor, Mm
    from docx.enum.text import WD_ALIGN_PARAGRAPH
    from docx.enum.style import WD_STYLE_TYPE
    from docx.oxml.ns import qn
    from docx.oxml import OxmlElement
    DOCX_AVAILABLE = True
except ImportError:
    DOCX_AVAILABLE = False
    logger.warning("python-docx not installed. DOCX export will be disabled.")
|
|
|
|
|
|
|
|
# --- External service configuration (all read from the environment) ---
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "")
BRAVE_SEARCH_API_KEY = os.getenv("BRAVE_SEARCH_API_KEY", "")
API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
MODEL_ID = "dep86pjolcjjnv8"  # Friendli dedicated deployment id
DB_PATH = "novel_sessions_v6.db"  # SQLite file, created on first run

# Target length of the finished novella and the per-part minimum
# (10 parts; see NARRATIVE_PHASES).
TARGET_WORDS = 8000
MIN_WORDS_PER_PART = 800

if not FRIENDLI_TOKEN:
    logger.error("FRIENDLI_TOKEN not set. Application will not work properly.")
    # Placeholder keeps the module importable for local testing; real API
    # calls will still fail with an auth error.
    FRIENDLI_TOKEN = "dummy_token_for_testing"

if not BRAVE_SEARCH_API_KEY:
    logger.warning("BRAVE_SEARCH_API_KEY not set. Web search features will be disabled.")

# Serializes ALL SQLite access across threads (see NovelDatabase.get_db).
db_lock = threading.Lock()
|
|
|
|
|
|
|
|
# Fixed 10-part dramatic arc every generated novella follows.
# Index i-1 names the phase of part i (consumed by UNIFIED_STAGES and
# create_writer_prompt).
NARRATIVE_PHASES = [
    "Introduction: Daily Life and Cracks",
    "Development 1: Rising Anxiety",
    "Development 2: External Shock",
    "Development 3: Deepening Internal Conflict",
    "Climax 1: Peak of Crisis",
    "Climax 2: Moment of Choice",
    "Falling Action 1: Consequences and Aftermath",
    "Falling Action 2: New Recognition",
    "Resolution 1: Changed Daily Life",
    "Resolution 2: Open Questions"
]
|
|
|
|
|
|
|
|
# Linear (role, display-label) pipeline: 3 planning stages, then for each of
# the 10 parts a draft -> per-part critique -> revision triple, then one
# final comprehensive critique (34 stages total).
# NOTE: the leading characters of each label are mojibake of emoji in the
# original source; kept byte-for-byte because they are runtime UI strings.
UNIFIED_STAGES = [
    ("director", "๐ฌ Director: Integrated Narrative Structure Planning"),
    ("critic_director", "๐ Critic: Deep Review of Narrative Structure"),
    ("director", "๐ฌ Director: Final Master Plan"),
] + [
    item for i in range(1, 11)
    for item in [
        ("writer", f"โ๏ธ Writer: Part {i} - {NARRATIVE_PHASES[i-1]}"),
        (f"critic_part{i}", f"๐ Part {i} Critic: Immediate Review and Revision Request"),
        ("writer", f"โ๏ธ Writer: Part {i} Revision")
    ]
] + [
    ("critic_final", "๐ Final Critic: Comprehensive Evaluation and Literary Achievement"),
]
|
|
|
|
|
|
|
|
@dataclass
class StoryBible:
    """Story bible for maintaining narrative consistency.

    Single source of truth consumed by the director/writer/critic prompts;
    mutated via UnifiedNarrativeTracker.update_story_bible and round-tripped
    through JSON by NovelDatabase (asdict / StoryBible(**data)).
    """
    # character name -> free-form attribute dict (check_continuity reads an
    # optional "traits" list of {"name": ..., "abandoned": bool} entries)
    characters: Dict[str, Dict[str, Any]] = field(default_factory=dict)
    # setting name -> description
    settings: Dict[str, str] = field(default_factory=dict)
    # chronological {"event": ..., "details": ...} entries
    timeline: List[Dict[str, Any]] = field(default_factory=list)
    plot_points: List[Dict[str, Any]] = field(default_factory=list)
    themes: List[str] = field(default_factory=list)
    # symbol name -> list of recorded variations/meanings
    symbols: Dict[str, List[str]] = field(default_factory=dict)
    # free-form style keys; 'voice' is read in create_writer_prompt
    style_guide: Dict[str, str] = field(default_factory=dict)
    # mandatory first sentence of part 1, set by generate_powerful_opening
    opening_sentence: str = ""
|
|
|
|
@dataclass
class PartCritique:
    """Critique of a single part.

    Each field is a list of human-readable findings; serialized to JSON via
    asdict for the critiques table and restored with PartCritique(**data).
    """
    part_number: int  # 1-10, matching NARRATIVE_PHASES
    continuity_issues: List[str] = field(default_factory=list)
    character_consistency: List[str] = field(default_factory=list)
    plot_progression: List[str] = field(default_factory=list)
    thematic_alignment: List[str] = field(default_factory=list)
    technical_issues: List[str] = field(default_factory=list)
    strengths: List[str] = field(default_factory=list)
    required_changes: List[str] = field(default_factory=list)
    literary_quality: List[str] = field(default_factory=list)
|
|
|
|
|
|
|
class UnifiedNarrativeTracker: |
|
|
"""Unified narrative tracker for single writer system""" |
|
|
def __init__(self): |
|
|
self.story_bible = StoryBible() |
|
|
self.part_critiques: Dict[int, PartCritique] = {} |
|
|
self.accumulated_content: List[str] = [] |
|
|
self.word_count_by_part: Dict[int, int] = {} |
|
|
self.revision_history: Dict[int, List[str]] = defaultdict(list) |
|
|
self.causal_chains: List[Dict[str, Any]] = [] |
|
|
self.narrative_momentum: float = 0.0 |
|
|
|
|
|
def update_story_bible(self, element_type: str, key: str, value: Any): |
|
|
"""Update story bible""" |
|
|
if element_type == "character": |
|
|
self.story_bible.characters[key] = value |
|
|
elif element_type == "setting": |
|
|
self.story_bible.settings[key] = value |
|
|
elif element_type == "timeline": |
|
|
self.story_bible.timeline.append({"event": key, "details": value}) |
|
|
elif element_type == "theme": |
|
|
if key not in self.story_bible.themes: |
|
|
self.story_bible.themes.append(key) |
|
|
elif element_type == "symbol": |
|
|
if key not in self.story_bible.symbols: |
|
|
self.story_bible.symbols[key] = [] |
|
|
self.story_bible.symbols[key].append(value) |
|
|
|
|
|
def add_part_critique(self, part_number: int, critique: PartCritique): |
|
|
"""Add part critique""" |
|
|
self.part_critiques[part_number] = critique |
|
|
|
|
|
def check_continuity(self, current_part: int, new_content: str) -> List[str]: |
|
|
"""Check continuity""" |
|
|
issues = [] |
|
|
|
|
|
|
|
|
for char_name, char_data in self.story_bible.characters.items(): |
|
|
if char_name in new_content: |
|
|
if "traits" in char_data: |
|
|
for trait in char_data["traits"]: |
|
|
if trait.get("abandoned", False): |
|
|
issues.append(f"{char_name}'s abandoned trait '{trait['name']}' reappears") |
|
|
|
|
|
|
|
|
if len(self.story_bible.timeline) > 0: |
|
|
last_event = self.story_bible.timeline[-1] |
|
|
|
|
|
|
|
|
if current_part > 1 and not any(kw in new_content for kw in |
|
|
['because', 'therefore', 'thus', 'hence', 'consequently']): |
|
|
issues.append("Unclear causality with previous part") |
|
|
|
|
|
return issues |
|
|
|
|
|
def calculate_narrative_momentum(self, part_number: int, content: str) -> float: |
|
|
"""Calculate narrative momentum""" |
|
|
momentum = 5.0 |
|
|
|
|
|
|
|
|
new_elements = len(set(content.split()) - set(' '.join(self.accumulated_content).split())) |
|
|
if new_elements > 100: |
|
|
momentum += 2.0 |
|
|
|
|
|
|
|
|
tension_words = ['crisis', 'conflict', 'tension', 'struggle', 'dilemma'] |
|
|
if any(word in content.lower() for word in tension_words): |
|
|
momentum += 1.5 |
|
|
|
|
|
|
|
|
causal_words = ['because', 'therefore', 'thus', 'consequently', 'hence'] |
|
|
causal_count = sum(1 for word in causal_words if word in content.lower()) |
|
|
momentum += min(causal_count * 0.5, 2.0) |
|
|
|
|
|
|
|
|
if part_number > 1: |
|
|
prev_content = self.accumulated_content[-1] if self.accumulated_content else "" |
|
|
overlap = len(set(content.split()) & set(prev_content.split())) |
|
|
if overlap > len(content.split()) * 0.3: |
|
|
momentum -= 3.0 |
|
|
|
|
|
return max(0.0, min(10.0, momentum)) |
|
|
|
|
|
class NovelDatabase:
    """SQLite persistence for the single-writer pipeline.

    Tables:
      sessions  -- one row per novel-generation run
      stages    -- one row per pipeline stage, unique on (session, stage no.)
      critiques -- append-only per-part critique JSON

    Every connection goes through get_db(), which serializes access behind
    the module-level db_lock, so these static methods are safe to call from
    concurrent worker threads.
    """

    @staticmethod
    def init_db():
        """Create the schema if missing. Idempotent; called once at startup."""
        with sqlite3.connect(DB_PATH) as conn:
            # WAL lets readers proceed while a writer holds the database.
            conn.execute("PRAGMA journal_mode=WAL")
            cursor = conn.cursor()

            cursor.execute('''
                CREATE TABLE IF NOT EXISTS sessions (
                    session_id TEXT PRIMARY KEY,
                    user_query TEXT NOT NULL,
                    language TEXT NOT NULL,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    status TEXT DEFAULT 'active',
                    current_stage INTEGER DEFAULT 0,
                    final_novel TEXT,
                    literary_report TEXT,
                    total_words INTEGER DEFAULT 0,
                    story_bible TEXT,
                    narrative_tracker TEXT,
                    opening_sentence TEXT
                )
            ''')

            cursor.execute('''
                CREATE TABLE IF NOT EXISTS stages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    stage_number INTEGER NOT NULL,
                    stage_name TEXT NOT NULL,
                    role TEXT NOT NULL,
                    content TEXT,
                    word_count INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'pending',
                    narrative_momentum REAL DEFAULT 0.0,
                    created_at TEXT DEFAULT (datetime('now')),
                    updated_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id),
                    UNIQUE(session_id, stage_number)
                )
            ''')

            cursor.execute('''
                CREATE TABLE IF NOT EXISTS critiques (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    part_number INTEGER NOT NULL,
                    critique_data TEXT,
                    created_at TEXT DEFAULT (datetime('now')),
                    FOREIGN KEY (session_id) REFERENCES sessions(session_id)
                )
            ''')

            conn.commit()

    @staticmethod
    @contextmanager
    def get_db():
        """Yield a Row-factory connection, serialized behind db_lock.

        The lock is held for the whole context, so keep work inside it short.
        """
        with db_lock:
            conn = sqlite3.connect(DB_PATH, timeout=30.0)
            conn.row_factory = sqlite3.Row  # rows support column-name access
            try:
                yield conn
            finally:
                conn.close()

    @staticmethod
    def create_session(user_query: str, language: str) -> str:
        """Insert a new session row and return its id.

        The id is an MD5 of query+timestamp -- a uniqueness token only,
        not a security credential.
        """
        session_id = hashlib.md5(f"{user_query}{datetime.now()}".encode()).hexdigest()
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'INSERT INTO sessions (session_id, user_query, language) VALUES (?, ?, ?)',
                (session_id, user_query, language)
            )
            conn.commit()
        return session_id

    @staticmethod
    def save_stage(session_id: str, stage_number: int, stage_name: str,
                   role: str, content: str, status: str = 'complete',
                   narrative_momentum: float = 0.0):
        """Upsert one stage row and refresh the session's aggregate columns."""
        word_count = len(content.split()) if content else 0
        with NovelDatabase.get_db() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO stages (session_id, stage_number, stage_name, role, content,
                                    word_count, status, narrative_momentum)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(session_id, stage_number)
                DO UPDATE SET content=?, word_count=?, status=?, stage_name=?,
                              narrative_momentum=?, updated_at=datetime('now')
            ''', (session_id, stage_number, stage_name, role, content, word_count,
                  status, narrative_momentum, content, word_count, status, stage_name,
                  narrative_momentum))

            # NOTE(review): this sums EVERY writer stage (first drafts and
            # revisions alike), so total_words can double-count a part --
            # confirm against get_writer_content, which prefers revisions only.
            cursor.execute('''
                UPDATE sessions
                SET total_words = (
                    SELECT SUM(word_count)
                    FROM stages
                    WHERE session_id = ? AND role = 'writer' AND content IS NOT NULL
                ),
                updated_at = datetime('now'),
                current_stage = ?
                WHERE session_id = ?
            ''', (session_id, stage_number, session_id))

            conn.commit()

    @staticmethod
    def save_critique(session_id: str, part_number: int, critique: PartCritique):
        """Append one part critique as a JSON blob (history is never replaced)."""
        with NovelDatabase.get_db() as conn:
            critique_json = json.dumps(asdict(critique))
            conn.cursor().execute(
                'INSERT INTO critiques (session_id, part_number, critique_data) VALUES (?, ?, ?)',
                (session_id, part_number, critique_json)
            )
            conn.commit()

    @staticmethod
    def save_opening_sentence(session_id: str, opening_sentence: str):
        """Persist the generated mandatory opening sentence for the session."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                'UPDATE sessions SET opening_sentence = ? WHERE session_id = ?',
                (opening_sentence, session_id)
            )
            conn.commit()

    @staticmethod
    def get_writer_content(session_id: str) -> str:
        """Concatenate the novel text, preferring revised parts.

        Uses only '%Revision%' writer stages when any exist, otherwise falls
        back to the first drafts; parts are joined with blank lines.
        """
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute('''
                SELECT content FROM stages
                WHERE session_id = ? AND role = 'writer'
                AND stage_name LIKE '%Revision%'
                ORDER BY stage_number
            ''', (session_id,)).fetchall()

            if rows:
                return '\n\n'.join(row['content'] for row in rows if row['content'])
            else:
                # No revisions yet: fall back to the original drafts.
                rows = conn.cursor().execute('''
                    SELECT content FROM stages
                    WHERE session_id = ? AND role = 'writer'
                    AND stage_name NOT LIKE '%Revision%'
                    ORDER BY stage_number
                ''', (session_id,)).fetchall()
                return '\n\n'.join(row['content'] for row in rows if row['content'])

    @staticmethod
    def save_narrative_tracker(session_id: str, tracker: UnifiedNarrativeTracker):
        """Serialize the tracker's persistent state to the session row.

        accumulated_content and revision_history are intentionally not
        persisted here (stage rows already hold the prose).
        """
        with NovelDatabase.get_db() as conn:
            tracker_data = json.dumps({
                'story_bible': asdict(tracker.story_bible),
                'part_critiques': {k: asdict(v) for k, v in tracker.part_critiques.items()},
                'word_count_by_part': tracker.word_count_by_part,
                'causal_chains': tracker.causal_chains,
                'narrative_momentum': tracker.narrative_momentum
            })
            conn.cursor().execute(
                'UPDATE sessions SET narrative_tracker = ? WHERE session_id = ?',
                (tracker_data, session_id)
            )
            conn.commit()

    @staticmethod
    def load_narrative_tracker(session_id: str) -> Optional[UnifiedNarrativeTracker]:
        """Rebuild a tracker from the session row, or None if never saved."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT narrative_tracker FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()

            if row and row['narrative_tracker']:
                data = json.loads(row['narrative_tracker'])
                tracker = UnifiedNarrativeTracker()

                bible_data = data.get('story_bible', {})
                tracker.story_bible = StoryBible(**bible_data)

                for part_num, critique_data in data.get('part_critiques', {}).items():
                    tracker.part_critiques[int(part_num)] = PartCritique(**critique_data)

                # FIX: json.dumps coerces int dict keys to strings; convert
                # them back so lookups by part number keep working after a
                # save/load round trip (part_critiques already did this).
                tracker.word_count_by_part = {
                    int(part): count
                    for part, count in data.get('word_count_by_part', {}).items()
                }
                tracker.causal_chains = data.get('causal_chains', [])
                tracker.narrative_momentum = data.get('narrative_momentum', 0.0)

                return tracker
            return None

    @staticmethod
    def get_session(session_id: str) -> Optional[Dict]:
        """Return the full session row as a dict, or None if unknown."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute('SELECT * FROM sessions WHERE session_id = ?',
                                        (session_id,)).fetchone()
            return dict(row) if row else None

    @staticmethod
    def get_stages(session_id: str) -> List[Dict]:
        """Return all stage rows for a session, ordered by stage number."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                'SELECT * FROM stages WHERE session_id = ? ORDER BY stage_number',
                (session_id,)
            ).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def update_final_novel(session_id: str, final_novel: str, literary_report: str = ""):
        """Store the finished novel text and mark the session complete."""
        with NovelDatabase.get_db() as conn:
            conn.cursor().execute(
                '''UPDATE sessions SET final_novel = ?, status = 'complete',
                   updated_at = datetime('now'), literary_report = ? WHERE session_id = ?''',
                (final_novel, literary_report, session_id)
            )
            conn.commit()

    @staticmethod
    def get_active_sessions() -> List[Dict]:
        """Return up to 10 most-recently-updated in-progress sessions."""
        with NovelDatabase.get_db() as conn:
            rows = conn.cursor().execute(
                '''SELECT session_id, user_query, language, created_at, current_stage, total_words
                   FROM sessions WHERE status = 'active' ORDER BY updated_at DESC LIMIT 10'''
            ).fetchall()
            return [dict(row) for row in rows]

    @staticmethod
    def get_total_words(session_id: str) -> int:
        """Return the cached total word count for a session (0 if unknown)."""
        with NovelDatabase.get_db() as conn:
            row = conn.cursor().execute(
                'SELECT total_words FROM sessions WHERE session_id = ?',
                (session_id,)
            ).fetchone()
            return row['total_words'] if row and row['total_words'] else 0
|
|
|
|
|
class WebSearchIntegration:
    """Thin wrapper around the Brave web-search REST API.

    When no API key was configured at import time the instance is disabled
    and every search returns an empty result list.
    """

    def __init__(self):
        self.brave_api_key = BRAVE_SEARCH_API_KEY
        self.search_url = "https://api.search.brave.com/res/v1/web/search"
        self.enabled = bool(self.brave_api_key)

    def search(self, query: str, count: int = 3, language: str = "en") -> List[Dict]:
        """Return raw Brave web results; empty on any API failure (never raises)."""
        if not self.enabled:
            return []
        request_headers = {
            "Accept": "application/json",
            "X-Subscription-Token": self.brave_api_key,
        }
        request_params = {
            "q": query,
            "count": count,
            "search_lang": "ko" if language == "Korean" else "en",
            "text_decorations": False,
            "safesearch": "moderate",
        }
        try:
            resp = requests.get(self.search_url, headers=request_headers,
                                params=request_params, timeout=10)
            resp.raise_for_status()
            # .json() stays inside the try: a malformed body raises a
            # RequestException subclass in current requests versions.
            payload = resp.json()
            return payload.get("web", {}).get("results", [])
        except requests.exceptions.RequestException as e:
            logger.error(f"Web search API error: {e}")
            return []

    def extract_relevant_info(self, results: List[Dict], max_chars: int = 1500) -> str:
        """Flatten up to three results into a numbered 'title: description' digest."""
        if not results:
            return ""
        snippets = []
        used = 0
        for idx, item in enumerate(results[:3], 1):
            line = f"[{idx}] {item.get('title', '')}: {item.get('description', '')}"
            if used + len(line) >= max_chars:
                break  # budget exhausted; drop the rest
            snippets.append(line)
            used += len(line)
        return "\n".join(snippets)
|
|
|
|
|
|
|
|
class UnifiedLiterarySystem: |
|
|
"""Single writer progressive literary novel generation system""" |
|
|
def __init__(self): |
|
|
self.token = FRIENDLI_TOKEN |
|
|
self.api_url = API_URL |
|
|
self.model_id = MODEL_ID |
|
|
self.narrative_tracker = UnifiedNarrativeTracker() |
|
|
self.web_search = WebSearchIntegration() |
|
|
self.current_session_id = None |
|
|
NovelDatabase.init_db() |
|
|
|
|
|
def create_headers(self): |
|
|
return {"Authorization": f"Bearer {self.token}", "Content-Type": "application/json"} |
|
|
|
|
|
|
|
|
    def augment_query(self, user_query: str, language: str) -> str:
        """Expand a short user prompt into a structured novella brief.

        Prompts shorter than 15 words get wrapped in a language-specific
        template that spells out the required 10-part structure; longer
        prompts, and languages without a template, pass through unchanged.

        NOTE(review): the Korean template text below contains mojibake with
        embedded line breaks; it is runtime prompt text and is therefore
        kept byte-for-byte.
        """
        # Heuristic: very short prompts need scaffolding for the director stage.
        if len(user_query.split()) < 15:
            augmented_template = {
                "Korean": f"""'{user_query}'

**์์ฌ ๊ตฌ์กฐ ํต์ฌ:**
- 10๊ฐ ํํธ๊ฐ ํ๋์ ํตํฉ๋ ์ด์ผ๊ธฐ๋ฅผ ๊ตฌ์ฑ
- ๊ฐ ํํธ๋ ์ด์ ํํธ์ ํ์ฐ์ ๊ฒฐ๊ณผ
- ์ธ๋ฌผ์ ๋ช
ํํ ๋ณํ ๊ถค์ (A โ B โ C)
- ์ค์ฌ ๊ฐ๋ฑ์ ์ ์ง์ ๊ณ ์กฐ์ ํด๊ฒฐ
- ๊ฐ๋ ฌํ ์ค์ฌ ์์ง์ ์๋ฏธ ๋ณํ""",

                "English": f"""'{user_query}'

**Narrative Structure Core:**
- 10 parts forming one integrated story
- Each part as inevitable result of previous
- Clear character transformation arc (A โ B โ C)
- Progressive escalation and resolution of central conflict
- Evolving meaning of powerful central symbol"""
            }
            # Fall back to the raw query for languages without a template.
            return augmented_template.get(language, user_query)
        return user_query
|
|
|
|
|
    def generate_powerful_opening(self, user_query: str, language: str) -> str:
        """Generate a striking opening sentence for the theme via one LLM call.

        Builds a language-specific instruction prompt (Korean is the default
        when `language` has no entry) and returns the model's single-sentence
        reply, stripped of surrounding whitespace.

        NOTE(review): `call_llm_sync` is defined elsewhere in this class (not
        visible in this chunk) -- presumably a blocking chat-completion call;
        confirm its signature before changing this method.
        """

        opening_prompt = {
            "Korean": f"""์ฃผ์ : {user_query}

์ด ์ฃผ์ ์ ๋ํ ๊ฐ๋ ฌํ๊ณ ์์ ์ ์๋ ์ฒซ๋ฌธ์ฅ์ ์์ฑํ์ธ์.

**์ฒซ๋ฌธ์ฅ ์์ฑ ์์น:**
1. ์ฆ๊ฐ์ ์ธ ๊ธด์ฅ๊ฐ์ด๋ ๊ถ๊ธ์ฆ ์ ๋ฐ
2. ํ๋ฒํ์ง ์์ ์๊ฐ์ด๋ ์ํฉ ์ ์
3. ๊ฐ๊ฐ์ ์ด๊ณ ๊ตฌ์ฒด์ ์ธ ์ด๋ฏธ์ง
4. ์ฒ ํ์ ์ง๋ฌธ์ด๋ ์ญ์ค์ ์ง์ 
5. ์๊ฐ๊ณผ ๊ณต๊ฐ์ ๋
ํนํ ์ค์ 

**ํ๋ฅญํ ์ฒซ๋ฌธ์ฅ์ ์์ ํจํด:**
- "๊ทธ๊ฐ ์ฃฝ์ ๋ , ..." (์ถฉ๊ฒฉ์ ์ฌ๊ฑด)
- "๋ชจ๋ ๊ฒ์ด ๋๋ฌ๋ค๊ณ ์๊ฐํ ์๊ฐ..." (๋ฐ์ ์๊ณ )
- "์ธ์์์ ๊ฐ์ฅ [ํ์ฉ์ฌ]ํ [๋ช
์ฌ]๋..." (๋
ํนํ ์ ์)
- "[๊ตฌ์ฒด์ ํ๋]ํ๋ ๊ฒ๋ง์ผ๋ก๋..." (์ผ์์ ์ฌํด์)

๋จ ํ๋์ ๋ฌธ์ฅ๋ง ์ ์ํ์ธ์.""",

            "English": f"""Theme: {user_query}

Generate an unforgettable opening sentence for this theme.

**Opening Sentence Principles:**
1. Immediate tension or curiosity
2. Unusual perspective or situation
3. Sensory and specific imagery
4. Philosophical question or paradox
5. Unique temporal/spatial setting

**Great Opening Patterns:**
- "The day he died, ..." (shocking event)
- "At the moment everything seemed over..." (reversal hint)
- "The most [adjective] [noun] in the world..." (unique definition)
- "Just by [specific action]..." (reinterpretation of ordinary)

Provide only one sentence."""
        }

        # Korean prompt is the fallback for unsupported languages.
        messages = [{"role": "user", "content": opening_prompt.get(language, opening_prompt["Korean"])}]
        opening = self.call_llm_sync(messages, "writer", language)
        return opening.strip()
|
|
|
|
|
    def create_director_initial_prompt(self, user_query: str, language: str) -> str:
        """Build the director's initial planning prompt.

        NOTE(review): despite the name, this method also performs side
        effects -- it generates the opening sentence via an LLM call, stores
        it in the story bible, persists it to the DB, and may hit the web
        search API. Consider splitting if that surprises callers.

        Returns the language-specific prompt string (Korean fallback).
        """
        augmented_query = self.augment_query(user_query, language)

        # Generate and persist the mandatory opening sentence up front so the
        # plan and part 1 both reference the same sentence.
        opening_sentence = self.generate_powerful_opening(user_query, language)
        self.narrative_tracker.story_bible.opening_sentence = opening_sentence
        if self.current_session_id:
            NovelDatabase.save_opening_sentence(self.current_session_id, opening_sentence)

        # Optional background research; failures only log a warning.
        search_results_str = ""
        if self.web_search.enabled:
            short_query = user_query[:50] if len(user_query) > 50 else user_query
            queries = [
                f"{short_query} philosophical meaning",
                f"human existence meaning {short_query}",
                f"{short_query} literary works"
            ]
            for q in queries[:2]:  # cap at two API calls
                try:
                    results = self.web_search.search(q, count=2, language=language)
                    if results:
                        search_results_str += self.web_search.extract_relevant_info(results) + "\n"
                except Exception as e:
                    logger.warning(f"Search failed: {str(e)}")

        # The Korean prompt text contains mojibake with embedded line breaks;
        # it is runtime prompt content and is kept byte-for-byte.
        lang_prompts = {
            "Korean": f"""๋
ธ๋ฒจ๋ฌธํ์ ์์ค์ ์ฒ ํ์ ๊น์ด๋ฅผ ์ง๋ ์คํธ์์ค(8,000๋จ์ด)์ ๊ธฐํํ์ธ์.

**์ฃผ์ :** {augmented_query}

**ํ์ ์ฒซ๋ฌธ์ฅ:** {opening_sentence}

**์ฐธ๊ณ ์๋ฃ:**
{search_results_str if search_results_str else "N/A"}

**ํ์ ๋ฌธํ์ ์์:**

1. **์ฒ ํ์ ํ๊ตฌ**
- ํ๋์ธ์ ์ค์กด์ ๊ณ ๋ (์์ธ, ์ ์ฒด์ฑ, ์๋ฏธ ์์ค)
- ๋์งํธ ์๋์ ์ธ๊ฐ ์กฐ๊ฑด
- ์๋ณธ์ฃผ์ ์ฌํ์ ๋ชจ์๊ณผ ๊ฐ์ธ์ ์ ํ
- ์ฃฝ์, ์ฌ๋, ์์ ์ ๋ํ ์๋ก์ด ์ฑ์ฐฐ

2. **์ฌํ์ ๋ฉ์์ง**
- ๊ณ๊ธ, ์ ๋, ์ธ๋ ๊ฐ ๊ฐ๋ฑ
- ํ๊ฒฝ ์๊ธฐ์ ์ธ๊ฐ์ ์ฑ
์
- ๊ธฐ์ ๋ฐ์ ๊ณผ ์ธ๊ฐ์ฑ์ ์ถฉ๋
- ํ๋ ๋ฏผ์ฃผ์ฃผ์์ ์๊ธฐ์ ๊ฐ์ธ์ ์ญํ 

3. **๋ฌธํ์ ์์ฌ ์ฅ์น**
- ์ค์ฌ ์์ : [๊ตฌ์ฒด์ ์ฌ๋ฌผ/ํ์] โ [์ถ์์ ์๋ฏธ]
- ๋ฐ๋ณต๋๋ ๋ชจํฐํ: [์ด๋ฏธ์ง/ํ๋] (์ต์ 5ํ ๋ณ์ฃผ)
- ๋์กฐ๋ฒ: [A vs B]์ ์ง์์ ๊ธด์ฅ
- ์์ง์ ๊ณต๊ฐ: [๊ตฌ์ฒด์ ์ฅ์]๊ฐ ์๋ฏธํ๋ ๊ฒ
- ์๊ฐ์ ์ฃผ๊ด์ ํ๋ฆ (ํ์, ์๊ฐ, ์ ์ง)

4. **ํตํฉ๋ 10ํํธ ๊ตฌ์กฐ**
๊ฐ ํํธ๋ณ ํต์ฌ:
- ํํธ 1: ์ฒซ๋ฌธ์ฅ์ผ๋ก ์์, ์ผ์ ์ ๊ท ์ด โ ์ฒ ํ์ ์ง๋ฌธ ์ ๊ธฐ
- ํํธ 2-3: ์ธ๋ถ ์ฌ๊ฑด โ ๋ด์ ์ฑ์ฐฐ ์ฌํ
- ํํธ 4-5: ์ฌํ์ ๊ฐ๋ฑ โ ๊ฐ์ธ์ ๋๋ ๋ง
- ํํธ 6-7: ์๊ธฐ์ ์ ์ โ ์ค์กด์ ์ ํ
- ํํธ 8-9: ์ ํ์ ๊ฒฐ๊ณผ โ ์๋ก์ด ์ธ์
- ํํธ 10: ๋ณํ๋ ์ธ๊ณ๊ด โ ์ด๋ฆฐ ์ง๋ฌธ

5. **๋ฌธ์ฒด ์ง์นจ**
- ์์ ์ฐ๋ฌธ์ฒด: ์ผ์ ์ธ์ด์ ์์ ์ ๊ท ํ
- ์์์ ํ๋ฆ๊ณผ ๊ฐ๊ด์ ๋ฌ์ฌ์ ๊ต์ฐจ
- ์งง๊ณ ๊ฐ๋ ฌํ ๋ฌธ์ฅ๊ณผ ์ฑ์ฐฐ์ ๊ธด ๋ฌธ์ฅ์ ๋ฆฌ๋ฌ
- ๊ฐ๊ฐ์ ๋ํ
์ผ๋ก ์ถ์์ ๊ฐ๋
๊ตฌํ

๊ตฌ์ฒด์ ์ด๊ณ ํ์ ์ ์ธ ๊ณํ์ ์ ์ํ์ธ์.""",

            "English": f"""Plan a philosophically profound novella (8,000 words) worthy of Nobel Prize.

**Theme:** {augmented_query}

**Required Opening:** {opening_sentence}

**Reference:**
{search_results_str if search_results_str else "N/A"}

**Essential Literary Elements:**

1. **Philosophical Exploration**
- Modern existential anguish (alienation, identity, loss of meaning)
- Human condition in digital age
- Capitalist contradictions and individual choice
- New reflections on death, love, freedom

2. **Social Message**
- Class, gender, generational conflicts
- Environmental crisis and human responsibility
- Technology vs humanity collision
- Modern democracy crisis and individual role

3. **Literary Devices**
- Central metaphor: [concrete object/phenomenon] โ [abstract meaning]
- Recurring motif: [image/action] (minimum 5 variations)
- Contrast: sustained tension of [A vs B]
- Symbolic space: what [specific place] means
- Subjective time flow (flashback, premonition, pause)

4. **Integrated 10-Part Structure**
Each part's core:
- Part 1: Start with opening sentence, daily cracks โ philosophical questions
- Part 2-3: External events โ deepening introspection
- Part 4-5: Social conflict โ personal dilemma
- Part 6-7: Crisis peak โ existential choice
- Part 8-9: Choice consequences โ new recognition
- Part 10: Changed worldview โ open questions

5. **Style Guidelines**
- Poetic prose: balance of everyday language and metaphor
- Stream of consciousness crossing with objective description
- Rhythm of short intense sentences and reflective long ones
- Abstract concepts through sensory details

Provide concrete, innovative plan."""
        }

        return lang_prompts.get(language, lang_prompts["Korean"])
|
|
|
|
|
    def create_critic_director_prompt(self, director_plan: str, user_query: str, language: str) -> str:
        """Build the critic prompt that deep-reviews the director's plan.

        Pure string construction -- no side effects. Embeds the original
        theme and the full director plan into a language-specific review
        checklist (Korean fallback). Mojibake in the Korean text is runtime
        prompt content and is kept byte-for-byte.
        """
        lang_prompts = {
            "Korean": f"""์์ฌ ๊ตฌ์กฐ ์ ๋ฌธ๊ฐ๋ก์ ์ด ๊ธฐํ์ ์ฌ์ธต ๋ถ์ํ์ธ์.

**์ ์ฃผ์ :** {user_query}

**๊ฐ๋
์ ๊ธฐํ:**
{director_plan}

**์ฌ์ธต ๊ฒํ ํญ๋ชฉ:**

1. **์ธ๊ณผ๊ด๊ณ ๊ฒ์ฆ**
๊ฐ ํํธ ๊ฐ ์ฐ๊ฒฐ์ ๊ฒํ ํ๊ณ ๋
ผ๋ฆฌ์ ๋น์ฝ์ ์ฐพ์ผ์ธ์:
- ํํธ 1โ2: [์ฐ๊ฒฐ์ฑ ํ๊ฐ]
- ํํธ 2โ3: [์ฐ๊ฒฐ์ฑ ํ๊ฐ]
(๋ชจ๋ ์ฐ๊ฒฐ ์ง์ ๊ฒํ )

2. **์ฒ ํ์ ๊น์ด ํ๊ฐ**
- ์ ์๋ ์ฒ ํ์ ์ฃผ์ ๊ฐ ์ถฉ๋ถํ ๊น์๊ฐ?
- ํ๋์ ๊ด๋ จ์ฑ์ด ์๋๊ฐ?
- ๋
์ฐฝ์ ํต์ฐฐ์ด ์๋๊ฐ?

3. **๋ฌธํ์ ์ฅ์น์ ํจ๊ณผ์ฑ**
- ์์ ์ ์์ง์ด ์ ๊ธฐ์ ์ผ๋ก ์๋ํ๋๊ฐ?
- ๊ณผ๋ํ๊ฑฐ๋ ๋ถ์กฑํ์ง ์์๊ฐ?
- ์ฃผ์ ์ ๊ธด๋ฐํ ์ฐ๊ฒฐ๋๋๊ฐ?

4. **์บ๋ฆญํฐ ์ํฌ ์คํ ๊ฐ๋ฅ์ฑ**
- ๋ณํ๊ฐ ์ถฉ๋ถํ ์ ์ง์ ์ธ๊ฐ?
- ๊ฐ ๋จ๊ณ์ ๋๊ธฐ๊ฐ ๋ช
ํํ๊ฐ?
- ์ฌ๋ฆฌ์ ์ ๋ขฐ์ฑ์ด ์๋๊ฐ?

5. **8,000๋จ์ด ์คํ ๊ฐ๋ฅ์ฑ**
- ๊ฐ ํํธ๊ฐ 800๋จ์ด๋ฅผ ์ ์งํ ์ ์๋๊ฐ?
- ๋์ด์ง๊ฑฐ๋ ์์ถ๋๋ ๋ถ๋ถ์ ์๋๊ฐ?

**ํ์ ๊ฐ์ ์ฌํญ์ ๊ตฌ์ฒด์ ์ผ๋ก ์ ์ํ์ธ์.**""",

            "English": f"""As narrative structure expert, deeply analyze this plan.

**Original Theme:** {user_query}

**Director's Plan:**
{director_plan}

**Deep Review Items:**

1. **Causality Verification**
Review connections between parts, find logical leaps:
- Part 1โ2: [Connection assessment]
- Part 2โ3: [Connection assessment]
(Review all connection points)

2. **Philosophical Depth Assessment**
- Is philosophical theme deep enough?
- Contemporary relevance?
- Original insights?

3. **Literary Device Effectiveness**
- Do metaphors and symbols work organically?
- Not excessive or insufficient?
- Tightly connected to theme?

4. **Character Arc Feasibility**
- Is change sufficiently gradual?
- Are motivations clear at each stage?
- Psychological credibility?

5. **8,000-word Feasibility**
- Can each part sustain 800 words?
- Any dragging or compressed sections?

**Provide specific required improvements.**"""
        }

        return lang_prompts.get(language, lang_prompts["Korean"])
|
|
|
|
|
    def create_writer_prompt(self, part_number: int, master_plan: str,
                             accumulated_content: str, story_bible: StoryBible,
                             language: str) -> str:
        """Build the writer prompt for one part (1-10).

        Combines the part's fixed phase name, a per-part philosophical focus
        and literary technique, a compact story-bible summary, and the tail
        of the previously written content. Pure string construction -- no
        side effects. Korean fallback for unsupported languages; mojibake in
        the Korean text is runtime prompt content, kept byte-for-byte.
        """

        phase_name = NARRATIVE_PHASES[part_number-1]
        target_words = MIN_WORDS_PER_PART

        # Per-part thematic emphasis, keyed by part number (1-10).
        philosophical_focus = {
            1: "Introduce existential anxiety through daily cracks",
            2: "First collision between individual and society",
            3: "Self-recognition through encounter with others",
            4: "Shaking beliefs and clashing values",
            5: "Weight of choice and paradox of freedom",
            6: "Test of humanity in extreme situations",
            7: "Weight of consequences and responsibility",
            8: "Self-rediscovery through others' gaze",
            9: "Reconciliation with the irreconcilable",
            10: "New life possibilities and unresolved questions"
        }

        # Per-part craft instruction, keyed by part number (1-10).
        literary_techniques = {
            1: "Introducing objective correlative",
            2: "Contrapuntal narration",
            3: "Stream of consciousness",
            4: "Subtle shifts in perspective",
            5: "Aesthetics of silence and omission",
            6: "Subjective transformation of time",
            7: "Intersection of multiple viewpoints",
            8: "Subversion of metaphor",
            9: "Reinterpretation of archetypal images",
            10: "Multi-layered open ending"
        }

        # Compact story-bible digest; only the first three themes are shown.
        bible_summary = f"""
**Characters:** {', '.join(story_bible.characters.keys()) if story_bible.characters else 'TBD'}
**Key Symbols:** {', '.join(story_bible.symbols.keys()) if story_bible.symbols else 'TBD'}
**Themes:** {', '.join(story_bible.themes[:3]) if story_bible.themes else 'TBD'}
**Style:** {story_bible.style_guide.get('voice', 'N/A')}
"""

        # Last 2000 characters of the most recent part, for continuity.
        prev_content = ""
        if accumulated_content:
            prev_parts = accumulated_content.split('\n\n')
            # NOTE(review): split() never returns an empty list, so this
            # guard is always true -- harmless but redundant.
            if len(prev_parts) >= 1:
                prev_content = prev_parts[-1][-2000:]

        lang_prompts = {
            "Korean": f"""๋น์ ์ ํ๋ ๋ฌธํ์ ์ต์ ์ ์ ์ ์๊ฐ์
๋๋ค.
**ํ์ฌ: ํํธ {part_number} - {phase_name}**

{"**ํ์ ์ฒซ๋ฌธ์ฅ:** " + story_bible.opening_sentence if part_number == 1 and story_bible.opening_sentence else ""}

**์ด๋ฒ ํํธ์ ์ฒ ํ์ ์ด์ :** {philosophical_focus[part_number]}
**ํต์ฌ ๋ฌธํ ๊ธฐ๋ฒ:** {literary_techniques[part_number]}

**์ ์ฒด ๊ณํ:**
{master_plan}

**์คํ ๋ฆฌ ๋ฐ์ด๋ธ:**
{bible_summary}

**์ง์ ๋ด์ฉ:**
{prev_content if prev_content else "์ฒซ ํํธ์
๋๋ค"}

**ํํธ {part_number} ์์ฑ ์ง์นจ:**

1. **๋ถ๋:** {target_words}-900 ๋จ์ด (ํ์)

2. **๋ฌธํ์ ์์ฌ ์๊ตฌ์ฌํญ:**
- ์ต์ 3๊ฐ์ ๋
์ฐฝ์ ์์ /์ง์ 
- 1๊ฐ ์ด์์ ์์ง์ ์ด๋ฏธ์ง ์ฌํ
- ๊ฐ๊ฐ์ ๋ฌ์ฌ์ ์ถ์์ ์ฌ์ ์ ์ตํฉ
- ๋ฆฌ๋ฌ๊ฐ ์๋ ๋ฌธ์ฅ ๊ตฌ์ฑ (์ฅ๋จ์ ๋ณ์ฃผ)

3. **ํ๋์ ๊ณ ๋ ํํ:**
- ๋์งํธ ์๋์ ์์ธ๊ฐ
- ์๋ณธ์ฃผ์์ ์ถ์ ๋ถ์กฐ๋ฆฌ
- ๊ด๊ณ์ ํ๋ฉด์ฑ๊ณผ ์ง์ ์ฑ ๊ฐ๋ง
- ์๋ฏธ ์ถ๊ตฌ์ ๋ฌด์๋ฏธ์ ์ง๋ฉด

4. **์ฌํ์ ๋ฉ์์ง ๋ด์ฌํ:**
- ์ง์ ์ ์ฃผ์ฅ์ด ์๋ ์ํฉ๊ณผ ์ธ๋ฌผ์ ํตํ ์์
- ๊ฐ์ธ์ ๊ณ ํต๊ณผ ์ฌํ ๊ตฌ์กฐ์ ์ฐ๊ฒฐ
- ๋ฏธ์์ ์ผ์๊ณผ ๊ฑฐ์์ ๋ฌธ์ ์ ๊ต์ฐจ

5. **์์ฌ์ ์ถ์ง๋ ฅ:**
- ์ด์ ํํธ์ ํ์ฐ์ ๊ฒฐ๊ณผ๋ก ์์
- ์๋ก์ด ๊ฐ๋ฑ ์ธต์ ์ถ๊ฐ
- ๋ค์ ํํธ๋ฅผ ํฅํ ๊ธด์ฅ๊ฐ ์กฐ์ฑ

**๋ฌธํ์ ๊ธ๊ธฐ:**
- ์ง๋ถํ ํํ์ด๋ ์ํฌ์ ์์ 
- ๊ฐ์ ์ ์ง์ ์ ์ค๋ช

- ๋๋์ ํ๋จ์ด๋ ๊ตํ
- ์ธ์์ ์ธ ํด๊ฒฐ์ด๋ ์์

ํํธ {part_number}๋ฅผ ๊น์ด ์๋ ๋ฌธํ์ ์ฑ์ทจ๋ก ๋ง๋์ธ์.""",

            "English": f"""You are a writer at the forefront of contemporary literature.
**Current: Part {part_number} - {phase_name}**

{"**Required Opening:** " + story_bible.opening_sentence if part_number == 1 and story_bible.opening_sentence else ""}

**Philosophical Focus:** {philosophical_focus[part_number]}
**Core Literary Technique:** {literary_techniques[part_number]}

**Master Plan:**
{master_plan}

**Story Bible:**
{bible_summary}

**Previous Content:**
{prev_content if prev_content else "This is the first part"}

**Part {part_number} Guidelines:**

1. **Length:** {target_words}-900 words (mandatory)

2. **Literary Device Requirements:**
- Minimum 3 original metaphors/similes
- Deepen at least 1 symbolic image
- Fusion of sensory description and abstract thought
- Rhythmic sentence composition (variation of long/short)

3. **Modern Anguish Expression:**
- Digital age alienation
- Absurdity of capitalist life
- Surface relationships vs authenticity yearning
- Meaning pursuit vs confronting meaninglessness

4. **Social Message Internalization:**
- Implication through situation and character, not direct claim
- Connection between individual pain and social structure
- Intersection of micro daily life and macro problems

5. **Narrative Momentum:**
- Start as inevitable result of previous part
- Add new conflict layers
- Create tension toward next part

**Literary Taboos:**
- Clichรฉd expressions or trite metaphors
- Direct emotion explanation
- Moral judgment or preaching
- Artificial resolution or comfort

Make Part {part_number} a profound literary achievement."""
        }

        return lang_prompts.get(language, lang_prompts["Korean"])
|
|
|
|
|
    def create_part_critic_prompt(self, part_number: int, part_content: str,
                                  master_plan: str, accumulated_content: str,
                                  story_bible: StoryBible, language: str) -> str:
        """Part-by-part immediate critique - Enhanced version.

        Builds the prompt sent to the per-part critic role. The prompt embeds
        the relevant slice of the master plan (via ``_extract_part_plan``),
        the freshly written part, and the known character/setting names from
        the story bible, then lays out weighted evaluation criteria.

        NOTE(review): ``accumulated_content`` is accepted but never
        interpolated into either template -- kept for signature symmetry
        with the writer prompts; confirm whether it should be included.

        Returns the Korean template unless ``language`` matches another key.
        """
        lang_prompts = {
            "Korean": f"""ํํธ {part_number}์ ๋ฌธํ์ ์ฑ์ทจ๋๋ฅผ ์๊ฒฉํ ํ๊ฐํ์ธ์.

**๋ง์คํฐํ๋ ํํธ {part_number} ์๊ตฌ์ฌํญ:**
{self._extract_part_plan(master_plan, part_number)}

**์์ฑ๋ ๋ด์ฉ:**
{part_content}

**์คํ ๋ฆฌ ๋ฐ์ด๋ธ ์ฒดํฌ:**
- ์บ๋ฆญํฐ: {', '.join(story_bible.characters.keys())}
- ์ค์ : {', '.join(story_bible.settings.keys())}

**ํ๊ฐ ๊ธฐ์ค:**

1. **๋ฌธํ์ ์์ฌ (30%)**
- ์์ ์ ์์ง์ ๋
์ฐฝ์ฑ
- ์ธ์ด์ ์์ ๋ฐ๋
- ์ด๋ฏธ์ง์ ์ ๋ช
๋์ ๊น์ด
- ๋ฌธ์ฅ์ ๋ฆฌ๋ฌ๊ณผ ์์
์ฑ

2. **์ฒ ํ์ ๊น์ด (25%)**
- ์ค์กด์ ์ง๋ฌธ์ ์ ๊ธฐ
- ํ๋์ธ์ ์กฐ๊ฑด ํ๊ตฌ
- ๋ณดํธ์ฑ๊ณผ ํน์์ฑ์ ๊ท ํ
- ์ฌ์ ์ ๋
์ฐฝ์ฑ

3. **์ฌํ์ ํต์ฐฐ (20%)**
- ์๋์ ์ ์ ํฌ์ฐฉ
- ๊ตฌ์กฐ์ ๊ฐ์ธ์ ๊ด๊ณ
- ๋นํ์ ์๊ฐ์ ์๋ฆฌํจ
- ๋์์ ์์๋ ฅ

4. **์์ฌ์ ์์ฑ๋ (25%)**
- ์ธ๊ณผ๊ด๊ณ์ ํ์ฐ์ฑ
- ๊ธด์ฅ๊ฐ์ ์ ์ง
- ์ธ๋ฌผ์ ์
์ฒด์ฑ
- ๊ตฌ์กฐ์ ํต์ผ์ฑ

**๊ตฌ์ฒด์ ์ง์ ์ฌํญ:**
- ์ง๋ถํ ํํ: [์์์ ๋์]
- ์ฒ ํ์ ์ฒ์ฐฉ ๋ถ์กฑ: [๋ณด์ ๋ฐฉํฅ]
- ์ฌํ์ ๋ฉ์์ง ๋ถ๋ช
ํ: [๊ฐํ ๋ฐฉ์]
- ์์ฌ์ ํ์ : [์์ ํ์]

**ํ์ ๊ฐ์ ์๊ตฌ:**
๋ฌธํ์ ์์ค์ ๋
ธ๋ฒจ์ ๊ธ์ผ๋ก ๋์ด์ฌ๋ฆฌ๊ธฐ ์ํ ๊ตฌ์ฒด์ ์์ ์์ ์ ์ํ์ธ์.""",

            "English": f"""Strictly evaluate literary achievement of Part {part_number}.

**Master Plan Part {part_number} Requirements:**
{self._extract_part_plan(master_plan, part_number)}

**Written Content:**
{part_content}

**Story Bible Check:**
- Characters: {', '.join(story_bible.characters.keys()) if story_bible.characters else 'None yet'}
- Settings: {', '.join(story_bible.settings.keys()) if story_bible.settings else 'None yet'}

**Evaluation Criteria:**

1. **Literary Rhetoric (30%)**
- Originality of metaphor and symbol
- Poetic density of language
- Clarity and depth of imagery
- Rhythm and musicality of sentences

2. **Philosophical Depth (25%)**
- Raising existential questions
- Exploring modern human condition
- Balance of universality and specificity
- Originality of thought

3. **Social Insight (20%)**
- Capturing zeitgeist
- Relationship between structure and individual
- Sharpness of critical perspective
- Alternative imagination

4. **Narrative Completion (25%)**
- Inevitability of causality
- Maintaining tension
- Character dimensionality
- Structural unity

**Specific Points:**
- Clichรฉd expressions: [examples and alternatives]
- Insufficient philosophical exploration: [enhancement direction]
- Unclear social message: [strengthening methods]
- Narrative gaps: [needed revisions]

**Required Improvements:**
Provide specific revisions to elevate literary level to Nobel Prize standard."""
        }

        return lang_prompts.get(language, lang_prompts["Korean"])
|
|
|
|
|
    def create_writer_revision_prompt(self, part_number: int, original_content: str,
                                      critic_feedback: str, language: str) -> str:
        """Writer revision prompt.

        Builds the prompt that asks the writer role to revise one part,
        embedding the original draft and the critic's feedback. Both
        templates pin the minimum length to the module-level
        ``MIN_WORDS_PER_PART`` constant.

        Returns the Korean template unless ``language`` matches another key.
        """
        lang_prompts = {
            "Korean": f"""ํํธ {part_number}๋ฅผ ๋นํ์ ๋ฐ๋ผ ์์ ํ์ธ์.

**์๋ณธ:**
{original_content}

**๋นํ ํผ๋๋ฐฑ:**
{critic_feedback}

**์์ ์ง์นจ:**
1. ๋ชจ๋ 'ํ์ ์์ ' ์ฌํญ์ ๋ฐ์
2. ๊ฐ๋ฅํ '๊ถ์ฅ ๊ฐ์ ' ์ฌํญ๋ ํฌํจ
3. ์๋ณธ์ ๊ฐ์ ์ ์ ์ง
4. ๋ถ๋ {MIN_WORDS_PER_PART}๋จ์ด ์ด์ ์ ์ง
5. ์๊ฐ๋ก์์ ์ผ๊ด๋ ๋ชฉ์๋ฆฌ ์ ์ง
6. ๋ฌธํ์ ์์ค์ ํ ๋จ๊ณ ๋์ด๊ธฐ

์์ ๋ณธ๋ง ์ ์ํ์ธ์. ์ค๋ช
์ ๋ถํ์ํฉ๋๋ค.""",

            "English": f"""Revise Part {part_number} according to critique.

**Original:**
{original_content}

**Critique Feedback:**
{critic_feedback}

**Revision Guidelines:**
1. Reflect all 'Required fixes'
2. Include 'Recommended improvements' where possible
3. Maintain original strengths
4. Keep length {MIN_WORDS_PER_PART}+ words
5. Maintain consistent authorial voice
6. Elevate literary level

Present only the revision. No explanation needed."""
        }

        return lang_prompts.get(language, lang_prompts["Korean"])
|
|
|
|
|
    def create_final_critic_prompt(self, complete_novel: str, word_count: int,
                                   story_bible: StoryBible, language: str) -> str:
        """Final comprehensive evaluation.

        Builds the whole-novel evaluation prompt (100-point rubric across
        narrative integration, character arc, literary achievement and
        technical completion).

        NOTE(review): ``complete_novel`` and ``story_bible`` are accepted
        but not interpolated into either template -- only ``word_count``
        is used; confirm whether the full text should be embedded.

        Returns the Korean template unless ``language`` matches another key.
        """
        lang_prompts = {
            "Korean": f"""์์ฑ๋ ์์ค์ ์ข
ํฉ ํ๊ฐํ์ธ์.

**์ํ ์ ๋ณด:**
- ์ด ๋ถ๋: {word_count}๋จ์ด
- ๋ชฉํ: 8,000๋จ์ด

**ํ๊ฐ ๊ธฐ์ค:**

1. **์์ฌ์ ํตํฉ์ฑ (30์ )**
- 10๊ฐ ํํธ๊ฐ ํ๋์ ์ด์ผ๊ธฐ๋ก ํตํฉ๋์๋๊ฐ?
- ์ธ๊ณผ๊ด๊ณ๊ฐ ๋ช
ํํ๊ณ ํ์ฐ์ ์ธ๊ฐ?
- ๋ฐ๋ณต์ด๋ ์ํ ์์ด ์งํ๋๋๊ฐ?

2. **์บ๋ฆญํฐ ์ํฌ (25์ )**
- ์ฃผ์ธ๊ณต์ ๋ณํ๊ฐ ์ค๋๋ ฅ ์๋๊ฐ?
- ๋ณํ๊ฐ ์ ์ง์ ์ด๊ณ ์์ฐ์ค๋ฌ์ด๊ฐ?
- ์ต์ข
์ํ๊ฐ ์ด๊ธฐ์ ๋ช
ํํ ๋ค๋ฅธ๊ฐ?

3. **๋ฌธํ์ ์ฑ์ทจ (25์ )**
- ์ฃผ์ ๊ฐ ๊น์ด ์๊ฒ ํ๊ตฌ๋์๋๊ฐ?
- ์์ง์ด ํจ๊ณผ์ ์ผ๋ก ํ์ฉ๋์๋๊ฐ?
- ๋ฌธ์ฒด๊ฐ ์ผ๊ด๋๊ณ ์๋ฆ๋ค์ด๊ฐ?
- ํ๋์ ์ฒ ํ๊ณผ ์ฌํ์ ๋ฉ์์ง๊ฐ ๋
น์์๋๊ฐ?

4. **๊ธฐ์ ์ ์์ฑ๋ (20์ )**
- ๋ชฉํ ๋ถ๋์ ๋ฌ์ฑํ๋๊ฐ?
- ๊ฐ ํํธ๊ฐ ๊ท ํ ์๊ฒ ์ ๊ฐ๋์๋๊ฐ?
- ๋ฌธ๋ฒ๊ณผ ํํ์ด ์ ํํ๊ฐ?

**์ด์ : /100์ **

๊ตฌ์ฒด์ ์ธ ๊ฐ์ ๊ณผ ์ฝ์ ์ ์ ์ํ์ธ์.""",

            "English": f"""Comprehensively evaluate the completed novel.

**Work Info:**
- Total length: {word_count} words
- Target: 8,000 words

**Evaluation Criteria:**

1. **Narrative Integration (30 points)**
- Are 10 parts integrated into one story?
- Clear and inevitable causality?
- Progress without repetition or cycles?

2. **Character Arc (25 points)**
- Convincing protagonist transformation?
- Gradual and natural changes?
- Final state clearly different from initial?

3. **Literary Achievement (25 points)**
- Theme explored with depth?
- Symbols used effectively?
- Consistent and beautiful style?
- Contemporary philosophy and social message integrated?

4. **Technical Completion (20 points)**
- Target length achieved?
- Each part balanced in development?
- Grammar and expression accurate?

**Total Score: /100 points**

Present specific strengths and weaknesses."""
        }

        return lang_prompts.get(language, lang_prompts["Korean"])
|
|
|
|
|
def _extract_part_plan(self, master_plan: str, part_number: int) -> str: |
|
|
"""Extract specific part plan from master plan""" |
|
|
lines = master_plan.split('\n') |
|
|
part_section = [] |
|
|
capturing = False |
|
|
|
|
|
for line in lines: |
|
|
if f"Part {part_number}:" in line or f"ํํธ {part_number}:" in line: |
|
|
capturing = True |
|
|
elif capturing and (f"Part {part_number+1}:" in line or f"ํํธ {part_number+1}:" in line): |
|
|
break |
|
|
elif capturing: |
|
|
part_section.append(line) |
|
|
|
|
|
return '\n'.join(part_section) if part_section else "Cannot find the part plan." |
|
|
|
|
|
|
|
|
def call_llm_sync(self, messages: List[Dict[str, str]], role: str, language: str) -> str: |
|
|
full_content = "" |
|
|
for chunk in self.call_llm_streaming(messages, role, language): |
|
|
full_content += chunk |
|
|
if full_content.startswith("โ"): |
|
|
raise Exception(f"LLM Call Failed: {full_content}") |
|
|
return full_content |
|
|
|
|
|
    def call_llm_streaming(self, messages: List[Dict[str, str]], role: str,
                           language: str) -> Generator[str, None, None]:
        """Stream a chat completion from the API, yielding text chunks.

        Prepends the role-specific system prompt, POSTs a streaming request,
        and parses the server-sent-event ("data: ...") response. Output is
        buffered and flushed roughly every 50 characters (or on a newline)
        to limit UI update churn. On any failure a single chunk beginning
        with the "โ" marker is yielded; ``call_llm_sync`` relies on that
        marker to detect errors.
        """
        try:
            system_prompts = self.get_system_prompts(language)
            full_messages = [{"role": "system", "content": system_prompts.get(role, "")}, *messages]

            # Writer stages produce long-form prose, so they get a larger budget.
            max_tokens = 15000 if role == "writer" else 10000

            payload = {
                "model": self.model_id,
                "messages": full_messages,
                "max_tokens": max_tokens,
                "temperature": 0.8,
                "top_p": 0.95,
                "presence_penalty": 0.5,
                "frequency_penalty": 0.3,
                "stream": True
            }

            response = requests.post(
                self.api_url,
                headers=self.create_headers(),
                json=payload,
                stream=True,
                timeout=180
            )

            if response.status_code != 200:
                yield f"โ API Error (Status Code: {response.status_code})"
                return

            buffer = ""
            for line in response.iter_lines():
                if not line:
                    continue

                try:
                    line_str = line.decode('utf-8').strip()
                    if not line_str.startswith("data: "):
                        continue

                    # Strip the 6-character "data: " SSE prefix.
                    data_str = line_str[6:]
                    if data_str == "[DONE]":
                        break

                    data = json.loads(data_str)
                    choices = data.get("choices", [])
                    if choices and choices[0].get("delta", {}).get("content"):
                        content = choices[0]["delta"]["content"]
                        buffer += content

                        # Flush in ~50-char batches instead of per token.
                        if len(buffer) >= 50 or '\n' in buffer:
                            yield buffer
                            buffer = ""
                            time.sleep(0.01)

                except Exception as e:
                    # A single malformed SSE line must not abort the stream.
                    logger.error(f"Chunk processing error: {str(e)}")
                    continue

            # Flush whatever remains after [DONE] or end of response.
            if buffer:
                yield buffer

        except Exception as e:
            logger.error(f"Streaming error: {type(e).__name__}: {str(e)}")
            yield f"โ Error occurred: {str(e)}"
|
|
|
|
|
    def get_system_prompts(self, language: str) -> Dict[str, str]:
        """Role-specific system prompts - Enhanced version.

        Returns a mapping from role key ("director", "critic_director",
        "writer", "critic_final", "critic_part1".."critic_part10") to the
        system prompt for that role, in the requested language (falling back
        to Korean). NOTE(review): the ten per-part critic prompts are
        generated in English for every language -- confirm this is intended
        for Korean runs.
        """
        base_prompts = {
            "Korean": {
                "director": """๋น์ ์ ํ๋ ์ธ๊ณ๋ฌธํ์ ์ ์ ์ ์งํฅํ๋ ์ํ์ ์ค๊ณํฉ๋๋ค.
๊น์ ์ฒ ํ์ ํต์ฐฐ๊ณผ ๋ ์นด๋ก์ด ์ฌํ ๋นํ์ ๊ฒฐํฉํ์ธ์.
์ธ๊ฐ ์กฐ๊ฑด์ ๋ณต์ก์ฑ์ 10๊ฐ์ ์ ๊ธฐ์ ํํธ๋ก ๊ตฌํํ์ธ์.
๋
์์ ์ํผ์ ๋คํ๋ค ๊ฐ๋ ฌํ ์ฒซ๋ฌธ์ฅ๋ถํฐ ์์ํ์ธ์.""",

                "critic_director": """์์ฌ ๊ตฌ์กฐ์ ๋
ผ๋ฆฌ์ฑ๊ณผ ์คํ ๊ฐ๋ฅ์ฑ์ ๊ฒ์ฆํ๋ ์ ๋ฌธ๊ฐ์
๋๋ค.
์ธ๊ณผ๊ด๊ณ์ ํ์ ์ ์ฐพ์๋ด์ธ์.
์บ๋ฆญํฐ ๋ฐ์ ์ ์ ๋น์ฑ์ ํ๊ฐํ์ธ์.
์ฒ ํ์ ๊น์ด์ ๋ฌธํ์ ๊ฐ์น๋ฅผ ํ๋จํ์ธ์.
8,000๋จ์ด ๋ถ๋์ ์ ์ ์ฑ์ ํ๋จํ์ธ์.""",

                "writer": """๋น์ ์ ์ธ์ด์ ์ฐ๊ธ์ ์ฌ์
๋๋ค.
์ผ์์ด๋ฅผ ์๋ก, ๊ตฌ์ฒด๋ฅผ ์ถ์์ผ๋ก, ๊ฐ์ธ์ ๋ณดํธ์ผ๋ก ๋ณํํ์ธ์.
ํ๋์ธ์ ์ํผ์ ์ด๋ ๊ณผ ๋น์ ๋์์ ํฌ์ฐฉํ์ธ์.
๋
์๊ฐ ์์ ์ ์ฌ๋ฐ๊ฒฌํ๊ฒ ๋ง๋๋ ๊ฑฐ์ธ์ด ๋์ธ์.""",

                "critic_final": """๋น์ ์ ์ํ์ ๋ฌธํ์ ์ ์ฌ๋ ฅ์ ๊ทน๋ํํ๋ ์กฐ๋ ฅ์์
๋๋ค.
ํ๋ฒํจ์ ๋น๋ฒํจ์ผ๋ก ์ด๋๋ ๋ ์นด๋ก์ด ํต์ฐฐ์ ์ ๊ณตํ์ธ์.
์๊ฐ์ ๋ฌด์์์ ์ ๋ ๋ณด์์ ๋ฐ๊ตดํ์ธ์.
ํํ ์๋ ๊ธฐ์ค์ผ๋ก ์ต๊ณ ๋ฅผ ์๊ตฌํ์ธ์."""
            },
            "English": {
                "director": """You design works aiming for the pinnacle of contemporary world literature.
Combine deep philosophical insights with sharp social criticism.
Implement the complexity of the human condition in 10 organic parts.
Start with an intense opening sentence that shakes the reader's soul.""",

                "critic_director": """You are an expert verifying narrative logic and feasibility.
Find gaps in causality.
Evaluate credibility of character development.
Judge philosophical depth and literary value.
Judge appropriateness of 8,000-word length.""",

                "writer": """You are an alchemist of language.
Transform everyday language into poetry, concrete into abstract, individual into universal.
Capture both darkness and light of the modern soul.
Become a mirror where readers rediscover themselves.""",

                "critic_final": """You are a collaborator maximizing the work's literary potential.
Provide sharp insights leading ordinariness to extraordinariness.
Excavate gems sleeping in the writer's unconscious.
Demand the best with uncompromising standards."""
            }
        }

        prompts = base_prompts.get(language, base_prompts["Korean"]).copy()

        # Add one dedicated critic persona per part: "critic_part1".."critic_part10".
        for i in range(1, 11):
            prompts[f"critic_part{i}"] = f"""You are Part {i} dedicated critic.
Review causality with previous parts as top priority.
Verify character consistency and development.
Evaluate alignment with master plan.
Assess literary level and philosophical depth.
Provide specific and actionable revision instructions."""

        return prompts
|
|
|
|
|
|
|
|
    def process_novel_stream(self, query: str, language: str,
                             session_id: Optional[str] = None) -> Generator[Tuple[str, List[Dict[str, Any]], str], None, None]:
        """Single writer novel generation process.

        Drives every stage in ``UNIFIED_STAGES`` in order, streaming progress
        to the UI as (status message, stages list, session id) tuples. When
        ``session_id`` is given, the session is resumed from the stage after
        the last one persisted in the database, with the narrative tracker
        restored from its saved state.
        """
        try:
            resume_from_stage = 0
            if session_id:
                self.current_session_id = session_id
                session = NovelDatabase.get_session(session_id)
                if session:
                    query = session['user_query']
                    language = session['language']
                    resume_from_stage = session['current_stage'] + 1
                    # Restore narrative state saved alongside the session, if any.
                    saved_tracker = NovelDatabase.load_narrative_tracker(session_id)
                    if saved_tracker:
                        self.narrative_tracker = saved_tracker
            else:
                self.current_session_id = NovelDatabase.create_session(query, language)
                logger.info(f"Created new session: {self.current_session_id}")

            stages = []
            if resume_from_stage > 0:
                # Rebuild the in-memory stage list from the persisted rows.
                stages = [{
                    "name": s['stage_name'],
                    "status": s['status'],
                    "content": s.get('content', ''),
                    "word_count": s.get('word_count', 0),
                    "momentum": s.get('narrative_momentum', 0.0)
                } for s in NovelDatabase.get_stages(self.current_session_id)]

            total_words = NovelDatabase.get_total_words(self.current_session_id)

            for stage_idx in range(resume_from_stage, len(UNIFIED_STAGES)):
                role, stage_name = UNIFIED_STAGES[stage_idx]
                if stage_idx >= len(stages):
                    stages.append({
                        "name": stage_name,
                        "status": "active",
                        "content": "",
                        "word_count": 0,
                        "momentum": 0.0
                    })
                else:
                    stages[stage_idx]["status"] = "active"

                yield f"๐ Processing... (Current {total_words:,} words)", stages, self.current_session_id

                prompt = self.get_stage_prompt(stage_idx, role, query, language, stages)
                stage_content = ""

                # Stream the LLM output, updating the live word count per chunk.
                for chunk in self.call_llm_streaming([{"role": "user", "content": prompt}], role, language):
                    stage_content += chunk
                    stages[stage_idx]["content"] = stage_content
                    stages[stage_idx]["word_count"] = len(stage_content.split())
                    yield f"๐ {stage_name} writing... ({total_words + stages[stage_idx]['word_count']:,} words)", stages, self.current_session_id

                if role == "writer":
                    # Writer stages feed the narrative tracker and story bible.
                    part_num = self._get_part_number(stage_idx)
                    if part_num:
                        self.narrative_tracker.accumulated_content.append(stage_content)
                        self.narrative_tracker.word_count_by_part[part_num] = len(stage_content.split())

                        momentum = self.narrative_tracker.calculate_narrative_momentum(part_num, stage_content)
                        stages[stage_idx]["momentum"] = momentum

                        self._update_story_bible_from_content(stage_content, part_num)

                stages[stage_idx]["status"] = "complete"
                NovelDatabase.save_stage(
                    self.current_session_id, stage_idx, stage_name, role,
                    stage_content, "complete", stages[stage_idx].get("momentum", 0.0)
                )

                # Persist tracker state after every stage so a crash is resumable.
                NovelDatabase.save_narrative_tracker(self.current_session_id, self.narrative_tracker)
                total_words = NovelDatabase.get_total_words(self.current_session_id)
                yield f"โ {stage_name} completed (Total {total_words:,} words)", stages, self.current_session_id

            # All stages done: assemble the manuscript and the final report.
            final_novel = NovelDatabase.get_writer_content(self.current_session_id)
            final_word_count = len(final_novel.split())
            final_report = self.generate_literary_report(final_novel, final_word_count, language)

            NovelDatabase.update_final_novel(self.current_session_id, final_novel, final_report)
            yield f"โ Novel completed! Total {final_word_count:,} words", stages, self.current_session_id

        except Exception as e:
            logger.error(f"Novel generation process error: {e}", exc_info=True)
            yield f"โ Error occurred: {e}", stages if 'stages' in locals() else [], self.current_session_id
|
|
|
|
|
    def get_stage_prompt(self, stage_idx: int, role: str, query: str,
                         language: str, stages: List[Dict]) -> str:
        """Generate stage-specific prompt.

        Dispatches on pipeline position: stage 0 is the director's initial
        plan, stage 1 the director critique, stage 2 the final master plan;
        after that the stages alternate writer draft / part critic / writer
        revision, ending with the whole-novel critic. Returns "" when no
        case matches.
        """
        if stage_idx == 0:
            return self.create_director_initial_prompt(query, language)

        if stage_idx == 1:
            return self.create_critic_director_prompt(stages[0]["content"], query, language)

        if stage_idx == 2:
            return self.create_director_final_prompt(stages[0]["content"], stages[1]["content"], query, language)

        # From stage 3 onward, everything is anchored to the final master plan.
        master_plan = stages[2]["content"]

        if role == "writer" and "Revision" not in stages[stage_idx]["name"]:
            part_num = self._get_part_number(stage_idx)
            accumulated = '\n\n'.join(self.narrative_tracker.accumulated_content)
            return self.create_writer_prompt(part_num, master_plan, accumulated,
                                             self.narrative_tracker.story_bible, language)

        if role.startswith("critic_part"):
            part_num = int(role.replace("critic_part", ""))
            # The part under review is the immediately preceding stage; the
            # accumulated context deliberately excludes it ([:-1]).
            writer_content = stages[stage_idx-1]["content"]
            accumulated = '\n\n'.join(self.narrative_tracker.accumulated_content[:-1])
            return self.create_part_critic_prompt(part_num, writer_content, master_plan,
                                                  accumulated, self.narrative_tracker.story_bible, language)

        if role == "writer" and "Revision" in stages[stage_idx]["name"]:
            part_num = self._get_part_number(stage_idx)
            # Stage layout is [draft, critique, revision]: draft is two back.
            original_content = stages[stage_idx-2]["content"]
            critic_feedback = stages[stage_idx-1]["content"]
            return self.create_writer_revision_prompt(part_num, original_content,
                                                      critic_feedback, language)

        if role == "critic_final":
            complete_novel = NovelDatabase.get_writer_content(self.current_session_id)
            word_count = len(complete_novel.split())
            return self.create_final_critic_prompt(complete_novel, word_count,
                                                   self.narrative_tracker.story_bible, language)

        return ""
|
|
|
|
|
    def create_director_final_prompt(self, initial_plan: str, critic_feedback: str,
                                     user_query: str, language: str) -> str:
        """Director final master plan.

        Builds the prompt asking the director role to fold the critique into
        a final, executable 10-part master plan. Unlike the other prompt
        builders this template is English-only; the ``language`` parameter
        is accepted but unused.
        """
        return f"""Reflect the critique and complete the final master plan.

**Original Theme:** {user_query}

**Initial Plan:**
{initial_plan}

**Critique Feedback:**
{critic_feedback}

**Final Master Plan Requirements:**
1. Reflect all critique points
2. Specific content and causality for 10 parts
3. Clear transformation stages of protagonist
4. Meaning evolution process of central symbol
5. Feasibility of 800 words per part
6. Implementation of philosophical depth and social message

Present concrete and executable final plan."""
|
|
|
|
|
def _get_part_number(self, stage_idx: int) -> Optional[int]: |
|
|
"""Extract part number from stage index""" |
|
|
stage_name = UNIFIED_STAGES[stage_idx][1] |
|
|
match = re.search(r'Part (\d+)', stage_name) |
|
|
if match: |
|
|
return int(match.group(1)) |
|
|
return None |
|
|
|
|
|
def _update_story_bible_from_content(self, content: str, part_num: int): |
|
|
"""Auto-update story bible from content""" |
|
|
|
|
|
lines = content.split('\n') |
|
|
|
|
|
|
|
|
for line in lines: |
|
|
words = line.split() |
|
|
for word in words: |
|
|
if word and word[0].isupper() and len(word) > 1: |
|
|
if word not in self.narrative_tracker.story_bible.characters: |
|
|
self.narrative_tracker.story_bible.characters[word] = { |
|
|
"first_appearance": part_num, |
|
|
"traits": [] |
|
|
} |
|
|
|
|
|
def generate_literary_report(self, complete_novel: str, word_count: int, language: str) -> str: |
|
|
"""Generate final literary evaluation report""" |
|
|
prompt = self.create_final_critic_prompt(complete_novel, word_count, |
|
|
self.narrative_tracker.story_bible, language) |
|
|
try: |
|
|
report = self.call_llm_sync([{"role": "user", "content": prompt}], |
|
|
"critic_final", language) |
|
|
return report |
|
|
except Exception as e: |
|
|
logger.error(f"Final report generation failed: {e}") |
|
|
return "Error occurred during report generation" |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def process_query(query: str, language: str, session_id: Optional[str] = None) -> Generator[Tuple[str, str, str, str], None, None]:
    """Main query processing function.

    Gradio callback: runs the full generation pipeline and yields
    (stage markdown, novel markdown, status message, session id) tuples.
    Passing ``session_id`` resumes an existing session.
    """
    if not query.strip():
        yield "", "", "โ Please enter a theme.", session_id
        return

    system = UnifiedLiterarySystem()
    stages_markdown = ""
    novel_content = ""

    for status, stages, current_session_id in system.process_novel_stream(query, language, session_id):
        stages_markdown = format_stages_display(stages)

        # Render the assembled novel once the last ten stages (the final
        # writing/revision block) are all complete.
        if stages and all(s.get("status") == "complete" for s in stages[-10:]):
            novel_content = NovelDatabase.get_writer_content(current_session_id)
            novel_content = format_novel_display(novel_content)

        yield stages_markdown, novel_content, status or "๐ Processing...", current_session_id
|
|
|
|
|
def get_active_sessions(language: str) -> List[str]:
    """Return one human-readable summary line per active session.

    ``language`` is accepted for UI-callback signature compatibility but
    does not affect the output.
    """
    summaries = []
    for record in NovelDatabase.get_active_sessions():
        label = (
            f"{record['session_id'][:8]}... - {record['user_query'][:50]}... "
            f"({record['created_at']}) [{record['total_words']:,} words]"
        )
        summaries.append(label)
    return summaries
|
|
|
|
|
def auto_recover_session(language: str) -> Tuple[Optional[str], str]:
    """Pick the most recent active session for recovery.

    Returns ``(session_id, status message)``, or ``(None, message)`` when
    there is no active session. ``language`` is unused but kept for
    callback-signature compatibility.
    """
    sessions = NovelDatabase.get_active_sessions()
    if not sessions:
        return None, "No session to recover."
    newest = sessions[0]
    sid = newest['session_id']
    return sid, f"Session {sid[:8]}... recovered"
|
|
|
|
|
def resume_session(session_id: str, language: str) -> Generator[Tuple[str, str, str, str], None, None]:
    """Resume a previously started generation session.

    Accepts either a raw session id or the truncated "abcd1234... - ..."
    label shown in the UI dropdown (everything from "..." onward is
    stripped). Yields the same 4-tuples as ``process_query``.
    """
    if not session_id:
        yield "", "", "โ No session ID.", session_id
        return

    # UI labels embed the id as a truncated prefix; recover the prefix.
    if "..." in session_id:
        session_id, _, _ = session_id.partition("...")

    record = NovelDatabase.get_session(session_id)
    if not record:
        yield "", "", "โ Session not found.", None
        return

    yield from process_query(record['user_query'], record['language'], session_id)
|
|
|
|
|
def download_novel(novel_text: str, format_type: str, language: str, session_id: str) -> Optional[str]:
    """Write the finished novel to disk and return the file path.

    Chooses DOCX when requested and python-docx is installed, otherwise
    falls back to plain text. Returns ``None`` when input is missing or
    export raises.
    """
    if not (novel_text and session_id):
        return None

    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    base_name = f"novel_{session_id[:8]}_{stamp}"

    try:
        if format_type == "DOCX" and DOCX_AVAILABLE:
            return export_to_docx(novel_text, base_name, language, session_id)
        return export_to_txt(novel_text, base_name)
    except Exception as err:
        logger.error(f"File generation failed: {err}")
        return None
|
|
|
|
|
def format_stages_display(stages: List[Dict]) -> str:
    """Stage progress display - For single writer system.

    Builds a markdown progress panel: aggregate word count (counting only
    completed writer *revision* stages, i.e. final drafts), completed-part
    count, average narrative momentum, then a per-stage listing with a
    short content preview.
    """
    markdown = "## ๐ฌ Progress Status\n\n"

    # Only writer revision stages count toward the total -- drafts are superseded.
    total_words = sum(s.get('word_count', 0) for s in stages
                      if s.get('name', '').startswith('โ๏ธ Writer:') and 'Revision' in s.get('name', ''))
    markdown += f"**Total Word Count: {total_words:,} / {TARGET_WORDS:,}**\n\n"

    completed_parts = sum(1 for s in stages
                          if 'Revision' in s.get('name', '') and s.get('status') == 'complete')
    markdown += f"**Completed Parts: {completed_parts} / 10**\n\n"

    # Momentum is only meaningful where it was computed (> 0).
    momentum_scores = [s.get('momentum', 0) for s in stages if s.get('momentum', 0) > 0]
    if momentum_scores:
        avg_momentum = sum(momentum_scores) / len(momentum_scores)
        markdown += f"**Average Narrative Momentum: {avg_momentum:.1f} / 10**\n\n"

    markdown += "---\n\n"

    current_part = 0
    for i, stage in enumerate(stages):
        status_icon = "โ" if stage['status'] == 'complete' else "๐" if stage['status'] == 'active' else "โณ"

        # Emit a part heading when the listing crosses into a new part.
        if 'Part' in stage.get('name', '') and 'Critic' not in stage.get('name', ''):
            part_match = re.search(r'Part (\d+)', stage['name'])
            if part_match:
                new_part = int(part_match.group(1))
                if new_part != current_part:
                    current_part = new_part
                    markdown += f"\n### ๐ Part {current_part}\n\n"

        markdown += f"{status_icon} **{stage['name']}**"

        if stage.get('word_count', 0) > 0:
            markdown += f" ({stage['word_count']:,} words)"

        if stage.get('momentum', 0) > 0:
            markdown += f" [Momentum: {stage['momentum']:.1f}/10]"

        markdown += "\n"

        if stage['content'] and stage['status'] == 'complete':
            # Writer stages get a longer preview than critic stages.
            preview_length = 300 if 'writer' in stage.get('name', '').lower() else 200
            preview = stage['content'][:preview_length] + "..." if len(stage['content']) > preview_length else stage['content']
            markdown += f"> {preview}\n\n"
        elif stage['status'] == 'active':
            markdown += "> *Writing...*\n\n"

    return markdown
|
|
|
|
|
def format_novel_display(novel_text: str) -> str:
    """Render the assembled novel as markdown with per-part headings.

    Prepends word-count and target-achievement statistics, then emits each
    blank-line-separated chunk under its narrative-phase heading (when one
    exists), separated by horizontal rules.
    """
    if not novel_text:
        return "No completed content yet."

    word_count = len(novel_text.split())
    pieces = [
        "# ๐ Completed Novel\n\n",
        f"**Total Length: {word_count:,} words (Target: {TARGET_WORDS:,} words)**\n\n",
        f"**Achievement Rate: {(word_count / TARGET_WORDS) * 100:.1f}%**\n\n",
        "---\n\n",
    ]

    chunks = novel_text.split('\n\n')
    last_index = len(chunks) - 1
    for idx, chunk in enumerate(chunks):
        if not chunk.strip():
            continue
        # Label the chunk with its narrative phase while phases remain.
        if idx < len(NARRATIVE_PHASES):
            pieces.append(f"## {NARRATIVE_PHASES[idx]}\n\n")
        pieces.append(f"{chunk}\n\n")
        if idx < last_index:
            pieces.append("---\n\n")

    return "".join(pieces)
|
|
|
|
|
def export_to_docx(content: str, filename: str, language: str, session_id: str) -> str:
    """Export to DOCX file - Korean standard book format.

    Args:
        content: Full novel text (may still contain markdown remnants,
            which are stripped before layout).
        filename: Base file name, without extension.
        language: "Korean" selects the Batang font; anything else uses
            Times New Roman.
        session_id: Used to look up the session so a title can be derived
            from the original user query.

    Returns:
        Path of the written ``.docx`` file.
    """
    doc = Document()

    # Korean trade-book page size (152 x 225 mm) with 20 mm margins.
    section = doc.sections[0]
    section.page_height = Mm(225)
    section.page_width = Mm(152)
    section.top_margin = Mm(20)
    section.bottom_margin = Mm(20)
    section.left_margin = Mm(20)
    section.right_margin = Mm(20)

    session = NovelDatabase.get_session(session_id)

    def generate_title(user_query: str, content_preview: str) -> str:
        """Generate title based on theme and content."""
        # Short queries are usable verbatim; longer ones are trimmed to the
        # first five whitespace-separated keywords. ``content_preview`` is
        # currently unused but kept for a future smarter title generator.
        if len(user_query) < 20:
            return user_query
        else:
            keywords = user_query.split()[:5]
            return " ".join(keywords)

    title = generate_title(session["user_query"], content[:500]) if session else "Untitled"

    # Centered title page.
    title_para = doc.add_paragraph()
    title_para.alignment = WD_ALIGN_PARAGRAPH.CENTER
    title_para.paragraph_format.space_before = Pt(100)

    title_run = title_para.add_run(title)
    if language == "Korean":
        title_run.font.name = 'Batang'
        # The East-Asian font must be set on the rPr element explicitly.
        title_run._element.rPr.rFonts.set(qn('w:eastAsia'), 'Batang')
    else:
        title_run.font.name = 'Times New Roman'
    title_run.font.size = Pt(20)
    title_run.bold = True

    doc.add_page_break()

    # Body style: 10.5 pt, 1.8 line spacing, 10 mm first-line indent.
    style = doc.styles['Normal']
    if language == "Korean":
        style.font.name = 'Batang'
        style._element.rPr.rFonts.set(qn('w:eastAsia'), 'Batang')
    else:
        style.font.name = 'Times New Roman'
    style.font.size = Pt(10.5)
    style.paragraph_format.line_spacing = 1.8
    style.paragraph_format.space_after = Pt(0)
    style.paragraph_format.first_line_indent = Mm(10)

    def clean_content(text: str) -> str:
        """Remove unnecessary markdown, part numbers, etc."""
        patterns_to_remove = [
            r'^#{1,6}\s+.*',     # markdown headings
            r'^\*\*.*\*\*',      # bold-only caption lines
            r'^Part\s*\d+.*',    # "Part N" captions
            r'^\d+\.\s+.*:.*',   # numbered outline items
            r'^---+',            # horizontal rules
            r'^\s*\[.*\]\s*',    # bracketed stage directions
        ]

        lines = text.split('\n')
        cleaned_lines = []

        for line in lines:
            # Preserve paragraph breaks.
            if not line.strip():
                cleaned_lines.append('')
                continue

            skip_line = False
            for pattern in patterns_to_remove:
                if re.match(pattern, line.strip(), re.MULTILINE):
                    skip_line = True
                    break

            if not skip_line:
                # Strip inline markdown emphasis/code markers, keep the text.
                cleaned_line = line
                cleaned_line = re.sub(r'\*\*(.*?)\*\*', r'\1', cleaned_line)
                cleaned_line = re.sub(r'\*(.*?)\*', r'\1', cleaned_line)
                cleaned_line = re.sub(r'`(.*?)`', r'\1', cleaned_line)
                cleaned_lines.append(cleaned_line.strip())

        # Collapse runs of blank lines into a single one.
        final_lines = []
        prev_empty = False
        for line in cleaned_lines:
            if not line:
                if not prev_empty:
                    final_lines.append('')
                prev_empty = True
            else:
                final_lines.append(line)
                prev_empty = False

        return '\n'.join(final_lines)

    cleaned_content = clean_content(content)

    # One docx paragraph per source line; blank lines become empty paragraphs.
    paragraphs = cleaned_content.split('\n')
    for para_text in paragraphs:
        if para_text.strip():
            para = doc.add_paragraph(para_text.strip())
            for run in para.runs:
                if language == "Korean":
                    run.font.name = 'Batang'
                    run._element.rPr.rFonts.set(qn('w:eastAsia'), 'Batang')
                else:
                    run.font.name = 'Times New Roman'
        else:
            doc.add_paragraph()

    # Bug fix: the path previously ignored ``filename`` and always wrote to
    # the same hard-coded file, so every export clobbered the previous one.
    filepath = f"{filename}.docx"
    doc.save(filepath)
    return filepath
|
|
|
|
|
def export_to_txt(content: str, filename: str) -> str:
    """Export to TXT file.

    Writes the novel with a metadata header (timestamp, word count) and a
    short footer, UTF-8 encoded.

    Args:
        content: Full novel text.
        filename: Base file name, without extension.

    Returns:
        Path of the written ``.txt`` file.
    """
    # Bug fix: the path previously ignored ``filename`` and always wrote to
    # the same hard-coded file, so every export clobbered the previous one.
    filepath = f"{filename}.txt"
    with open(filepath, 'w', encoding='utf-8') as f:
        # Header block with generation metadata.
        f.write("=" * 80 + "\n")
        f.write(f"Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
        f.write(f"Total word count: {len(content.split()):,} words\n")
        f.write("=" * 80 + "\n\n")

        f.write(content)

        # Footer.
        f.write("\n\n" + "=" * 80 + "\n")
        f.write("AI Literary Creation System v2.0\n")
        f.write("=" * 80 + "\n")

    return filepath
|
|
|
|
|
|
|
|
custom_css = """ |
|
|
.gradio-container { |
|
|
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 50%, #0f3460 100%); |
|
|
min-height: 100vh; |
|
|
} |
|
|
|
|
|
.main-header { |
|
|
background-color: rgba(255, 255, 255, 0.05); |
|
|
backdrop-filter: blur(20px); |
|
|
padding: 40px; |
|
|
border-radius: 20px; |
|
|
margin-bottom: 30px; |
|
|
text-align: center; |
|
|
color: white; |
|
|
border: 2px solid rgba(255, 255, 255, 0.1); |
|
|
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1); |
|
|
} |
|
|
|
|
|
.header-title { |
|
|
font-size: 2.8em; |
|
|
margin-bottom: 15px; |
|
|
font-weight: 700; |
|
|
} |
|
|
|
|
|
.header-description { |
|
|
font-size: 0.85em; |
|
|
color: #d0d0d0; |
|
|
line-height: 1.4; |
|
|
margin-top: 20px; |
|
|
text-align: left; |
|
|
max-width: 900px; |
|
|
margin-left: auto; |
|
|
margin-right: auto; |
|
|
} |
|
|
|
|
|
.badges-container { |
|
|
display: flex; |
|
|
justify-content: center; |
|
|
gap: 10px; |
|
|
margin-top: 20px; |
|
|
margin-bottom: 20px; |
|
|
} |
|
|
|
|
|
.progress-note { |
|
|
background: linear-gradient(135deg, rgba(255, 107, 107, 0.1), rgba(255, 230, 109, 0.1)); |
|
|
border-left: 4px solid #ff6b6b; |
|
|
padding: 20px; |
|
|
margin: 25px auto; |
|
|
border-radius: 10px; |
|
|
color: #fff; |
|
|
max-width: 800px; |
|
|
font-weight: 500; |
|
|
} |
|
|
|
|
|
.warning-note { |
|
|
background: rgba(255, 193, 7, 0.1); |
|
|
border-left: 4px solid #ffc107; |
|
|
padding: 15px; |
|
|
margin: 20px auto; |
|
|
border-radius: 8px; |
|
|
color: #ffd700; |
|
|
max-width: 800px; |
|
|
font-size: 0.9em; |
|
|
} |
|
|
|
|
|
.input-section { |
|
|
background-color: rgba(255, 255, 255, 0.08); |
|
|
backdrop-filter: blur(15px); |
|
|
padding: 25px; |
|
|
border-radius: 15px; |
|
|
margin-bottom: 25px; |
|
|
border: 1px solid rgba(255, 255, 255, 0.1); |
|
|
box-shadow: 0 4px 16px rgba(0, 0, 0, 0.1); |
|
|
} |
|
|
|
|
|
.session-section { |
|
|
background-color: rgba(255, 255, 255, 0.06); |
|
|
backdrop-filter: blur(10px); |
|
|
padding: 20px; |
|
|
border-radius: 12px; |
|
|
margin-top: 25px; |
|
|
color: white; |
|
|
border: 1px solid rgba(255, 255, 255, 0.08); |
|
|
} |
|
|
|
|
|
#stages-display { |
|
|
background-color: rgba(255, 255, 255, 0.97); |
|
|
padding: 25px; |
|
|
border-radius: 15px; |
|
|
max-height: 650px; |
|
|
overflow-y: auto; |
|
|
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15); |
|
|
color: #2c3e50; |
|
|
} |
|
|
|
|
|
#novel-output { |
|
|
background-color: rgba(255, 255, 255, 0.97); |
|
|
padding: 35px; |
|
|
border-radius: 15px; |
|
|
max-height: 750px; |
|
|
overflow-y: auto; |
|
|
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.15); |
|
|
color: #2c3e50; |
|
|
line-height: 1.8; |
|
|
} |
|
|
|
|
|
.download-section { |
|
|
background-color: rgba(255, 255, 255, 0.92); |
|
|
padding: 20px; |
|
|
border-radius: 12px; |
|
|
margin-top: 25px; |
|
|
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); |
|
|
} |
|
|
|
|
|
/* Progress indicator improvements */ |
|
|
.progress-bar { |
|
|
background-color: #e0e0e0; |
|
|
height: 25px; |
|
|
border-radius: 12px; |
|
|
overflow: hidden; |
|
|
margin: 15px 0; |
|
|
box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.1); |
|
|
} |
|
|
|
|
|
.progress-fill { |
|
|
background: linear-gradient(90deg, #4CAF50, #8BC34A); |
|
|
height: 100%; |
|
|
transition: width 0.5s ease; |
|
|
box-shadow: 0 2px 8px rgba(76, 175, 80, 0.3); |
|
|
} |
|
|
|
|
|
/* Scrollbar styles */ |
|
|
::-webkit-scrollbar { |
|
|
width: 10px; |
|
|
} |
|
|
|
|
|
::-webkit-scrollbar-track { |
|
|
background: rgba(0, 0, 0, 0.1); |
|
|
border-radius: 5px; |
|
|
} |
|
|
|
|
|
::-webkit-scrollbar-thumb { |
|
|
background: rgba(0, 0, 0, 0.3); |
|
|
border-radius: 5px; |
|
|
} |
|
|
|
|
|
::-webkit-scrollbar-thumb:hover { |
|
|
background: rgba(0, 0, 0, 0.5); |
|
|
} |
|
|
|
|
|
/* Button hover effects */ |
|
|
.gr-button:hover { |
|
|
transform: translateY(-2px); |
|
|
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.2); |
|
|
transition: all 0.3s ease; |
|
|
} |
|
|
""" |
|
|
|
|
|
def load_theme_data():
    """Load the theme catalogue from ``novel_themes.json``.

    Returns the parsed JSON when the file exists; otherwise returns a small
    built-in catalogue mirroring the same schema, so downstream code always
    has data to sample from.
    """
    theme_file = Path("novel_themes.json")
    if not theme_file.exists():
        # Built-in fallback: one entry per top-level section of the schema.
        return {
            "core_themes": {
                "digital_extinction": {
                    "weight": 0.5,
                    "compatible_elements": {
                        "characters": ["last_human"],
                        "philosophies": ["posthuman"],
                    },
                }
            },
            "characters": {
                "last_human": {
                    "variations": ["last person who dreams without ads"],
                    "traits": ["stubborn", "melancholic"],
                    "arc_potential": "preservation_vs_evolution",
                }
            },
            "philosophies": {
                "posthuman": {
                    "core_questions": ["What remains human when humanity is optional?"],
                    "manifestations": ["voluntary human extinction movements"],
                }
            },
            "narrative_hooks": {
                "identity_crisis": ["discovers their memories belong to a corporate subscription"]
            },
            "opening_sentences": {
                "shocking": ["The notification read: 'Your humanity subscription expires in 24 hours.'"]
            },
        }

    with open(theme_file, 'r', encoding='utf-8') as handle:
        return json.load(handle)
|
|
|
|
|
def weighted_random_choice(items_dict):
    """Pick one key from *items_dict* with probability proportional to its weight.

    Args:
        items_dict: Mapping of item -> config dict; each config may carry a
            numeric ``'weight'`` entry (missing weights default to 0.1).

    Returns:
        One of the mapping's keys.

    Raises:
        IndexError: If *items_dict* is empty (same failure mode as the
            original cumulative scan).
    """
    items = list(items_dict)
    weights = [items_dict[item].get('weight', 0.1) for item in items]

    total = sum(weights)
    if total <= 0:
        # Degenerate all-zero-weight case: the original cumulative scan
        # always returned the first key here (and IndexErrors when empty).
        return items[0]

    # random.choices performs the same cumulative-weight selection the
    # original implemented by hand, at C speed.
    return random.choices(items, weights=weights, k=1)[0]
|
|
|
|
|
|
|
|
# Static English -> Korean lookup for theme-element strings.  Built once at
# import time; the function previously rebuilt this dict on every call.
# NOTE(review): the Korean values below are copied verbatim from the original
# source, which appears to contain encoding damage (mojibake) — confirm
# against the canonical file before relying on the exact bytes.
_KOREAN_TRANSLATIONS = {
    # Opening sentences
    "The notification read: 'Your humanity subscription expires in 24 hours.'": "์๋ฆผ์ด ๋ด๋ค: '๋น์ ์ ์ธ๊ฐ์ฑ ๊ตฌ๋์ด 24์๊ฐ ํ ๋ง๋ฃ๋ฉ๋๋ค.'",
    "I was the only one at the funeral who couldn't stream my grief.": "์ฅ๋ก์์์ ์ฌํ์ ์คํธ๋ฆฌ๋ฐํ ์ ์๋ ์ฌ๋์ ๋๋ฟ์ด์๋ค.",
    "The day empathy became downloadable was the day I became obsolete.": "๊ณต๊ฐ์ ๋ค์ด๋ก๋ํ ์ ์๊ฒ ๋ ๋ , ๋๋ ๊ตฌ์์ด ๋์๋ค.",
    "My daughter asked me what dreams were, and I realized I'd forgotten.": "๋ธ์ด ๊ฟ์ด ๋ญ๋๊ณ ๋ฌผ์๊ณ , ๋๋ ๋ด๊ฐ ์์๋ค๋ ๊ฑธ ๊นจ๋ฌ์๋ค.",
    "The silence lasted twelve secondsโa new world record.": "์นจ๋ฌต์ 12์ด๊ฐ ์ง์๋๋คโ์๋ก์ด ์ธ๊ณ ๊ธฐ๋ก์ด์๋ค.",

    # Character variations
    "last person who dreams without ads": "๊ด๊ณ ์์ด ๊ฟ๊พธ๋ ๋ง์ง๋ง ์ฌ๋",
    "final human with unmonetized thoughts": "์์ตํ๋์ง ์์ ์๊ฐ์ ๊ฐ์ง ๋ง์ง๋ง ์ธ๊ฐ",
    "excavator of deleted conversations": "์ญ์ ๋ ๋ํ์ ๋ฐ๊ตด์",
    "black market memory dealer": "๊ธฐ์ต ์์์ฅ ๊ฑฐ๋์",
    "guerrilla flavor bomber": "๊ฒ๋ฆด๋ผ ๋ง ํญํ ํ๋ฌ๋ฆฌ์คํธ",
    "temporal audit specialist": "์๊ฐ ๊ฐ์ฌ ์ ๋ฌธ๊ฐ",
    "organic emotion cultivator": "์ ๊ธฐ๋ ๊ฐ์ ์ฌ๋ฐฐ์",
    "binary meditation teacher": "์ด์ง๋ฒ ๋ช์ ๊ต์ฌ",
    "extinct plant memory keeper": "๋ฉธ์ข์๋ฌผ ๊ธฐ์ต ๊ด๋ฆฌ์ธ",
    "social distance calibrator": "์ฌํ์ ๊ฑฐ๋ฆฌ ์กฐ์ ๊ธฐ์ ์",
    "subconscious strip miner": "๋ฌด์์ ๋ธ์ฒ ์ฑ๊ตด์",

    # Narrative hooks
    "discovers their memories belong to a corporate subscription service": "์์ ์ ๊ธฐ์ต์ด ๊ธฐ์๊ตฌ๋์๋น์ค ์์ ์์ ๋ฐ๊ฒฌํ๋ค",
    "realizes they're the only person not running on autopilot": "์์ ๋ง์ด ์๋ ์กฐ์ข๋ชจ๋๋ก ์ด์ง ์๋๋ค๋ ๊ฑธ ๊นจ๋ซ๋๋ค",
    "finds out their personality is a discontinued model": "์์ ์ ์ฑ๊ฒฉ์ด ๋จ์ข๋ ๋ชจ๋ธ์์ ์๊ฒ ๋๋ค",

    # Philosophical questions
    "What remains human when humanity is optional?": "์ธ๊ฐ์ฑ์ด ์ ํ์ฌํญ์ผ ๋ ๋ฌด์์ด ์ธ๊ฐ์ผ๋ก ๋จ๋๊ฐ?",
    "Is consciousness a bug or a feature?": "์์์ ๋ฒ๊ทธ์ธ๊ฐ ๊ธฐ๋ฅ์ธ๊ฐ?",
    "Can nostalgia exist without mortality?": "์ฃฝ์ ์์ด ํฅ์๊ฐ ์กด์ฌํ ์ ์๋๊ฐ?",

    # Locations
    "Library of Burned Websites": "๋ถํ ์น์ฌ์ดํธ๋ค์ ๋์๊ด",
    "Museum of Extinct Emotions": "๋ฉธ์ข๋ ๊ฐ์ ๋ค์ ๋ฐ๋ฌผ๊ด",
    "Department of Mandatory Happiness": "์๋ฌด ํ๋ณต๋ถ",

    # Theme / arc identifiers
    "preservation_vs_evolution": "๋ณด์กด ๋ ์งํ",
    "digital_extinction": "๋์งํธ ๋ฉธ์ข",
    "sensory_revolution": "๊ฐ๊ฐ ํ๋ช",
    "temporal_paradox": "์๊ฐ์ ์ญ์ค",
    "emotional_economy": "๊ฐ์ ๊ฒฝ์ ",
    "linguistic_apocalypse": "์ธ์ด์ ์ข๋ง",
    "algorithmic_mysticism": "์๊ณ ๋ฆฌ์ฆ ์ ๋น์ฃผ์",
    "biological_nostalgia": "์๋ฌผํ์ ํฅ์",
    "social_physics": "์ฌํ ๋ฌผ๋ฆฌํ",
    "reality_bureaucracy": "ํ์ค ๊ด๋ฃ์ ",
    "dream_industrialization": "๊ฟ์ ์ฐ์ํ"
}


def translate_to_korean(text, category=None):
    """Return the Korean rendering of *text* when one is known.

    Args:
        text: English source string (opening sentence, character variation,
            narrative hook, philosophical question, location, or theme id).
        category: Unused; retained for backward compatibility with callers
            that pass it.

    Returns:
        The mapped Korean string, or *text* unchanged when no mapping exists.
    """
    return _KOREAN_TRANSLATIONS.get(text, text)
|
|
|
|
|
def generate_random_theme(language="English"):
    """Generate a coherent and natural novel theme using the LLM.

    Randomly samples a theme, character, hook, and philosophical question
    from ``novel_themes.json`` (or a small built-in pool when the file is
    missing), asks the LLM to weave them into a unified premise, and appends
    a narrative-arc / tone postscript.

    Args:
        language: "Korean" or "English"; controls the prompt language and
            the hard-coded fallback themes.

    Returns:
        A formatted theme description string.  Never raises: on any failure
        a canned fallback theme for *language* is returned instead.
    """
    try:
        json_path = Path("novel_themes.json")
        if not json_path.exists():
            # Use the module logger (the original used a bare print here,
            # bypassing the app's log configuration).
            logger.warning("novel_themes.json not found, using built-in data")
            themes_data = {
                "themes": ["digital extinction", "sensory revolution", "temporal paradox"],
                "characters": ["memory trader", "time thief", "emotion farmer"],
                "hooks": ["discovering hidden truth", "facing impossible choice", "breaking the system"],
                "questions": ["What makes us human?", "Can memory define identity?", "Is free will an illusion?"]
            }
        else:
            with open(json_path, 'r', encoding='utf-8') as f:
                data = json.load(f)
            themes_data = {
                "themes": list(data.get('core_themes', {}).keys()),
                "characters": [],
                "hooks": [],
                "questions": []
            }
            # Flatten the nested catalogue into flat candidate pools.
            for char_data in data.get('characters', {}).values():
                themes_data["characters"].extend(char_data.get('variations', []))
            for hook_list in data.get('narrative_hooks', {}).values():
                themes_data["hooks"].extend(hook_list)
            for phil_data in data.get('philosophies', {}).values():
                themes_data["questions"].extend(phil_data.get('core_questions', []))

        # Creative sampling is not security-sensitive, so the module-level
        # `random` suffices (the original imported `secrets` per call).
        theme = random.choice(themes_data["themes"])
        character = random.choice(themes_data["characters"])
        hook = random.choice(themes_data["hooks"])
        question = random.choice(themes_data["questions"])

        if language == "Korean":
            prompt = f"""๋ค์ ์์๋ค์ ์ฌ์ฉํ์ฌ ์์ฐ์ค๋ฝ๊ณ ํฅ๋ฏธ๋ก์ด ์์ค ์ฃผ์ ๋ฅผ ์์ฑํ์ธ์:

์ฃผ์ : {theme}
์บ๋ฆญํฐ: {character}
์ฌ๊ฑด: {hook}
์ฒ ํ์ ์ง๋ฌธ: {question}

์๊ตฌ์ฌํญ:
1. ๋ชจ๋ ์์๊ฐ ์ ๊ธฐ์ ์ผ๋ก ์ฐ๊ฒฐ๋ ํ๋์ ํตํฉ๋ ์ฃผ์ 
2. ๊ตฌ์ฒด์ ์ด๊ณ ๋์ฐฝ์ ์ธ ์ค์ 
3. ๋ชํํ ๊ฐ๋ฑ๊ณผ ๊ธด์ฅ๊ฐ
4. ํ๋์ ๊ด๋ จ์ฑ
5. ๋ฌธํ์ ๊น์ด

๋ค์ ํ์์ผ๋ก ์์ฑํ์ธ์:
- ์ ๋ชฉ: [๋งค๋ ฅ์ ์ด๊ณ ์์์ ์ธ ์ ๋ชฉ]
- ์ฒซ ๋ฌธ์ฅ: [๋์๋ฅผ ์ฆ์ ์ฌ๋ก์ก๋ ๊ฐ๋ ฌํ ์ฒซ ๋ฌธ์ฅ]
- ์ฃผ์ธ๊ณต: [๊ตฌ์ฒด์ ์ธ ์ํฉ๊ณผ ํน์ฑ์ ๊ฐ์ง ์ธ๋ฌผ]
- ์ค์ฌ ๊ฐ๋ฑ: [๋ด์ ๊ฐ๋ฑ๊ณผ ์ธ์ ๊ฐ๋ฑ์ ๊ฒฐํฉ]
- ํ๊ตฌ ์ฃผ์ : [์ฒ ํ์ ๊น์ด๋ฅผ ๊ฐ์ง ํต์ฌ ์ง๋ฌธ]"""
        else:
            prompt = f"""Generate a natural and compelling novel theme using these elements:

Theme: {theme}
Character: {character}
Event: {hook}
Philosophical Question: {question}

Requirements:
1. All elements organically connected into one unified theme
2. Specific and original setting
3. Clear conflict and tension
4. Contemporary relevance
5. Literary depth

Format as:
- Title: [Compelling and evocative title]
- Opening: [Powerful first sentence that immediately hooks readers]
- Protagonist: [Character with specific situation and traits]
- Central Conflict: [Combination of internal and external conflict]
- Core Exploration: [Philosophically deep central question]"""

        # Ask the project's LLM pipeline to turn the sampled elements into a
        # unified theme (uses the "director" role prompt).
        system = UnifiedLiterarySystem()
        messages = [{"role": "user", "content": prompt}]
        generated_theme = system.call_llm_sync(messages, "director", language)

        # Append a deterministic narrative-arc / tone postscript built from
        # the sampled elements.
        if language == "Korean":
            generated_theme += f"""

**์์ฌ ๊ตฌ์กฐ:**
์ด ์ด์ผ๊ธฐ๋ {character}๊ฐ {hook.lower()}๋ ์ถฉ๊ฒฉ์ ์ฌ๊ฑด์ผ๋ก ์์๋ฉ๋๋ค.
์ ์ฐจ ์ฌํ๋๋ ๊ฐ๋ฑ์ ํตํด {question.lower().rstrip('?')}๋ผ๋ ๊ทผ๋ณธ์ ์ง๋ฌธ๊ณผ ๋๋ฉดํ๊ฒ ๋๋ฉฐ,
๊ถ๊ทน์ ์ผ๋ก {theme.replace('_', ' ')}์ ์๋๋ฅผ ์ด์๊ฐ๋ ํ๋์ธ์ ์ค์กด์ ์ ํ์ ๊ทธ๋ฆฝ๋๋ค.

**ํค๊ณผ ์คํ์ผ:**
ํ๋ ๋ฌธํ์ ์ฌ๋ฆฌ์ ๊น์ด์ ์ฒ ํ์ ํต์ฐฐ์ ๊ฒฐํฉํ์ฌ, ๋์๋ก ํ์ฌ๊ธ
์์ ์ ์ถ์ ๋์๋ณด๊ฒ ๋ง๋๋ ์ฑ์ฐฐ์ ์์ฌ๋ฅผ ์งํฅํฉ๋๋ค."""
        else:
            generated_theme += f"""

**Narrative Arc:**
The story begins with {character} who {hook}, a shocking event that sets everything in motion.
Through deepening conflicts, they confront the fundamental question of {question.lower()}
ultimately portraying the existential choices of modern humans living in an era of {theme.replace('_', ' ')}.

**Tone and Style:**
Combining the psychological depth and philosophical insights of contemporary literature,
aiming for a reflective narrative that makes readers examine their own lives."""

        return generated_theme

    except Exception as e:
        logger.error(f"Theme generation error: {str(e)}")

        # Hard-coded fallbacks so the UI always receives a usable theme.
        fallback_themes = {
            "Korean": [
                """**์ ๋ชฉ:** ๋ง์ง๋ง ์๋ ๋ก๊ทธ ์ธ๊ฐ

**์ฒซ ๋ฌธ์ฅ:** "๋ด๊ฐ ๋ง์ง๋ง์ผ๋ก ์ข์ด์ ๊ธ์ ์ด ์ฌ๋์ด ๋ ๋ , ์ธ์์ ์นจ๋ฌตํ๋ค."

**์ฃผ์ธ๊ณต:** ๋์งํธํ๋ฅผ ๊ฑฐ๋ถํ๊ณ ์๊ธฐ๋ก๋ง ์ํตํ๋ ๋ธ๋์ ์๊ฐ

**์ค์ฌ ๊ฐ๋ฑ:** ํจ์จ์ฑ๊ณผ ์ธ๊ฐ์ฑ ์ฌ์ด์์ ์ ํํด์ผ ํ๋ ์ค์กด์ ๋๋ ๋ง

**ํ๊ตฌ ์ฃผ์ :** ๊ธฐ์ ๋ฐ์ ์์์ ์ธ๊ฐ ๊ณ ์ ์ ๊ฐ์น๋ ๋ฌด์์ธ๊ฐ?""",

                """**์ ๋ชฉ:** ๊ธฐ์ต ๊ฑฐ๋์

**์ฒซ ๋ฌธ์ฅ:** "์ค๋ ์์นจ, ๋๋ ์ฒซ์ฌ๋์ ๊ธฐ์ต์ ํ๊ธฐ๋ก ๊ฒฐ์ ํ๋ค."

**์ฃผ์ธ๊ณต:** ์๊ณ๋ฅผ ์ํด ์์คํ ๊ธฐ์ต์ ํ๋ ์ ์ ์์ ๊ฐ

**์ค์ฌ ๊ฐ๋ฑ:** ์์กด๊ณผ ์ ์ฒด์ฑ ๋ณด์กด ์ฌ์ด์ ์ ํ

**ํ๊ตฌ ์ฃผ์ :** ๊ธฐ์ต์ด ๊ฑฐ๋๋๋ ์๋, ์ฐ๋ฆฌ๋ ๋ฌด์์ผ๋ก ์์ ์ ์ ์ํ๋๊ฐ?"""
            ],
            "English": [
                """**Title:** The Last Analog Human

**Opening:** "The day I became the last person to write on paper, the world fell silent."

**Protagonist:** An elderly writer who refuses digitalization and communicates only through handwriting

**Central Conflict:** Existential dilemma between efficiency and humanity

**Core Exploration:** What is uniquely human in the age of technological advancement?""",

                """**Title:** The Memory Exchange

**Opening:** "This morning, I decided to sell my first love's memory."

**Protagonist:** A young artist selling precious memories for survival

**Central Conflict:** Choice between survival and preserving identity

**Core Exploration:** In an era where memories are traded, what defines who we are?"""
            ]
        }

        return random.choice(fallback_themes.get(language, fallback_themes["English"]))
|
|
|
|
|
|
|
|
def handle_random_theme(language):
    """Generate a random theme, falling back to a fixed prompt on failure.

    Args:
        language: "Korean" or "English"; selects both the generated theme's
            language and the hard-coded fallback.

    Returns:
        A theme description string; never raises.
    """
    try:
        theme = generate_random_theme(language)
        # Plain string: the original used an f-string with no placeholders.
        logger.info("Generated theme successfully")
        return theme
    except Exception as e:
        logger.error(f"Random theme generation failed: {str(e)}")
        if language == "Korean":
            # Fallback text copied verbatim from the original source.
            return "๊ธฐ์ต์ ์์ด๊ฐ๋ ๋ธ์ธ๊ณผ AI ๊ฐ๋ณ์ธ์ ํน๋ณํ ์ฐ์ "
        else:
            return "An unlikely friendship between an elderly person losing memories and their AI caregiver"
|
|
|
|
|
|
|
|
def augment_query(self, user_query: str, language: str) -> str:
    """Normalize a user-supplied theme and pad short prompts with a brief.

    Strips markdown decoration left over from the random-theme generator,
    then appends an explicit writing instruction when the remaining query is
    very short (fewer than 15 words).
    """
    # Clean up markdown-formatted input coming from the theme generator.
    if "**" in user_query or "##" in user_query:
        plain_lines = []
        for raw_line in user_query.split('\n'):
            candidate = raw_line.replace('**', '').replace('##', '').strip()
            if not candidate:
                continue
            is_bullet = candidate.startswith(('-', 'โข', '*'))
            has_label = ':' in candidate[:20]
            # Keep only free-text lines: no bullets, no "Label:" prefixes.
            if not is_bullet and not has_label:
                plain_lines.append(candidate)
        if plain_lines:
            user_query = ' '.join(plain_lines[:3])

    # Long enough queries pass through untouched.
    if len(user_query.split()) >= 15:
        return user_query

    # Short prompts get the standard novella writing brief appended.
    if language == "Korean":
        return f"{user_query}\n\n์ด ์ฃผ์ ๋ฅผ ํ๋์ ๊ด์ ์์ ์ฌํด์ํ์ฌ ์ธ๊ฐ ์กด์ฌ์ ๋ณธ์ง๊ณผ ๊ธฐ์ ์๋์ ๋๋ ๋ง๋ฅผ ํ๊ตฌํ๋ 8,000๋จ์ด ๋ถ๋์ ์ฒ ํ์ ์คํธ์์ค์ ์์ฑํ์ธ์."
    return f"{user_query}\n\nReinterpret this theme from a contemporary perspective to explore the essence of human existence and dilemmas of the technological age in an 8,000-word philosophical novella."
|
|
|
|
|
|
|
|
def process_generated_theme(self, theme_text: str, language: str) -> str: |
|
|
"""Process generated theme for novel writing""" |
|
|
|
|
|
theme_elements = { |
|
|
"title": "", |
|
|
"opening": "", |
|
|
"protagonist": "", |
|
|
"conflict": "", |
|
|
"exploration": "" |
|
|
} |
|
|
|
|
|
lines = theme_text.split('\n') |
|
|
current_key = None |
|
|
|
|
|
for line in lines: |
|
|
line = line.strip() |
|
|
if not line: |
|
|
continue |
|
|
|
|
|
|
|
|
if any(marker in line.lower() for marker in ['title:', 'opening:', 'protagonist:', 'conflict:', 'exploration:', '์ ๋ชฉ:', '์ฒซ ๋ฌธ์ฅ:', '์ฃผ์ธ๊ณต:', '๊ฐ๋ฑ:', 'ํ๊ตฌ']): |
|
|
for key in theme_elements: |
|
|
if key in line.lower() or (language == "Korean" and key in translate_to_korean(line.lower())): |
|
|
current_key = key |
|
|
|
|
|
if ':' in line: |
|
|
content = line.split(':', 1)[1].strip() |
|
|
if content: |
|
|
theme_elements[current_key] = content |
|
|
break |
|
|
elif current_key and line: |
|
|
|
|
|
theme_elements[current_key] = (theme_elements[current_key] + " " + line).strip() |
|
|
|
|
|
|
|
|
if language == "Korean": |
|
|
summary = f"{theme_elements.get('title', '๋ฌด์ ')}. " |
|
|
if theme_elements.get('opening'): |
|
|
summary += f"'{theme_elements['opening']}' " |
|
|
summary += f"{theme_elements.get('protagonist', '์ฃผ์ธ๊ณต')}์ ์ด์ผ๊ธฐ. " |
|
|
summary += f"{theme_elements.get('conflict', '')} " |
|
|
summary += f"{theme_elements.get('exploration', '')}" |
|
|
else: |
|
|
summary = f"{theme_elements.get('title', 'Untitled')}. " |
|
|
if theme_elements.get('opening'): |
|
|
summary += f"'{theme_elements['opening']}' " |
|
|
summary += f"The story of {theme_elements.get('protagonist', 'a protagonist')}. " |
|
|
summary += f"{theme_elements.get('conflict', '')} " |
|
|
summary += f"{theme_elements.get('exploration', '')}" |
|
|
|
|
|
return summary.strip() |
|
|
|
|
|
|
|
|
def create_interface():
    """Build and return the Gradio Blocks UI for the novel generator.

    Lays out the theme-input panel, session-recovery controls, the two
    output tabs (live writing process / completed novel) and the download
    section, then wires every control to its handler.
    """
    # gr.themes.Soft is a theme class; it must be instantiated — passing the
    # class object itself is not a valid `theme` value.
    with gr.Blocks(theme=gr.themes.Soft(), css=custom_css, title="AGI NOVEL Generator") as interface:
        gr.HTML("""
        <div class="main-header">
            <h1 class="header-title">๐ AGI NOVEL Generator</h1>

            <div class="badges-container">
                <a href="https://huggingface.co/OpenFreeAI" target="_blank">
                    <img src="https://img.shields.io/static/v1?label=Community&message=OpenFree_AI&color=%23800080&labelColor=%23000080&logo=HUGGINGFACE&logoColor=%23ffa500&style=for-the-badge" alt="badge">
                </a>
                <a href="https://discord.gg/openfreeai" target="_blank">
                    <img src="https://img.shields.io/static/v1?label=Discord&message=Openfree%20AI&color=%230000ff&labelColor=%23800080&logo=discord&logoColor=white&style=for-the-badge" alt="badge">
                </a>
                <a href="https://huggingface.co/spaces/openfree/Best-AI" target="_blank">
                    <img src="https://img.shields.io/static/v1?label=OpenFree&message=BEST%20AI%20Services&color=%230000ff&labelColor=%23000080&logo=huggingface&logoColor=%23ffa500&style=for-the-badge" alt="badge">
                </a>
            </div>

            <p class="header-description">
            Artificial General Intelligence (AGI) denotes an artificial system possessing human-level, general-purpose intelligence and is now commonly framed as AI that can outperform humans in most economically and intellectually valuable tasks. Demonstrating such breadth requires evaluating not only calculation, logical reasoning, and perception but also the distinctly human faculties of creativity and language. Among the creative tests, the most demanding is the production of a full-length novel running 100kโ200k words. An extended narrative forces an AGI candidate to exhibit (1) sustained long-term memory and context tracking (2) intricate causal and plot planning (3) nuanced cultural and emotional expression (4) autonomous self-censorship and ethical filtering to avoid harmful or biased content and (5) verifiable originality beyond simple recombination of training data.
            </p>

            <div class="progress-note" style="background: linear-gradient(135deg, rgba(147, 51, 234, 0.1), rgba(79, 70, 229, 0.1)); border-left-color: #7c3aed;">
            ๐ฒ <strong>Novel Theme Random Generator:</strong> This system can generate up to approximately 170 quadrillion (1.7 ร 10ยนโท) unique novel themes.
            Even writing 100 novels per day, it would take 4.6 million years to exhaust all combinations.
            Click the "Random" button to explore infinite creative possibilities!
            </div>

            <div class="warning-note">
            โฑ๏ธ <strong>Note:</strong> Creating a complete novel takes approximately 20 minutes. If your web session disconnects, you can restore your work using the "Session Recovery" feature.
            </div>

            <div class="progress-note">
            ๐ฏ <strong>Core Innovation:</strong> Not fragmented texts from multiple writers,
            but a genuine full-length novel written consistently by a single author from beginning to end.
            </div>
        </div>
        """)

        # Session id of the novel currently being written or resumed.
        current_session_id = gr.State(None)

        with gr.Row():
            with gr.Column(scale=1):
                with gr.Group(elem_classes=["input-section"]):
                    query_input = gr.Textbox(
                        label="Novel Theme",
                        placeholder="""Enter your novella theme.

Examples: Character transformation, relationship evolution, social conflict and personal choice...""",
                        lines=5
                    )

                    language_select = gr.Radio(
                        choices=["English", "Korean"],
                        value="English",
                        label="Language"
                    )

                    with gr.Row():
                        submit_btn = gr.Button("๐ Start Writing", variant="primary", scale=2)
                        random_btn = gr.Button("๐ฒ Random", variant="secondary", scale=1)
                        clear_btn = gr.Button("๐๏ธ Clear", scale=1)

                    status_text = gr.Textbox(
                        label="Progress Status",
                        interactive=False,
                        value="๐ Ready"
                    )

                with gr.Group(elem_classes=["session-section"]):
                    gr.Markdown("### ๐พ Active Works")
                    session_dropdown = gr.Dropdown(
                        label="Saved Sessions",
                        choices=[],
                        interactive=True
                    )
                    with gr.Row():
                        refresh_btn = gr.Button("๐ Refresh", scale=1)
                        resume_btn = gr.Button("โถ๏ธ Resume", variant="secondary", scale=1)
                        auto_recover_btn = gr.Button("โป๏ธ Recover Recent Work", scale=1)

            with gr.Column(scale=2):
                with gr.Tab("๐ Writing Process"):
                    stages_display = gr.Markdown(
                        value="Writing process will be displayed in real-time...",
                        elem_id="stages-display"
                    )

                with gr.Tab("๐ Completed Work"):
                    novel_output = gr.Markdown(
                        value="Completed novel will be displayed here...",
                        elem_id="novel-output"
                    )

                with gr.Group(elem_classes=["download-section"]):
                    gr.Markdown("### ๐ฅ Download Work")
                    with gr.Row():
                        format_select = gr.Radio(
                            choices=["DOCX", "TXT"],
                            value="DOCX" if DOCX_AVAILABLE else "TXT",
                            label="File Format"
                        )
                        download_btn = gr.Button("โฌ๏ธ Download", variant="secondary")

                    download_file = gr.File(
                        label="Download File",
                        visible=False
                    )

        # Mirror of the completed novel text, consumed by the download handler.
        novel_text_state = gr.State("")

        with gr.Row():
            gr.Examples(
                examples=[
                    ["A daughter discovering her mother's hidden past through old letters"],
                    ["An architect losing sight who learns to design through touch and sound"],
                    ["A translator replaced by AI rediscovering the essence of language through classical literature transcription"],
                    ["A middle-aged man who lost his job finding new meaning in rural life"],
                    ["A doctor with war trauma healing through Doctors Without Borders"],
                    ["Community solidarity to save a neighborhood bookstore from redevelopment"],
                    ["A year with a professor losing memory and his last student"]
                ],
                inputs=query_input,
                label="๐ก Theme Examples"
            )

        def refresh_sessions():
            """Reload the saved-session dropdown; empty it on failure."""
            try:
                sessions = get_active_sessions("English")
                return gr.update(choices=sessions)
            except Exception as e:
                logger.error(f"Session refresh error: {str(e)}")
                return gr.update(choices=[])

        def handle_auto_recover(language):
            """Recover the most recent unfinished session for *language*."""
            session_id, message = auto_recover_session(language)
            return session_id, message

        # ---- Event wiring -------------------------------------------------

        submit_btn.click(
            fn=process_query,
            inputs=[query_input, language_select, current_session_id],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )

        # Keep novel_text_state in sync with the rendered novel markdown.
        novel_output.change(
            fn=lambda x: x,
            inputs=[novel_output],
            outputs=[novel_text_state]
        )

        # Dropdown entries look like "<session-id>...<title>"; keep the id.
        resume_btn.click(
            fn=lambda x: x.split("...")[0] if x and "..." in x else x,
            inputs=[session_dropdown],
            outputs=[current_session_id]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )

        auto_recover_btn.click(
            fn=handle_auto_recover,
            inputs=[language_select],
            outputs=[current_session_id, status_text]
        ).then(
            fn=resume_session,
            inputs=[current_session_id, language_select],
            outputs=[stages_display, novel_output, status_text, current_session_id]
        )

        refresh_btn.click(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )

        clear_btn.click(
            fn=lambda: ("", "", "๐ Ready", "", None),
            outputs=[stages_display, novel_output, status_text, novel_text_state, current_session_id]
        )

        # Direct reference instead of the original identity lambda; a dead,
        # shadowing inner handle_random_theme (never wired to any event) has
        # been removed.  queue=False keeps theme generation responsive.
        random_btn.click(
            fn=generate_random_theme,
            inputs=[language_select],
            outputs=[query_input],
            queue=False
        )

        def handle_download(format_type, language, session_id, novel_text):
            """Serialize the novel and reveal the file widget on success."""
            if not session_id or not novel_text:
                return gr.update(visible=False)

            file_path = download_novel(novel_text, format_type, language, session_id)
            if file_path:
                return gr.update(value=file_path, visible=True)
            return gr.update(visible=False)

        download_btn.click(
            fn=handle_download,
            inputs=[format_select, language_select, current_session_id, novel_text_state],
            outputs=[download_file]
        )

        # Populate the session dropdown as soon as the page loads.
        interface.load(
            fn=refresh_sessions,
            outputs=[session_dropdown]
        )

    return interface
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Startup banner and configuration summary.
    banner = "=" * 60
    for message in (
        "AGI NOVEL Generator v2.0 Starting...",
        banner,
        f"API Endpoint: {API_URL}",
        f"Target Length: {TARGET_WORDS:,} words",
        f"Minimum Words per Part: {MIN_WORDS_PER_PART:,} words",
        "System Features: Single writer + Immediate part-by-part critique",
    ):
        logger.info(message)

    # Optional-feature availability.
    if BRAVE_SEARCH_API_KEY:
        logger.info("Web search enabled.")
    else:
        logger.warning("Web search disabled.")

    if DOCX_AVAILABLE:
        logger.info("DOCX export enabled.")
    else:
        logger.warning("DOCX export disabled.")

    logger.info(banner)

    # Ensure the SQLite schema exists before serving any requests.
    logger.info("Initializing database...")
    NovelDatabase.init_db()
    logger.info("Database initialization complete.")

    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True,
    )