fantaxy committed on
Commit
e46db4b
·
verified ·
1 Parent(s): d82f5ea

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +105 -142
app.py CHANGED
@@ -1,5 +1,3 @@
1
- # -*- coding: utf-8 -*-
2
-
3
  import gradio as gr
4
  import requests
5
  from bs4 import BeautifulSoup
@@ -15,6 +13,8 @@ from transformers import pipeline
15
  import torch
16
  from diffusers import StableDiffusionXLPipeline
17
  import uuid
 
 
18
 
19
  def setup_session():
20
  try:
@@ -109,15 +109,23 @@ def create_client(model_name):
109
 
110
  client = create_client("CohereForAI/c4ai-command-r-plus")
111
 
112
- def call_api(content, system_message, max_tokens, temperature, top_p):
113
- messages = [{"role": "system", "content": system_message}, {"role": "user", "content": content}]
114
- random_seed = random.randint(0, 1000000)
115
- response = client.chat_completion(messages=messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p, seed=random_seed)
116
- modified_text = response.choices[0].message.content
117
- input_tokens = response.usage.prompt_tokens
118
- output_tokens = response.usage.completion_tokens
119
- total_tokens = response.usage.total_tokens
120
- return modified_text, input_tokens, output_tokens, total_tokens
 
 
 
 
 
 
 
 
121
 
122
  def analyze_info(category, style, topic, references1, references2, references3):
123
  return f"์„ ํƒํ•œ ์นดํ…Œ๊ณ ๋ฆฌ: {category}\n์„ ํƒํ•œ ํฌ์ŠคํŒ… ์Šคํƒ€์ผ: {style}\n๋ธ”๋กœ๊ทธ ์ฃผ์ œ: {topic}\n์ฐธ๊ณ  ๊ธ€1: {references1}\n์ฐธ๊ณ  ๊ธ€2: {references2}\n์ฐธ๊ณ  ๊ธ€3: {references3}"
@@ -129,10 +137,42 @@ def suggest_title(category, style, topic, references1, references2, references3)
129
  max_tokens = 5000
130
  temperature = 0.8
131
  top_p = 0.95
132
- combined_prompt = f"{category_prompt}\n\n{style_prompt}"
133
  modified_text, input_tokens, output_tokens, total_tokens = call_api(full_content, combined_prompt, max_tokens, temperature, top_p)
 
 
 
 
 
134
  token_usage_message = f"[์ž…๋ ฅ ํ† ํฐ์ˆ˜: {input_tokens}]\n[์ถœ๋ ฅ ํ† ํฐ์ˆ˜: {output_tokens}]\n[์ด ํ† ํฐ์ˆ˜: {total_tokens}]"
135
- return modified_text, token_usage_message
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
136
 
137
  def generate_outline(category, style, topic, references1, references2, references3, title):
138
  full_content = analyze_info(category, style, topic, references1, references2, references3)
@@ -490,7 +530,7 @@ def format_filename(text):
490
  def save_content_to_pdf(blog_post):
491
  return save_to_pdf(blog_post)
492
 
493
- title = "๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ… ์ž๋™์ƒ์„ฑ๊ธฐ(์ œ๋ชฉ์ถ”์ฒœ ํ›„ ์ž๋™)"
494
 
495
  def update_prompts_and_description(category, style):
496
  title_prompt = get_title_prompt(category)
@@ -500,153 +540,76 @@ def update_prompts_and_description(category, style):
500
  style_description = get_style_description(style)
501
  return style_description
502
 
503
- # ์ด๋ฏธ์ง€ ์ƒ์„ฑ์„ ์œ„ํ•œ ์„ค์ •
504
- translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
505
-
506
- device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
507
 
508
- pipe = StableDiffusionXLPipeline.from_pretrained(
509
- "SG161222/RealVisXL_V4.0",
510
- torch_dtype=torch.float16,
511
- use_safetensors=True,
512
- add_watermarker=False,
513
- variant="fp16"
514
- ).to(device)
515
 
516
-
517
- def generate_image(prompt):
518
- # ํ•œ๊ตญ์–ด๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญ
519
- translated = translator(prompt)[0]['translation_text']
520
-
521
- # Hi-res์™€ 3840x2160 ์Šคํƒ€์ผ ์ ์šฉ
522
- enhanced_prompt = f"hyper-realistic 8K image of {translated}. ultra-detailed, lifelike, high-resolution, sharp, vibrant colors, photorealistic"
523
-
524
- # ๊ณ ์ •๋œ ์„ค์ •๊ฐ’
525
- negative_prompt = "cartoonish, low resolution, blurry, simplistic, abstract, deformed, ugly, (deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, disgusting, amputation"
526
- width = 1024
527
- height = 1024
528
- guidance_scale = 10
529
- num_inference_steps = 150
530
- seed = random.randint(0, 2**32 - 1)
531
- generator = torch.Generator("cuda").manual_seed(seed)
532
-
533
- # ์ด๋ฏธ์ง€ ์ƒ์„ฑ
534
- image = pipe(
535
- prompt=enhanced_prompt,
536
- negative_prompt=negative_prompt,
537
- width=width,
538
- height=height,
539
- guidance_scale=guidance_scale,
540
- num_inference_steps=num_inference_steps,
541
- generator=generator
542
- ).images[0]
543
 
544
- return image
 
 
 
 
 
 
 
545
 
546
- def generate_images(blog_post):
547
- images = []
548
- lines = blog_post.split('\n')
549
- title = lines[0].strip()
550
-
551
- # 1. ์ œ๋ชฉ์— ๋Œ€ํ•œ ์š”์•ฝ์œผ๋กœ ๊ฐœ์š” ๋ชฉ์  ์ด๋ฏธ์ง€ ์ƒ์„ฑ
552
- title_prompt = f"Conceptual overview image representing '{title}'. Visual summary, infographic style, key concepts illustrated"
553
- images.append(generate_image(title_prompt))
554
-
555
- # ๋„์ž…๋ถ€์™€ ๋ณธ๋ฌธ, ๊ฒฐ๋ก  ์ฐพ๊ธฐ
556
- intro_index = next((i for i, line in enumerate(lines) if '[๋„์ž…๋ถ€]' in line), -1)
557
- body_index = next((i for i, line in enumerate(lines) if '[๋ณธ๋ก 1]' in line), -1)
558
- conclusion_index = next((i for i, line in enumerate(lines) if '[๊ฒฐ๋ก ]' in line), -1)
559
-
560
- # 2. ๋„์ž…๋ถ€์— ๋Œ€ํ•œ ์š”์•ฝ์œผ๋กœ ํฅ๋ฏธ ์œ ๋ฐœ ๋ชฉ์  ์ด๋ฏธ์ง€ ์ƒ์„ฑ
561
- intro_text = ' '.join(lines[intro_index+1:body_index]) if intro_index != -1 and body_index != -1 else title
562
- intro_summary = f"Engaging and intriguing image to spark interest: {intro_text[:100]}... Eye-catching, thought-provoking, visually appealing"
563
- images.append(generate_image(intro_summary))
564
-
565
- # 3. ๋ณธ๋ฌธ ๋‚ด์šฉ ์š”์•ฝ์œผ๋กœ ์ด๋ฏธ์ง€ ์ƒ์„ฑ
566
- body_text = ' '.join(lines[body_index+1:conclusion_index]) if body_index != -1 and conclusion_index != -1 else title
567
- body_summary = f"Detailed illustration of main content: {body_text[:100]}... Informative, clear visualization, key points highlighted"
568
- images.append(generate_image(body_summary))
569
-
570
- # 4. ๊ฒฐ๋ก  ๋‚ด์šฉ ์š”์•ฝ์œผ๋กœ ๋…์ž์—๊ฒŒ ์ œ๊ณต๋˜๋Š” ๊ฐ€์น˜์™€ ์ด์ต์„ ์ด๋ฏธ์ง€๋กœ ์ƒ์„ฑ
571
- conclusion_text = ' '.join(lines[conclusion_index+1:]) if conclusion_index != -1 else title
572
- value_summary = f"Image symbolizing the value and benefits for readers: {conclusion_text[:100]}... Positive outcomes, achievements, growth visualization"
573
- images.append(generate_image(value_summary))
574
-
575
- # 5. ๊ฒฐ๋ก  ๋‚ด์šฉ ์š”์•ฝ์œผ๋กœ ์ƒ์ง•์„ ์ด๋ฏธ์ง€๋กœ ์ƒ์„ฑ
576
- symbolic_summary = f"Symbolic representation of the conclusion: {conclusion_text[:100]}... Abstract, metaphorical, thought-provoking imagery"
577
- images.append(generate_image(symbolic_summary))
578
-
579
- return images # ํ•ญ์ƒ 5๊ฐœ์˜ ์ด๋ฏธ์ง€๋ฅผ ๋ฐ˜ํ™˜
580
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
581
  with gr.Blocks() as demo:
582
  gr.Markdown(f"# {title}")
583
 
584
  gr.Markdown("### 1๋‹จ๊ณ„ : ํฌ์ŠคํŒ… ์นดํ…Œ๊ณ ๋ฆฌ๋ฅผ ์ง€์ •ํ•ด์ฃผ์„ธ์š”")
585
  category = gr.Radio(choices=["์ผ๋ฐ˜", "๊ฑด๊ฐ•์ •๋ณด"], label="ํฌ์ŠคํŒ… ์นดํ…Œ๊ณ ๋ฆฌ", value="์ผ๋ฐ˜")
586
 
587
- gr.Markdown("---\n\n")
588
-
589
- gr.Markdown("### 2๋‹จ๊ณ„: ํฌ์ŠคํŒ… ์Šคํƒ€์ผ์„ ์„ ํƒํ•ด์ฃผ์„ธ์š”", elem_id="step-title")
590
  style = gr.Radio(choices=["์นœ๊ทผํ•œ", "์ผ๋ฐ˜", "์ „๋ฌธ์ ์ธ"], label="ํฌ์ŠคํŒ… ์Šคํƒ€์ผ", value="์นœ๊ทผํ•œ")
591
  style_description = gr.Markdown(f"_{get_style_description('์นœ๊ทผํ•œ')}_", elem_id="style-description")
592
 
593
- gr.Markdown("---\n\n")
594
-
595
- # 2๋‹จ๊ณ„
596
  gr.Markdown("### 3๋‹จ๊ณ„ : ๋ธ”๋กœ๊ทธ ์ฃผ์ œ, ๋˜๋Š” ํ‚ค์›Œ๋“œ๋ฅผ ์ƒ์„ธํžˆ ์ž…๋ ฅํ•˜์„ธ์š”")
597
- topic = gr.Textbox(label="๋ธ”๋กœ๊ทธ ์ฃผ์ œ(์˜ˆ์‹œ: ์˜ค์ง•์–ด ๋ฌด์นจํšŒ(X), ์˜ค์ง•์–ด ๋ฌด์นจํšŒ ๋ ˆ์‹œํ”ผ(O))", placeholder="์˜ˆ์‹œ: ์—ฌํ–‰์ง€ ์ถ”์ฒœ(X), 8์›” ๊ตญ๋‚ด ์—ฌํ–‰์ง€ ์ถ”์ฒœ(O)")
598
 
599
- # 3๋‹จ๊ณ„: ์ฐธ๊ณ  ๊ธ€์„ ์œ„ํ•œ ๋ณ€์ˆ˜๋“ค ๋ฏธ๋ฆฌ ์ •์˜
600
- references1 = gr.Textbox(label="์ฐธ๊ณ  ๊ธ€ 1", placeholder="์ฐธ๊ณ ํ•  ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…๊ธ€์„ ๋ณต์‚ฌํ•˜์—ฌ ๋ถ™์—ฌ๋„ฃ์œผ์„ธ์š”", lines=10, visible=False)
601
- references2 = gr.Textbox(label="์ฐธ๊ณ  ๊ธ€ 2", placeholder="์ฐธ๊ณ ํ•  ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…๊ธ€์„ ๋ณต์‚ฌํ•˜์—ฌ ๋ถ™์—ฌ๋„ฃ์œผ์„ธ์š”", lines=10, visible=False)
602
- references3 = gr.Textbox(label="์ฐธ๊ณ  ๊ธ€ 3", placeholder="์ฐธ๊ณ ํ•  ๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ…๊ธ€์„ ๋ณต์‚ฌํ•˜์—ฌ ๋ถ™์—ฌ๋„ฃ์œผ์„ธ์š”", lines=10, visible=False)
603
 
604
- # ์ œ๋ชฉ ์ถ”์ฒœ
605
- gr.Markdown("### 4๋‹จ๊ณ„ : ์ œ๋ชฉ ์ถ”์ฒœํ•˜๊ธฐ")
606
-
607
- title_suggestions = gr.Textbox(label="์ œ๋ชฉ ์ถ”์ฒœ", lines=10)
608
- title_token_output = gr.Markdown(label="์‚ฌ์šฉ๋œ ํ† ํฐ ์ˆ˜")
609
-
610
- # ์ œ๋ชฉ ์ถ”์ฒœ ๋ฒ„ํŠผ
611
- title_btn = gr.Button("์ œ๋ชฉ ์ถ”์ฒœํ•˜๊ธฐ")
612
- title_btn.click(fn=suggest_title, inputs=[category, style, topic, references1, references2, references3], outputs=[title_suggestions, title_token_output])
613
-
614
- blog_title = gr.Textbox(label="๋ธ”๋กœ๊ทธ ์ œ๋ชฉ", placeholder="๋ธ”๋กœ๊ทธ ์ œ๋ชฉ์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”")
615
-
616
- # ๋ธ”๋กœ๊ทธ ๊ธ€ ์ƒ์„ฑ
617
- gr.Markdown("### 5๋‹จ๊ณ„ : ๋ธ”๋กœ๊ทธ ๊ธ€ ์ƒ์„ฑํ•˜๊ธฐ")
618
- gr.HTML("<span style='color: grey;'>[๋ธ”๋กœ๊ทธ ๊ธ€ ์ƒ์„ฑํ•˜๊ธฐ ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜๋ฉด ์•„์›ƒ๋ผ์ธ ์ƒ์„ฑ ๋ฐ ๋ธ”๋กœ๊ทธ ๊ธ€ ์ž‘์„ฑ์ด ์ž๋™์œผ๋กœ ์ง„ํ–‰๋ฉ๋‹ˆ๋‹ค.]</span>")
619
-
620
- outline_result = gr.Textbox(label="์•„์›ƒ๋ผ์ธ ๊ฒฐ๊ณผ", lines=15, visible=False)
621
- outline_token_output = gr.Markdown(label="์‚ฌ์šฉ๋œ ํ† ํฐ ์ˆ˜", visible=False)
622
- output = gr.Textbox(label="์ƒ์„ฑ๋œ ๋ธ”๋กœ๊ทธ ๊ธ€", lines=30)
623
- token_output = gr.Markdown(label="์‚ฌ์šฉ๋œ ํ† ํฐ ์ˆ˜")
624
-
625
- generate_post_btn = gr.Button("๋ธ”๋กœ๊ทธ ๊ธ€ ์ƒ์„ฑํ•˜๊ธฐ")
626
- generate_post_btn.click(
627
- fn=fetch_references_and_generate_all_steps,
628
- inputs=[category, style, topic, blog_title],
629
- outputs=[references1, references2, references3, outline_result, outline_token_output, output, token_output],
630
- api_name="generate_post"
631
  )
632
-
633
-
634
- # ์ด๋ฏธ์ง€ ์ƒ์„ฑ ๋ฒ„ํŠผ ๋ฐ ์ถœ๋ ฅ ์ถ”๊ฐ€
635
- generate_images_btn = gr.Button("์ด๋ฏธ์ง€ ์ƒ์„ฑํ•˜๊ธฐ")
636
- image_outputs = [gr.Image(label=f"์ƒ์„ฑ๋œ ์ด๋ฏธ์ง€ {i+1}") for i in range(5)]
637
-
638
- generate_images_btn.click(
639
- fn=generate_images,
640
- inputs=[output],
641
- outputs=image_outputs
642
- )
643
-
644
- save_pdf_btn = gr.Button("PDF๋กœ ์ €์žฅํ•˜๊ธฐ")
645
- pdf_output = gr.File(label="์ƒ์„ฑ๋œ PDF ํŒŒ์ผ")
646
-
647
- save_pdf_btn.click(fn=save_content_to_pdf, inputs=[output], outputs=[pdf_output])
648
 
649
  category.change(fn=update_prompts_and_description, inputs=[category, style], outputs=[style_description])
650
- style.change(fn=update_prompts_and_description, inputs=[category, style], outputs=[style_description])
651
 
652
  demo.launch()
 
 
 
1
  import gradio as gr
2
  import requests
3
  from bs4 import BeautifulSoup
 
13
  import torch
14
  from diffusers import StableDiffusionXLPipeline
15
  import uuid
16
+ import json
17
+ from huggingface_hub.utils._errors import HfHubHTTPError
18
 
19
  def setup_session():
20
  try:
 
109
 
110
  client = create_client("CohereForAI/c4ai-command-r-plus")
111
 
112
+ def call_api(content, system_message, max_tokens, temperature, top_p, max_retries=3):
113
+ for attempt in range(max_retries):
114
+ try:
115
+ messages = [{"role": "system", "content": system_message}, {"role": "user", "content": content}]
116
+ random_seed = random.randint(0, 1000000)
117
+ response = client.chat_completion(messages=messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p, seed=random_seed)
118
+ modified_text = response.choices[0].message.content
119
+ input_tokens = response.usage.prompt_tokens
120
+ output_tokens = response.usage.completion_tokens
121
+ total_tokens = response.usage.total_tokens
122
+ return modified_text, input_tokens, output_tokens, total_tokens
123
+ except HfHubHTTPError as e:
124
+ if attempt < max_retries - 1:
125
+ time.sleep(5) # 5์ดˆ ๋Œ€๊ธฐ ํ›„ ์žฌ์‹œ๋„
126
+ else:
127
+ return f"API ํ˜ธ์ถœ ์‹คํŒจ: {str(e)}", 0, 0, 0
128
+
129
 
130
  def analyze_info(category, style, topic, references1, references2, references3):
131
  return f"์„ ํƒํ•œ ์นดํ…Œ๊ณ ๋ฆฌ: {category}\n์„ ํƒํ•œ ํฌ์ŠคํŒ… ์Šคํƒ€์ผ: {style}\n๋ธ”๋กœ๊ทธ ์ฃผ์ œ: {topic}\n์ฐธ๊ณ  ๊ธ€1: {references1}\n์ฐธ๊ณ  ๊ธ€2: {references2}\n์ฐธ๊ณ  ๊ธ€3: {references3}"
 
137
  max_tokens = 5000
138
  temperature = 0.8
139
  top_p = 0.95
140
+ combined_prompt = f"{category_prompt}\n\n{style_prompt}\n\n์ถ”๊ฐ€ ์ง€์‹œ์‚ฌํ•ญ: ์ ˆ๋Œ€๋กœ ์ œ๋ชฉ ์•ž์— ๋ฒˆํ˜ธ๋ฅผ ๋ถ™์ด์ง€ ๋งˆ์„ธ์š”."
141
  modified_text, input_tokens, output_tokens, total_tokens = call_api(full_content, combined_prompt, max_tokens, temperature, top_p)
142
+
143
+ # ๋ฒˆํ˜ธ์™€ ์ (.)์„ ์ œ๊ฑฐํ•˜๊ณ  ๊ฐ ์ค„์„ ์ •๋ฆฌํ•ฉ๋‹ˆ๋‹ค.
144
+ titles = [line.strip() for line in modified_text.split('\n') if line.strip()]
145
+ titles = [re.sub(r'^\d+\.\s*', '', title) for title in titles]
146
+
147
  token_usage_message = f"[์ž…๋ ฅ ํ† ํฐ์ˆ˜: {input_tokens}]\n[์ถœ๋ ฅ ํ† ํฐ์ˆ˜: {output_tokens}]\n[์ด ํ† ํฐ์ˆ˜: {total_tokens}]"
148
+ return "\n".join(titles), token_usage_message
149
+
150
+ def process_all_titles(category, style, topic):
151
+ # ์ œ๋ชฉ ์ถ”์ฒœ
152
+ title_suggestions, _ = suggest_title(category, style, topic, "", "", "")
153
+ titles = title_suggestions.split('\n')
154
+
155
+ results = []
156
+ for title in titles[:10]: # ์ฒ˜์Œ 10๊ฐœ์˜ ์ œ๋ชฉ๋งŒ ์‚ฌ์šฉ
157
+ try:
158
+ # ๋ธ”๋กœ๊ทธ ๊ธ€ ์ƒ์„ฑ
159
+ _, _, _, _, _, blog_content, _ = fetch_references_and_generate_all_steps(category, style, topic, title)
160
+
161
+ if blog_content.startswith("API ํ˜ธ์ถœ ์‹คํŒจ"):
162
+ results.append(f"์ œ๋ชฉ: {title}\n์ƒ์„ฑ ์‹คํŒจ: {blog_content}\n\n")
163
+ continue
164
+
165
+ # ํฌ์ŠคํŒ… ์ „์†ก
166
+ send_result = send_to_blogger(title, blog_content)
167
+
168
+ results.append(f"์ œ๋ชฉ: {title}\n์ „์†ก ๊ฒฐ๊ณผ: {send_result}\n\n")
169
+ except Exception as e:
170
+ results.append(f"์ œ๋ชฉ: {title}\n์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}\n\n")
171
+
172
+ time.sleep(5) # API ํ˜ธ์ถœ ์‚ฌ์ด์— 5์ดˆ ๋Œ€๊ธฐ
173
+
174
+ return "\n".join(results)
175
+
176
 
177
  def generate_outline(category, style, topic, references1, references2, references3, title):
178
  full_content = analyze_info(category, style, topic, references1, references2, references3)
 
530
  def save_content_to_pdf(blog_post):
531
  return save_to_pdf(blog_post)
532
 
533
+ title = "๋ธ”๋กœ๊ทธ ํฌ์ŠคํŒ… ์ž๋™์ƒ์„ฑ ๋ฐ ์ „์†ก(์ฃผ์ œ๋งŒ ์ž…๋ ฅ์‹œ ์ถ”์ฒœ ์ œ๋ชฉ 10๊ฑด ๊ฐ๊ฐ ์ƒ์„ฑ/์ „์†ก)"
534
 
535
  def update_prompts_and_description(category, style):
536
  title_prompt = get_title_prompt(category)
 
540
  style_description = get_style_description(style)
541
  return style_description
542
 
 
 
 
 
543
 
544
+ WEBHOOK_URL = os.getenv("WEBHOOK_URL")
545
+ BLOGGER_ID = os.getenv("BLOGGER_ID")
 
 
 
 
 
546
 
547
+ def send_to_blogger(blog_title, blog_content):
548
+ payload = {
549
+ "id": BLOGGER_ID,
550
+ "title": blog_title,
551
+ "content": blog_content
552
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
553
 
554
+ try:
555
+ response = requests.post(WEBHOOK_URL, json=payload)
556
+ if response.status_code == 200:
557
+ return "ํฌ์ŠคํŒ…์ด ์„ฑ๊ณต์ ์œผ๋กœ ์ „์†ก๋˜์—ˆ์Šต๋‹ˆ๋‹ค."
558
+ else:
559
+ return f"ํฌ์ŠคํŒ… ์ „์†ก ์‹คํŒจ. ์ƒํƒœ ์ฝ”๋“œ: {response.status_code}"
560
+ except Exception as e:
561
+ return f"์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
562
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
563
 
564
+ def process_all_titles(category, style, topic):
565
+ # ์ œ๋ชฉ ์ถ”์ฒœ
566
+ title_suggestions, _ = suggest_title(category, style, topic, "", "", "")
567
+ titles = title_suggestions.split('\n')
568
+
569
+ results = []
570
+ for title in titles[:10]: # ์ฒ˜์Œ 10๊ฐœ์˜ ์ œ๋ชฉ๋งŒ ์‚ฌ์šฉ
571
+ try:
572
+ # ๋ธ”๋กœ๊ทธ ๊ธ€ ์ƒ์„ฑ
573
+ _, _, _, _, _, blog_content, _ = fetch_references_and_generate_all_steps(category, style, topic, title)
574
+
575
+ if blog_content.startswith("API ํ˜ธ์ถœ ์‹คํŒจ"):
576
+ results.append(f"์ œ๋ชฉ: {title}\n์ƒ์„ฑ ์‹คํŒจ: {blog_content}\n\n")
577
+ continue
578
+
579
+ # ํฌ์ŠคํŒ… ์ „๏ฟฝ๏ฟฝ๏ฟฝ
580
+ send_result = send_to_blogger(title, blog_content)
581
+
582
+ results.append(f"์ œ๋ชฉ: {title}\n์ „์†ก ๊ฒฐ๊ณผ: {send_result}\n\n")
583
+ except Exception as e:
584
+ results.append(f"์ œ๋ชฉ: {title}\n์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}\n\n")
585
+
586
+ time.sleep(5) # API ํ˜ธ์ถœ ์‚ฌ์ด์— 5์ดˆ ๋Œ€๊ธฐ
587
+
588
+ return "\n".join(results)
589
+
590
  with gr.Blocks() as demo:
591
  gr.Markdown(f"# {title}")
592
 
593
  gr.Markdown("### 1๋‹จ๊ณ„ : ํฌ์ŠคํŒ… ์นดํ…Œ๊ณ ๋ฆฌ๋ฅผ ์ง€์ •ํ•ด์ฃผ์„ธ์š”")
594
  category = gr.Radio(choices=["์ผ๋ฐ˜", "๊ฑด๊ฐ•์ •๋ณด"], label="ํฌ์ŠคํŒ… ์นดํ…Œ๊ณ ๋ฆฌ", value="์ผ๋ฐ˜")
595
 
596
+ gr.Markdown("### 2๋‹จ๊ณ„: ํฌ์ŠคํŒ… ์Šคํƒ€์ผ์„ ์„ ํƒํ•ด์ฃผ์„ธ์š”")
 
 
597
  style = gr.Radio(choices=["์นœ๊ทผํ•œ", "์ผ๋ฐ˜", "์ „๋ฌธ์ ์ธ"], label="ํฌ์ŠคํŒ… ์Šคํƒ€์ผ", value="์นœ๊ทผํ•œ")
598
  style_description = gr.Markdown(f"_{get_style_description('์นœ๊ทผํ•œ')}_", elem_id="style-description")
599
 
 
 
 
600
  gr.Markdown("### 3๋‹จ๊ณ„ : ๋ธ”๋กœ๊ทธ ์ฃผ์ œ, ๋˜๋Š” ํ‚ค์›Œ๋“œ๋ฅผ ์ƒ์„ธํžˆ ์ž…๋ ฅํ•˜์„ธ์š”")
601
+ topic = gr.Textbox(label="๋ธ”๋กœ๊ทธ ์ฃผ์ œ", placeholder="์˜ˆ์‹œ: 8์›” ๊ตญ๋‚ด ์—ฌํ–‰์ง€ ์ถ”์ฒœ")
602
 
603
+ start_btn = gr.Button("์‹œ์ž‘")
604
+ result_output = gr.Textbox(label="์ฒ˜๋ฆฌ ๊ฒฐ๊ณผ", lines=20)
 
 
605
 
606
+ start_btn.click(
607
+ fn=process_all_titles,
608
+ inputs=[category, style, topic],
609
+ outputs=[result_output]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
610
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
611
 
612
  category.change(fn=update_prompts_and_description, inputs=[category, style], outputs=[style_description])
613
+ style.change(fn=update_prompts_and_description, inputs=[category, style], outputs=[style_description])
614
 
615
  demo.launch()