import gradio as gr
import os
import json
import requests
from huggingface_hub import InferenceClient
from reportlab.lib.pagesizes import letter
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import inch
from reportlab.lib import colors
import io
import tempfile

# Initialize the text generation pipeline and MCP client
generator = None
mcp_client = None
image_generator = None
img2img_generator = None

# MCP client configuration
MCP_ENDPOINTS = {
    "claude": "https://api.anthropic.com/v1/mcp",
    "openai": "https://api.openai.com/v1/mcp",
    "huggingface": None  # Will use local model
}

def initialize_model():
    global generator
    try:
        # Use HF Inference API with modern models (no local downloads)
        generator = InferenceClient(model="microsoft/Phi-3-mini-4k-instruct")
        return "Phi-3-mini loaded via Inference API!"
    except Exception as e:
        try:
            # Fallback to Qwen via API
            generator = InferenceClient(model="Qwen/Qwen2.5-1.5B-Instruct")
            return "Qwen 2.5-1.5B loaded via Inference API!"
        except Exception as e2:
            # Final fallback to any available model
            generator = InferenceClient()  # Use default model
            return f"Default model loaded via Inference API! Primary error: {str(e)}"

def initialize_mcp_client():
    """Initialize MCP client for external AI services"""
    global mcp_client
    try:
        # Simplified MCP client (no external dependencies)
        mcp_client = {"status": "ready", "type": "local_only"}
        return "MCP client initialized successfully!"
    except Exception as e:
        return f"MCP client initialization failed: {str(e)}"

def initialize_image_generator():
    """Initialize basic image generator (FLUX disabled for dependency issues)"""
    global image_generator
    try:
        # For now, disable image generation to avoid dependency issues
        print("Image generation temporarily disabled due to dependency conflicts...")
        image_generator = None
        return "Image generation disabled - focusing on text generation and PDF export"
    except Exception as e:
        return f"Image generation initialization failed: {str(e)}"

def generate_with_mcp(topic, target_audience, key_points, tone, length, model_choice="local"):
    """Generate one-pager using MCP client or local model"""
    if model_choice == "local" or mcp_client is None:
        return generate_onepager(topic, target_audience, key_points, tone, length)
    try:
        # Example of using the MCP client to connect to other services.
        # This is where actual MCP protocol calls would be implemented.
        prompt = f"""Create a compelling one-page business document about "{topic}" for {target_audience}.
Style: {tone.lower()} but action-oriented
Key points: {key_points}
Length: {length}
Format as a TRUE one-pager with visual elements, benefits, and clear next steps."""
        # For demonstration, fall back to local generation.
        # In practice, this would make MCP calls to external services.
        return generate_onepager(topic, target_audience, key_points, tone, length)
    except Exception as e:
        # Fallback to local generation
        return generate_onepager(topic, target_audience, key_points, tone, length)
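
# --- Hypothetical remote-call sketch (illustration only, not wired in) ---------
# generate_with_mcp() currently always falls back to local generation. As a rough
# sketch of what a remote call *might* look like, the helper below POSTs a prompt
# to one of the URLs in MCP_ENDPOINTS with plain HTTP via `requests`. The payload
# shape, the MCP_API_KEY environment variable, and the "text" response field are
# placeholder assumptions, not the actual MCP protocol (which is JSON-RPC based
# and would need a proper client implementation).
def _call_remote_service(service, prompt):
    """Illustrative helper only; not called anywhere in this app."""
    endpoint = MCP_ENDPOINTS.get(service)
    if endpoint is None:
        return None  # e.g. "huggingface" is handled by the local InferenceClient
    try:
        response = requests.post(
            endpoint,
            headers={"Authorization": f"Bearer {os.environ.get('MCP_API_KEY', '')}"},
            json={"prompt": prompt, "max_tokens": 600},  # placeholder payload shape
            timeout=30,
        )
        response.raise_for_status()
        return response.json().get("text")  # placeholder response field
    except Exception:
        return None  # callers should fall back to generate_onepager()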

def generate_onepager(topic, target_audience, key_points, tone, length):
    if generator is None:
        return "Error: Model not initialized. Please wait for the model to load."

    # Map the requested length to a token budget for generation
    length_tokens = {"Short": 200, "Medium": 400, "Long": 600}
    max_tokens = length_tokens.get(length, 400)

    # Structured prompt that seeds the instruct model with the document skeleton
    prompt = f"""Business Document: {topic}
Target Audience: {target_audience}
Key Points: {key_points}
Tone: {tone}
Professional one-page business summary:
{topic.upper()}
Business Case & Action Plan
Executive Summary:
{topic} represents a strategic opportunity for {target_audience.lower()}. This initiative delivers measurable business value through focused implementation and clear outcomes.
Key Benefits:
"""
    try:
        # Generate using the HF Inference API
        response = generator.text_generation(
            prompt,
            max_new_tokens=max_tokens,
            temperature=0.7,
            do_sample=True,
            return_full_text=False
        )
        # Extract generated text (text_generation returns a str by default)
        if isinstance(response, str):
            onepager = response.strip()
        else:
            onepager = response.generated_text.strip()
        # If output is too short, provide a structured fallback
        if len(onepager) < 50:
            onepager = create_structured_onepager(topic, target_audience, key_points, tone)
        return onepager
    except Exception as e:
        # Fallback to structured template
        return create_structured_onepager(topic, target_audience, key_points, tone)

def create_structured_onepager(topic, target_audience, key_points, tone):
    """Create a structured one-pager that looks like a real business document"""
    key_points_list = [point.strip() for point in key_points.split(',') if point.strip()]
    # Create a visual one-pager that looks professional, not markdown
    template = f"""
╔══════════════════════════════════════════════════════════════════════════════╗
║                              {topic.upper()}                                  ║
║                         Business Case & Action Plan                           ║
╚══════════════════════════════════════════════════════════════════════════════╝

TARGET AUDIENCE: {target_audience.title()}                      DATE: {import_date()}

┌─ EXECUTIVE SUMMARY ───────────────────────────────────────────────────────────┐
│ {topic} represents a strategic opportunity to drive significant business      │
│ value through focused implementation. This initiative delivers measurable     │
│ outcomes with clear ROI and competitive advantages.                           │
└────────────────────────────────────────────────────────────────────────────────┘

✅ KEY BENEFITS & VALUE DRIVERS
{chr(10).join([f"   ▪ {point.strip()}" for point in key_points_list[:4]])}

⚡ BUSINESS IMPACT
   Revenue Growth:  15-30% increase through improved efficiency
   Cost Reduction:  20-25% operational cost savings
   Time to Market:  40-50% faster delivery cycles
   Risk Mitigation: Reduced compliance and operational risks

📅 IMPLEMENTATION ROADMAP
   Phase 1 (Month 1-2): Assessment & Planning
   Phase 2 (Month 3-4): Core Implementation
   Phase 3 (Month 5-6): Optimization & Scale

💵 INVESTMENT SUMMARY
   Initial Investment: $XXX,XXX (one-time)
   Annual Operating:   $XX,XXX (ongoing)
   Break-even Point:   8-12 months
   3-Year ROI:         250-400%

┌─ DECISION REQUIRED ───────────────────────────────────────────────────────────┐
│ APPROVE:   Proceed with {topic.lower()} implementation                         │
│ TIMELINE:  Decision needed by [DATE] to meet Q[X] targets                      │
│ NEXT STEP: Schedule planning session with implementation team                  │
└────────────────────────────────────────────────────────────────────────────────┘

Contact: [Implementation Team] | Email: [team@company.com] | Ext: XXXX
"""
    return template

def import_date():
    """Get current date for the one-pager"""
    from datetime import datetime
    return datetime.now().strftime("%B %d, %Y")

def generate_header_image(topic, tone):
    """Generate header image placeholder (image generation disabled)"""
    # Image generation is disabled for now to avoid dependency issues
    return None

def export_to_pdf(content, topic, header_image=None):
    """Export the one-pager content to PDF"""
    try:
        # Create a temporary file for the PDF
        with tempfile.NamedTemporaryFile(delete=False, suffix='.pdf') as tmp_file:
            pdf_path = tmp_file.name

        # Create PDF document
        doc = SimpleDocTemplate(pdf_path, pagesize=letter, topMargin=0.5 * inch)
        styles = getSampleStyleSheet()

        # Custom styles
        title_style = ParagraphStyle(
            'CustomTitle',
            parent=styles['Heading1'],
            fontSize=16,
            spaceAfter=20,
            textColor=colors.darkblue,
            alignment=1  # Center alignment
        )
        body_style = ParagraphStyle(
            'CustomBody',
            parent=styles['Normal'],
            fontSize=10,
            fontName='Courier',  # Monospace font to preserve ASCII formatting
            leftIndent=0,
            rightIndent=0
        )

        # Build PDF content
        story = []

        # Skip real image handling for now (images disabled)
        if header_image:
            try:
                # Add placeholder for image
                story.append(Paragraph("[Header Image Placeholder]", title_style))
                story.append(Spacer(1, 20))
            except Exception as e:
                print(f"Failed to add image placeholder: {str(e)}")

        # Add title
        story.append(Paragraph(f"Business Document: {topic}", title_style))
        story.append(Spacer(1, 20))

        # Add content (preserve formatting; escape XML-sensitive characters for ReportLab)
        content_lines = content.split('\n')
        for line in content_lines:
            if line.strip():
                safe_line = line.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
                story.append(Paragraph(safe_line, body_style))
            else:
                story.append(Spacer(1, 6))

        # Build PDF
        doc.build(story)
        return pdf_path
    except Exception as e:
        print(f"PDF export failed: {str(e)}")
        return None
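
# Example usage (assumed standalone sketch): render the structured template and
# write it to a temporary PDF. The returned path could later feed a download
# component if PDF download is re-enabled in the interface.
#
#   sample_text = create_structured_onepager(
#       "AI in Healthcare", "Healthcare professionals",
#       "Machine learning, Data privacy, Cost-effectiveness", "Professional")
#   sample_pdf = export_to_pdf(sample_text, "AI in Healthcare")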

def generate_complete_onepager(topic, target_audience, key_points, tone, length, model_choice="local", include_image=True):
    """Generate a complete one-pager and return the content, PDF path, and optional header image"""
    # Generate the text content
    content = generate_with_mcp(topic, target_audience, key_points, tone, length, model_choice)

    # Generate header image if requested
    header_image = None
    if include_image and image_generator is not None:
        header_image = generate_header_image(topic, tone)

    # Generate PDF
    pdf_path = export_to_pdf(content, topic, header_image)

    return content, pdf_path, header_image

# Create the Gradio interface
def create_interface():
    with gr.Blocks(title="One-Pager Generator", theme=gr.themes.Soft()) as demo:
        gr.Markdown("# 📄 AI One-Pager Generator")
        gr.Markdown("Generate professional business documents using modern AI models via the Inference API, with PDF export!")

        with gr.Row():
            with gr.Column(scale=1):
                topic_input = gr.Textbox(
                    label="Topic",
                    placeholder="e.g., Digital Marketing Strategy, Climate Change Solutions, etc.",
                    lines=2,
                    value="Artificial Intelligence in Healthcare"
                )
                audience_input = gr.Textbox(
                    label="Target Audience",
                    placeholder="e.g., Business executives, Students, General public, etc.",
                    lines=1,
                    value="Healthcare professionals"
                )
                keypoints_input = gr.Textbox(
                    label="Key Points to Cover",
                    placeholder="Enter main points separated by commas",
                    lines=4,
                    value="Machine learning applications, Data privacy, Cost-effectiveness, Implementation challenges"
                )
                tone_dropdown = gr.Dropdown(
                    choices=["Professional", "Casual", "Academic", "Persuasive", "Informative"],
                    label="Tone",
                    value="Professional"
                )
                length_dropdown = gr.Dropdown(
                    choices=["Short", "Medium", "Long"],
                    label="Length",
                    value="Medium"
                )
                model_dropdown = gr.Dropdown(
                    choices=["local", "mcp-claude", "mcp-openai"],
                    label="AI Model",
                    value="local",
                    info="Choose between the local Inference API model or MCP-connected external services"
                )
                include_image_checkbox = gr.Checkbox(
                    label="Generate Header Image",
                    value=False,
                    info="Image generation temporarily disabled",
                    interactive=False
                )
                generate_btn = gr.Button("🚀 Generate One-Pager", variant="primary")

            with gr.Column(scale=2):
                with gr.Row():
                    output_text = gr.Textbox(
                        label="Generated One-Pager",
                        lines=20,
                        max_lines=30,
                        show_copy_button=True,
                        placeholder="Your generated one-pager will appear here...",
                        scale=2
                    )
                    generated_image = gr.Image(
                        label="Header Image",
                        scale=1,
                        height=200
                    )
                # PDF download temporarily disabled to avoid schema issues

        with gr.Row():
            gr.Markdown("""
            ### 💡 Tips for Best Results:
            - **Be specific** with your topic for more targeted content
            - **Include 3-5 key points** separated by commas
            - **Choose the right tone** for your intended audience
            - **Use descriptive audience details** (e.g., "C-level executives" vs. "executives")
            - **Try different AI models** - local for privacy, MCP for enhanced capabilities
            """)

        # Connect the generate button to the function
        def generate_and_display(topic, audience, keypoints, tone, length, model, include_image):
            content, pdf_path, header_image = generate_complete_onepager(
                topic, audience, keypoints, tone, length, model, include_image
            )
            # Return only text and image for now (PDF download disabled)
            return (
                content,       # output_text
                header_image   # generated_image
            )

        generate_btn.click(
            fn=generate_and_display,
            inputs=[topic_input, audience_input, keypoints_input, tone_dropdown, length_dropdown, model_dropdown, include_image_checkbox],
            outputs=[output_text, generated_image]
        )

    return demo

# Initialize model and launch
if __name__ == "__main__":
    print("🚀 Starting One-Pager Generator with modern AI via the Inference API...")
    print("📥 Loading AI text model...")
    model_status = initialize_model()
    print(f"✅ {model_status}")

    print("🎨 Initializing image generator...")
    image_status = initialize_image_generator()
    print(f"✅ {image_status}")

    print("🔗 Initializing MCP client...")
    mcp_status = initialize_mcp_client()
    print(f"✅ {mcp_status}")

    print("🚀 Launching interface...")
    demo = create_interface()
    demo.launch(share=True, server_name="0.0.0.0")