from litellm import completion
import os
from flask import (
    Flask,
    flash,
    jsonify,
    redirect,
    render_template,
    request,
    session,
    url_for,
)
from flask_session import Session
from datetime import datetime, timedelta

# BUG FIX: os.environ.get() returns None when the variable is unset, and
# assigning None into os.environ raises TypeError.  Default to "" instead.
os.environ["OPENAI_API_KEY"] = os.environ.get("OPENAI_API_KEY", "")

app = Flask(__name__)
app.secret_key = 'your_secret_key'  # TODO(review): load from env/config in production
app.config['SESSION_TYPE'] = 'filesystem'
Session(app)

# In-memory store of saved prompt -> response pairs (lost on restart).
prompt_dict = {}


def get_litellm_response(user_input, message_history):
    """Send *message_history* to LiteLLM (gpt-3.5-turbo) and return the reply text.

    Each history entry is a "Role: content" string; it is converted into the
    {"role": ..., "content": ...} dict format the chat-completion API expects.

    BUG FIX: this helper was previously defined *inside* gpt3(), making it
    unreachable from super_coder() (NameError).  It is now module level.
    BUG FIX: str.partition(': ') is used instead of split(': ') so content
    that itself contains ": " is no longer truncated.
    """
    messages = []
    for msg in message_history:
        role, _, content = msg.partition(': ')
        messages.append({"role": role.lower(), "content": content})
    response = completion(
        model="gpt-3.5-turbo",
        messages=messages,
        max_tokens=1200,
        temperature=0.85,
        n=1,
    )
    return response['choices'][0]['message']['content']


@app.route('/')
def index():
    """Render the main page with the saved prompt/response pairs."""
    return render_template('index.html', prompts=prompt_dict)


@app.route('/add', methods=['POST'])
def add_prompt():
    """Save a prompt/response pair from the form and flash the outcome.

    BUG FIX: flash/redirect/url_for were used but never imported; they are
    now imported at the top of the file.
    """
    prompt = request.form['prompt'].strip()
    response = request.form['response'].strip()
    if prompt and response:
        prompt_dict[prompt] = response
        flash('Prompt added successfully.')
    else:
        flash('Prompt or response cannot be empty.')
    return redirect(url_for('index'))


@app.route('/gpt3', methods=['POST'])
def gpt3():
    """Chat endpoint: record the user message, query LiteLLM, return the reply."""
    prompt = request.form['prompt']

    # Initialize message_history in session if it doesn't exist.
    if 'message_history' not in session:
        session['message_history'] = []

    # Append the user input to message_history.
    session['message_history'].append("User: " + prompt)

    try:
        response_text = get_litellm_response(prompt, session['message_history'])
        # Append the assistant's response to message_history.
        session['message_history'].append("Assistant: " + response_text.strip())
        session.modified = True  # ensure the session is saved after in-place mutation
        return jsonify({"text": response_text.strip()})
    except Exception as e:
        return jsonify({"error": str(e)})


@app.route('/super_coder', methods=['POST'])
def super_coder():
    """Multi-mode coding assistant endpoint, dispatched on 'super_coder_choice'.

    Modes: "1" interactive app development, "2" template generation,
    "3" autonomous coding, "4"/"5" placeholders, "6" return to menu.
    """
    super_coder_choice = request.form['super_coder_choice']

    if super_coder_choice == "1":
        app_name = request.form['app_name']
        app_prompt = request.form['app_prompt']
        # Initialize super_coder_history in session if it doesn't exist.
        if 'super_coder_history' not in session:
            session['super_coder_history'] = []
        session['super_coder_history'].append(
            f"New application {app_name} setup initiated.")

        # BUG FIX: the original looped `while True` re-reading the same
        # immutable request.form['choice'], spinning forever within one HTTP
        # request for choices "1"/"2".  Handle exactly one choice per request.
        choice = request.form['choice']
        if choice == "1":
            session['super_coder_history'].append("Continuing development...")
            response = get_litellm_response(
                f"Continue developing the {app_name} application based on the prompt: {app_prompt}",
                session['super_coder_history'])
            session['super_coder_history'].append(response)
        elif choice == "2":
            guidance = request.form['guidance']
            response = get_litellm_response(
                f"Provide guidance for the current development of {app_name}: {guidance}",
                session['super_coder_history'])
            session['super_coder_history'].append(response)
        elif choice == "3":
            pass  # explicit stop: record nothing further this request
        else:
            session['super_coder_history'].append("Invalid choice. Please try again.")
        session.modified = True
        return jsonify({"history": session['super_coder_history']})

    elif super_coder_choice == "2":
        template_choice = request.form['template_choice']
        template_instructions = {
            "1": "Create a PyTorch application template with basic structure and dependencies.",
            "2": "Create a machine learning pipeline template with data preprocessing, model training, and evaluation steps.",
            "3": "Create a template that demonstrates the integration of Mergekit library for advanced functionality.",
        }
        if template_choice in template_instructions:
            instruction = template_instructions[template_choice]
            response = get_litellm_response(instruction, [])
            return jsonify({"text": response})
        else:
            return jsonify({"error": "Invalid choice. Please try again."})

    elif super_coder_choice == "3":
        prompt = request.form['prompt']
        auto_steps = int(request.form['auto_steps'])
        additional_steps = int(request.form['additional_steps'])

        def autonomous_coding(prompt, steps, additional_steps):
            """Run *steps* (+ *additional_steps*) autonomous rounds; return log.

            With steps == 0 the single submitted form choice is handled
            interactively instead.
            """
            history = []
            if steps == 0:
                # BUG FIX: previously an infinite `while True` over the same
                # request.form value; handle the one submitted choice.
                choice = request.form['choice']
                if choice == "1":
                    history.append("Continuing development...")
                    response = get_litellm_response(
                        f"Continue developing the code based on the previous prompt: {prompt}",
                        history)
                    history.append(response)
                elif choice == "2":
                    guidance = request.form['guidance']
                    response = get_litellm_response(
                        f"Provide guidance for the current development: {guidance}",
                        history)
                    history.append(response)
                elif choice != "3":
                    history.append("Invalid choice. Please try again.")
            else:
                for i in range(steps):
                    history.append(f"Autonomous Coding Step {i+1}/{steps}")
                    response = get_litellm_response(
                        f"Continue developing the code autonomously based on the prompt: {prompt}",
                        history)
                    history.append(response)
                for i in range(additional_steps):
                    history.append(
                        f"Additional Autonomous Coding Step {i+1}/{additional_steps}")
                    response = get_litellm_response(
                        f"Continue developing the code autonomously based on the prompt: {prompt}",
                        history)
                    history.append(response)
                # BUG FIX: removed the stray unconditional recursive
                # autonomous_coding(prompt, 0, 0) call, which re-entered the
                # interactive branch after every autonomous run.
            return history

        history = autonomous_coding(prompt, auto_steps, additional_steps)
        return jsonify({"history": history})

    elif super_coder_choice == "4":
        # Placeholder for advanced settings.
        return jsonify({"text": "Advanced Settings functionality to be implemented."})
    elif super_coder_choice == "5":
        # Placeholder for managing the prompt folder.
        return jsonify({"text": "Manage Prompt Folder functionality to be implemented."})
    elif super_coder_choice == "6":
        return jsonify({"text": "Returning to the main menu..."})
    else:
        return jsonify({"error": "Invalid choice. Please try again."})
import json


@app.route('/clear_session', methods=['GET'])
def clear_session():
    """Drop all server-side session state (chat and super-coder history)."""
    session.clear()
    return jsonify({"result": "Session cleared"})


@app.route('/history')
def history():
    """Return the current session's chat history as a JSON array.

    BUG FIX: the history list was passed through json.dumps() *and* then
    jsonify(), so clients received a doubly-encoded JSON string.  jsonify
    serializes the list itself, so hand it the list directly.
    """
    if 'message_history' in session:
        return jsonify({"history": session['message_history']})
    return jsonify({"history": "No message history found in the current session."})


if __name__ == '__main__':
    # Listen on all interfaces so the app is reachable inside a container.
    app.run(host='0.0.0.0', port=8080)