api.py
"""Flask API exposing the agentx scraping, content, link-checking, and SEO tools."""

from flask import Flask, request, jsonify, render_template_string
from agentx.seo import analyze_seo, get_keyword_suggestions, optimize_metadata
from agentx.scraper import scrape_website
from agentx.content_update import process_update
from agentx.content_addition import process_add
from agentx.error_link import process_links
import json

app = Flask(__name__)

@app.route('/scrape', methods=["GET"])
def scrape():
    """Scrape the page at ?url= and return the scraper output as JSON."""
    url = request.args.get("url")
    if not url:
        return jsonify({"error": "URL parameter is required"}), 400
    try:
        # scrape_website returns a JSON string; parse it before responding.
        result = scrape_website(url)
        data = json.loads(result)
        return jsonify(data)
    except Exception as e:
        return jsonify({"error": str(e)}), 500
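
# Example (assuming the app is served locally as configured below):
#   curl "http://127.0.0.1:5000/scrape?url=https://example.com"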

@app.route('/update', methods=["GET"])
def update():
    """Suggest content updates for a page (defaults to the DeepSeek docs)."""
    url = request.args.get("url", "https://api-docs.deepseek.com/")
    try:
        scraped = scrape_website(url)
        if isinstance(scraped, str):
            scraped = json.loads(scraped)
        suggestions = process_update(scraped)
        return jsonify(suggestions)
    except Exception as e:
        return jsonify({"error": str(e)}), 500
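
# Example (the url parameter is optional here and falls back to the
# DeepSeek docs):
#   curl "http://127.0.0.1:5000/update?url=https://example.com"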

@app.route('/add', methods=["GET"])
def add():
    """Suggest content additions for a page (defaults to the DeepSeek docs)."""
    url = request.args.get("url", "https://api-docs.deepseek.com/")
    try:
        scraped = scrape_website(url)
        if isinstance(scraped, str):
            scraped = json.loads(scraped)
        suggestions = process_add(scraped)
        return jsonify(suggestions)
    except Exception as e:
        return jsonify({"error": str(e)}), 500
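
# Example (same fallback behaviour as /update):
#   curl "http://127.0.0.1:5000/add"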

@app.route('/errorlink', methods=["GET"])
def errorlink():
    """Report broken links on a page (defaults to a sample Wikipedia article)."""
    url = request.args.get("url", "https://en.wikipedia.org/wiki/Beheshtiabad")
    try:
        scraped = scrape_website(url)
        # Ensure the scraper response is valid JSON before parsing.
        try:
            scraped_data = json.loads(scraped)
        except json.JSONDecodeError:
            return jsonify({"error": "Invalid JSON response from scraper"}), 500
        broken_links = process_links(scraped_data, base_url=url)
        return jsonify({"broken_links": broken_links})
    except Exception as e:
        return jsonify({"error": str(e)}), 500
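
# Example: list broken links found on a page:
#   curl "http://127.0.0.1:5000/errorlink?url=https://example.com"
# process_links is expected to return a JSON-serialisable list of the
# unreachable URLs discovered in the scraped page data.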

@app.route('/seo', methods=["GET"])
def seo():
    """Run the SEO analyses for ?url= and render an HTML report."""
    url = request.args.get("url")
    if not url:
        return jsonify({"error": "URL parameter is required"}), 400
    try:
        # Run all SEO analyses.
        seo_report = analyze_seo(url)
        keyword_data = get_keyword_suggestions(url)
        # Placeholder HTML for optimization.
        current_html = "<html><head><title>Example</title></head><body></body></html>"
        optimized_html = optimize_metadata(current_html)
        # Render a simple HTML report.
        html_template = """
        <html>
        <head><title>SEO Analysis</title></head>
        <body>
            <h1>SEO Analysis for {{ url }}</h1>
            <h2>SEO Report</h2>
            <ul>
            {% for key, value in seo_report.items() %}
                <li><strong>{{ key }}:</strong> {{ value }}</li>
            {% endfor %}
            </ul>
            <h2>Keyword Suggestions</h2>
            <ul>
            {% for keyword in keyword_data %}
                <li>{{ keyword }}</li>
            {% endfor %}
            </ul>
            <h2>Optimized Metadata</h2>
            <pre>{{ optimized_html }}</pre>
        </body>
        </html>
        """
        return render_template_string(
            html_template,
            url=url,
            seo_report=seo_report,
            keyword_data=keyword_data,
            optimized_html=optimized_html,
        )
    except Exception as e:
        app.logger.error(f"Error during SEO analysis: {e}")
        return jsonify({"error": str(e)}), 500
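
# Example: open the rendered report in a browser:
#   http://127.0.0.1:5000/seo?url=https://example.com
# Note: optimize_metadata currently runs against the placeholder HTML
# above rather than the scraped page.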

if __name__ == "__main__":
    app.run(debug=True, host="127.0.0.1", port=5000)
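
# A minimal client-side sketch (hypothetical, not part of this API): it
# assumes the server above is running and that the `requests` package is
# installed.
#
#   import requests
#
#   resp = requests.get(
#       "http://127.0.0.1:5000/scrape",
#       params={"url": "https://example.com"},
#   )
#   resp.raise_for_status()   # surface HTTP errors early
#   print(resp.json())        # parsed scraper output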